1// SPDX-License-Identifier: GPL-2.0-or-later
2/*
3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
4 */
5
6#include <stdio.h>
7#include <stdlib.h>
8
9#define unlikely(cond) (cond)
10#include <asm/insn.h>
11#include "../../../arch/x86/lib/inat.c"
12#include "../../../arch/x86/lib/insn.c"
13
14#define CONFIG_64BIT 1
15#include <asm/nops.h>
16
17#include <asm/orc_types.h>
18#include <objtool/check.h>
19#include <objtool/elf.h>
20#include <objtool/arch.h>
21#include <objtool/warn.h>
22#include <objtool/endianness.h>
23#include <objtool/builtin.h>
24#include <arch/elf.h>
25
/*
 * Return true when @name is the compiler-generated fentry symbol that
 * marks ftrace patch sites on x86.
 */
int arch_ftrace_match(char *name)
{
	return strcmp(name, "__fentry__") == 0;
}
30
31static int is_x86_64(const struct elf *elf)
32{
33 switch (elf->ehdr.e_machine) {
34 case EM_X86_64:
35 return 1;
36 case EM_386:
37 return 0;
38 default:
39 WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
40 return -1;
41 }
42}
43
44bool arch_callee_saved_reg(unsigned char reg)
45{
46 switch (reg) {
47 case CFI_BP:
48 case CFI_BX:
49 case CFI_R12:
50 case CFI_R13:
51 case CFI_R14:
52 case CFI_R15:
53 return true;
54
55 case CFI_AX:
56 case CFI_CX:
57 case CFI_DX:
58 case CFI_SI:
59 case CFI_DI:
60 case CFI_SP:
61 case CFI_R8:
62 case CFI_R9:
63 case CFI_R10:
64 case CFI_R11:
65 case CFI_RA:
66 default:
67 return false;
68 }
69}
70
/*
 * A PC-relative relocation's addend is biased by -4 (the size of the
 * relocated immediate); undo that bias to get the destination offset.
 */
unsigned long arch_dest_reloc_offset(int addend)
{
	return 4 + addend;
}
75
76unsigned long arch_jump_destination(struct instruction *insn)
77{
78 return insn->offset + insn->len + insn->immediate;
79}
80
/*
 * Return true for relocation types whose computation includes P, the
 * address of the relocated location itself (i.e. PC-relative relocs).
 */
bool arch_pc_relative_reloc(struct reloc *reloc)
{
	switch (reloc_type(reloc)) {
	case R_X86_64_PC8:
	case R_X86_64_PC16:
	case R_X86_64_PC32:
	case R_X86_64_PC64:
	case R_X86_64_PLT32:
	case R_X86_64_GOTPC32:
	case R_X86_64_GOTPCREL:
		return true;

	default:
		return false;
	}
}
104
/*
 * Allocate a zeroed stack_op, append it to the op list via the local
 * 'ops_list' tail cursor, and run the attached block once to fill it
 * in.  Returns -1 from the *enclosing* function on allocation failure.
 */
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (*ops_list = op, ops_list = &op->next; op; op = NULL)

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 * 00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 * 01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 * 10 |  [r/m + d32]   |[S+D]|   [r/m + d32] |
 * 11 |       r/ m     |
 */

/*
 * These helpers read the modrm_*/sib_* locals of the enclosing
 * function (arch_decode_instruction) -- they are not self-contained.
 */
#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

/* mod=00, r/m=5 is RIP-relative addressing (no base register). */
#define is_RIP()	((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
/* mod!=11, r/m=4 means a SIB byte follows the ModRM byte. */
#define have_SIB()	((modrm_rm & 7) == CFI_SP && mod_is_mem())

/*
 * Check the ModRM register. If there is a SIB byte then check with
 * the SIB base register. But if the SIB base is 5 (i.e. CFI_BP) and
 * ModRM mod is 0 then there is no base register.
 */
#define rm_is(reg) (have_SIB() ? \
		    sib_base == (reg) && sib_index == CFI_SP && \
		    (sib_base != CFI_BP || modrm_mod != 0) : \
		    modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
140
141static bool has_notrack_prefix(struct insn *insn)
142{
143 int i;
144
145 for (i = 0; i < insn->prefixes.nbytes; i++) {
146 if (insn->prefixes.bytes[i] == 0x3e)
147 return true;
148 }
149
150 return false;
151}
152
/*
 * Decode the single instruction at @sec + @offset (at most @maxlen
 * bytes) and fill in @insn: its length, its objtool type, its
 * immediate, and a list of stack_ops describing any effect it has on
 * the stack / frame registers.
 *
 * Returns 0 on success (anything uninteresting stays INSN_OTHER), or
 * -1 if the bytes cannot be decoded.
 */
int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    struct instruction *insn)
{
	struct stack_op **ops_list = &insn->stack_ops;
	const struct elf *elf = file->elf;
	struct insn ins;
	int x86_64, ret;
	unsigned char op1, op2, op3, prefix,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&ins, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	insn->len = ins.length;
	insn->type = INSN_OTHER;

	/* VEX-encoded (AVX etc.) instructions don't affect the frame. */
	if (ins.vex_prefix.nbytes)
		return 0;

	prefix = ins.prefixes.bytes[0];

	op1 = ins.opcode.bytes[0];
	op2 = ins.opcode.bytes[1];
	op3 = ins.opcode.bytes[2];

	/* Split out the REX extension bits used below. */
	if (ins.rex_prefix.nbytes) {
		rex = ins.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	/* Fold the REX bits in so modrm_reg/modrm_rm are full CFI regs. */
	if (ins.modrm.nbytes) {
		modrm = ins.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm = X86_MODRM_RM(modrm) + 8*rex_b;
	}

	if (ins.sib.nbytes) {
		sib = ins.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base = X86_SIB_BASE(sib) + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = ins.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		/* Only ADD(000), SUB(101) and AND(100) affect CFI state. */
		switch (modrm_reg & 7) {
		case 5:
			imm = -imm;
			fallthrough;
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = ins.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = ins.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%rip), %dst */
		if (is_RIP()) {
			insn->type = INSN_LEA_RIP;
			break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = ins.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		insn->type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:
		/* two/three-byte opcode space */

		if (op2 == 0x01) {

			switch (insn_last_prefix_id(&ins)) {
			case INAT_PFX_REPE:
			case INAT_PFX_REPNE:
				if (modrm == 0xca)
					/* eretu/erets */
					insn->type = INSN_CONTEXT_SWITCH;
				break;
			default:
				if (modrm == 0xca)
					insn->type = INSN_CLAC;
				else if (modrm == 0xcb)
					insn->type = INSN_STAC;
				break;
			}
		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			insn->type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			insn->type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			insn->type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			insn->type = INSN_NOP;

		} else if (op2 == 0x1e) {

			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
				insn->type = INSN_ENDBR;


		} else if (op2 == 0x38 && op3 == 0xf8) {
			if (ins.prefixes.nbytes == 1 &&
			    ins.prefixes.bytes[0] == 0xf2) {
				/* ENQCMD cannot be used in the kernel. */
				WARN("ENQCMD instruction at %s:%lx", sec->name,
				     offset);
			}

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xcc:
		/* int3 */
		insn->type = INSN_TRAP;
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		insn->type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		insn->type = INSN_RETURN;
		break;

	case 0xc7: /* mov imm, r/m */
		if (!opts.noinstr)
			break;

		/*
		 * Recognize "mov $imm32, disp32(%rip)" writes to pv_ops[]
		 * in .init.text and record the (slot, function) pairing.
		 */
		if (ins.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
			struct reloc *immr, *disp;
			struct symbol *func;
			int idx;

			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);

			if (!immr || strcmp(immr->sym->name, "pv_ops"))
				break;

			idx = (reloc_addend(immr) + 8) / sizeof(void *);

			func = disp->sym;
			if (disp->sym->type == STT_SECTION)
				func = find_symbol_by_offset(disp->sym->sec, reloc_addend(disp));
			if (!func) {
				WARN("no func for pv_ops[]");
				return -1;
			}

			objtool_pv_add(file, idx, func);
		}

		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;

	case 0xca: /* retf */
	case 0xcb: /* retf */
		insn->type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe0: /* loopne */
	case 0xe1: /* loope */
	case 0xe2: /* loop */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe8:
		insn->type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		insn->type = INSN_CLD;
		break;

	case 0xfd:
		insn->type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3) {

			insn->type = INSN_CALL_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 4) {

			insn->type = INSN_JUMP_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 5) {

			/* jmpf */
			insn->type = INSN_CONTEXT_SWITCH;

		} else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	/*
	 * For branches the immediate is the displacement used by
	 * arch_jump_destination(); memory forms carry it in the
	 * displacement field instead.
	 */
	if (ins.immediate.nbytes)
		insn->immediate = ins.immediate.value;
	else if (ins.displacement.nbytes)
		insn->immediate = ins.displacement.value;

	return 0;
}
749
750void arch_initial_func_cfi_state(struct cfi_init_state *state)
751{
752 int i;
753
754 for (i = 0; i < CFI_NUM_REGS; i++) {
755 state->regs[i].base = CFI_UNDEFINED;
756 state->regs[i].offset = 0;
757 }
758
759 /* initial CFA (call frame address) */
760 state->cfa.base = CFI_SP;
761 state->cfa.offset = 8;
762
763 /* initial RA (return address) */
764 state->regs[CFI_RA].base = CFI_CFA;
765 state->regs[CFI_RA].offset = -8;
766}
767
768const char *arch_nop_insn(int len)
769{
770 static const char nops[5][5] = {
771 { BYTES_NOP1 },
772 { BYTES_NOP2 },
773 { BYTES_NOP3 },
774 { BYTES_NOP4 },
775 { BYTES_NOP5 },
776 };
777
778 if (len < 1 || len > 5) {
779 WARN("invalid NOP size: %d\n", len);
780 return NULL;
781 }
782
783 return nops[len-1];
784}
785
786#define BYTE_RET 0xC3
787
788const char *arch_ret_insn(int len)
789{
790 static const char ret[5][5] = {
791 { BYTE_RET },
792 { BYTE_RET, 0xcc },
793 { BYTE_RET, 0xcc, BYTES_NOP1 },
794 { BYTE_RET, 0xcc, BYTES_NOP2 },
795 { BYTE_RET, 0xcc, BYTES_NOP3 },
796 };
797
798 if (len < 1 || len > 5) {
799 WARN("invalid RET size: %d\n", len);
800 return NULL;
801 }
802
803 return ret[len-1];
804}
805
806int arch_decode_hint_reg(u8 sp_reg, int *base)
807{
808 switch (sp_reg) {
809 case ORC_REG_UNDEFINED:
810 *base = CFI_UNDEFINED;
811 break;
812 case ORC_REG_SP:
813 *base = CFI_SP;
814 break;
815 case ORC_REG_BP:
816 *base = CFI_BP;
817 break;
818 case ORC_REG_SP_INDIRECT:
819 *base = CFI_SP_INDIRECT;
820 break;
821 case ORC_REG_R10:
822 *base = CFI_R10;
823 break;
824 case ORC_REG_R13:
825 *base = CFI_R13;
826 break;
827 case ORC_REG_DI:
828 *base = CFI_DI;
829 break;
830 case ORC_REG_DX:
831 *base = CFI_DX;
832 break;
833 default:
834 return -1;
835 }
836
837 return 0;
838}
839
840bool arch_is_retpoline(struct symbol *sym)
841{
842 return !strncmp(sym->name, "__x86_indirect_", 15);
843}
844
845bool arch_is_rethunk(struct symbol *sym)
846{
847 return !strcmp(sym->name, "__x86_return_thunk");
848}
849
850bool arch_is_embedded_insn(struct symbol *sym)
851{
852 return !strcmp(sym->name, "retbleed_return_thunk") ||
853 !strcmp(sym->name, "srso_safe_ret");
854}
1// SPDX-License-Identifier: GPL-2.0-or-later
2/*
3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
4 */
5
6#include <stdio.h>
7#include <stdlib.h>
8
9#define unlikely(cond) (cond)
10#include <asm/insn.h>
11#include "../../../arch/x86/lib/inat.c"
12#include "../../../arch/x86/lib/insn.c"
13
14#include "../../check.h"
15#include "../../elf.h"
16#include "../../arch.h"
17#include "../../warn.h"
18
/*
 * Map a 3-bit opcode/ModRM register number to its CFI register,
 * indexed as op_to_cfi_reg[reg][rex-extension-bit].
 */
static unsigned char op_to_cfi_reg[][2] = {
	{CFI_AX, CFI_R8},
	{CFI_CX, CFI_R9},
	{CFI_DX, CFI_R10},
	{CFI_BX, CFI_R11},
	{CFI_SP, CFI_R12},
	{CFI_BP, CFI_R13},
	{CFI_SI, CFI_R14},
	{CFI_DI, CFI_R15},
};
29
30static int is_x86_64(const struct elf *elf)
31{
32 switch (elf->ehdr.e_machine) {
33 case EM_X86_64:
34 return 1;
35 case EM_386:
36 return 0;
37 default:
38 WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
39 return -1;
40 }
41}
42
43bool arch_callee_saved_reg(unsigned char reg)
44{
45 switch (reg) {
46 case CFI_BP:
47 case CFI_BX:
48 case CFI_R12:
49 case CFI_R13:
50 case CFI_R14:
51 case CFI_R15:
52 return true;
53
54 case CFI_AX:
55 case CFI_CX:
56 case CFI_DX:
57 case CFI_SI:
58 case CFI_DI:
59 case CFI_SP:
60 case CFI_R8:
61 case CFI_R9:
62 case CFI_R10:
63 case CFI_R11:
64 case CFI_RA:
65 default:
66 return false;
67 }
68}
69
/*
 * A PC-relative relocation's addend is biased by -4 (the size of the
 * relocated immediate); undo that bias to get the destination offset.
 */
unsigned long arch_dest_reloc_offset(int addend)
{
	return 4 + addend;
}
74
75unsigned long arch_jump_destination(struct instruction *insn)
76{
77 return insn->offset + insn->len + insn->immediate;
78}
79
/*
 * Allocate a zeroed stack_op, append it to @ops_list, and run the
 * attached block once to fill it in.  Returns -1 from the *enclosing*
 * function on allocation failure.
 */
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (list_add_tail(&op->list, ops_list); op; op = NULL)
84
/*
 * Decode the single instruction at @sec + @offset (at most @maxlen
 * bytes), reporting its length, objtool type and immediate through the
 * @len/@type/@immediate out-parameters, and appending any stack/frame
 * effects to @ops_list as stack_ops.
 *
 * Returns 0 on success (anything uninteresting stays INSN_OTHER), or
 * -1 if the bytes cannot be decoded.
 */
int arch_decode_instruction(const struct elf *elf, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate,
			    struct list_head *ops_list)
{
	struct insn insn;
	int x86_64, sign;
	unsigned char op1, op2, rex = 0, rex_b = 0, rex_r = 0, rex_w = 0,
		      rex_x = 0, modrm = 0, modrm_mod = 0, modrm_rm = 0,
		      modrm_reg = 0, sib = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	insn_init(&insn, sec->data->d_buf + offset, maxlen, x86_64);
	insn_get_length(&insn);

	if (!insn_complete(&insn)) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	/* VEX-encoded (AVX etc.) instructions don't affect the frame. */
	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];

	/* Split out the REX extension bits used below. */
	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm);
		modrm_rm = X86_MODRM_RM(modrm);
	}

	if (insn.sib.nbytes)
		sib = insn.sib.bytes[0];

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x81:
	case 0x83:
		/* Only REX.W-only (0x48) encodings targeting %rsp matter. */
		if (rex != 0x48)
			break;

		if (modrm == 0xe4) {
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = insn.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		if (modrm == 0xc4)
			sign = 1;
		else if (modrm == 0xec)
			sign = -1;
		else
			break;

		/* add/sub imm, %rsp */
		ADD_OP(op) {
			op->src.type = OP_SRC_ADD;
			op->src.reg = CFI_SP;
			op->src.offset = insn.immediate.value * sign;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		break;

	case 0x89:
		if (rex_w && !rex_r && modrm_mod == 3 && modrm_reg == 4) {

			/* mov %rsp, reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = CFI_SP;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_rm][rex_b];
			}
			break;
		}

		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_b &&
		    (modrm_mod == 1 || modrm_mod == 2) && modrm_rm == 5) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = insn.displacement.value;
			}

		} else if (rex_w && !rex_b && modrm_rm == 4 && sib == 0x24) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = insn.displacement.value;
			}
		}

		break;

	case 0x8b:
		/*
		 * NOTE(review): only mod==1 (disp8) is accepted here, while
		 * the 0x88 store case above also handles mod==2 (disp32).
		 * Confirm whether "mov disp32(%rbp), reg" should be tracked
		 * as well.
		 */
		if (rex_w && !rex_b && modrm_mod == 1 && modrm_rm == 5) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
			}

		} else if (rex_w && !rex_b && sib == 0x24 &&
			   modrm_mod != 3 && modrm_rm == 4) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
			}
		}

		break;

	case 0x8d:
		if (sib == 0x24 && rex_w && !rex_b && !rex_x) {

			ADD_OP(op) {
				if (!insn.displacement.value) {
					/* lea (%rsp), reg */
					op->src.type = OP_SRC_REG;
				} else {
					/* lea disp(%rsp), reg */
					op->src.type = OP_SRC_ADD;
					op->src.offset = insn.displacement.value;
				}
				op->src.reg = CFI_SP;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
			}

		} else if (rex == 0x48 && modrm == 0x65) {

			/* lea disp(%rbp), %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}

		} else if (rex == 0x49 && modrm == 0x62 &&
			   insn.displacement.value == -8) {

			/*
			 * lea -0x8(%r10), %rsp
			 *
			 * Restoring rsp back to its original value after a
			 * stack realignment.
			 */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_R10;
				op->src.offset = -8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}

		} else if (rex == 0x49 && modrm == 0x65 &&
			   insn.displacement.value == -16) {

			/*
			 * lea -0x10(%r13), %rsp
			 *
			 * Restoring rsp back to its original value after a
			 * stack realignment.
			 */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_R13;
				op->src.offset = -16;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}

		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:
		/* two-byte opcode space */

		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op)
			op->dest.type = OP_DEST_LEAVE;

		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */

	case 0xca: /* retf */
	case 0xcb: /* retf */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe8:
		*type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		*type = INSN_CLD;
		break;

	case 0xfd:
		*type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3)

			*type = INSN_CALL_DYNAMIC;

		else if (modrm_reg == 4)

			*type = INSN_JUMP_DYNAMIC;

		else if (modrm_reg == 5)

			/* jmpf */
			*type = INSN_CONTEXT_SWITCH;

		else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	/* For branches the immediate is the displacement to the target. */
	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}
550
551void arch_initial_func_cfi_state(struct cfi_init_state *state)
552{
553 int i;
554
555 for (i = 0; i < CFI_NUM_REGS; i++) {
556 state->regs[i].base = CFI_UNDEFINED;
557 state->regs[i].offset = 0;
558 }
559
560 /* initial CFA (call frame address) */
561 state->cfa.base = CFI_SP;
562 state->cfa.offset = 8;
563
564 /* initial RA (return address) */
565 state->regs[16].base = CFI_CFA;
566 state->regs[16].offset = -8;
567}
568
569const char *arch_nop_insn(int len)
570{
571 static const char nops[5][5] = {
572 /* 1 */ { 0x90 },
573 /* 2 */ { 0x66, 0x90 },
574 /* 3 */ { 0x0f, 0x1f, 0x00 },
575 /* 4 */ { 0x0f, 0x1f, 0x40, 0x00 },
576 /* 5 */ { 0x0f, 0x1f, 0x44, 0x00, 0x00 },
577 };
578
579 if (len < 1 || len > 5) {
580 WARN("invalid NOP size: %d\n", len);
581 return NULL;
582 }
583
584 return nops[len-1];
585}