1// SPDX-License-Identifier: GPL-2.0-or-later
2/*
3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
4 */
5
6#include <stdio.h>
7#include <stdlib.h>
8
9#define unlikely(cond) (cond)
10#include <asm/insn.h>
11#include "../../../arch/x86/lib/inat.c"
12#include "../../../arch/x86/lib/insn.c"
13
14#define CONFIG_64BIT 1
15#include <asm/nops.h>
16
17#include <asm/orc_types.h>
18#include <objtool/check.h>
19#include <objtool/elf.h>
20#include <objtool/arch.h>
21#include <objtool/warn.h>
22#include <objtool/endianness.h>
23#include <arch/elf.h>
24
25static int is_x86_64(const struct elf *elf)
26{
27 switch (elf->ehdr.e_machine) {
28 case EM_X86_64:
29 return 1;
30 case EM_386:
31 return 0;
32 default:
33 WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
34 return -1;
35 }
36}
37
38bool arch_callee_saved_reg(unsigned char reg)
39{
40 switch (reg) {
41 case CFI_BP:
42 case CFI_BX:
43 case CFI_R12:
44 case CFI_R13:
45 case CFI_R14:
46 case CFI_R15:
47 return true;
48
49 case CFI_AX:
50 case CFI_CX:
51 case CFI_DX:
52 case CFI_SI:
53 case CFI_DI:
54 case CFI_SP:
55 case CFI_R8:
56 case CFI_R9:
57 case CFI_R10:
58 case CFI_R11:
59 case CFI_RA:
60 default:
61 return false;
62 }
63}
64
/*
 * A relocation on a 4-byte immediate/displacement is applied 4 bytes
 * before the end of the instruction; compensate for that to recover
 * the real destination offset.
 */
unsigned long arch_dest_reloc_offset(int addend)
{
	int dest = addend + 4;

	return dest;
}
69
70unsigned long arch_jump_destination(struct instruction *insn)
71{
72 return insn->offset + insn->len + insn->immediate;
73}
74
/*
 * Allocate a zeroed stack_op, queue it on ops_list, and execute the
 * attached statement block exactly once to fill it in; returns -1 from
 * the enclosing function if the allocation fails.
 */
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (list_add_tail(&op->list, ops_list); op; op = NULL)

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 *  00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 *  01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 *  10 |  [r/m + d32]   |[S+D]|  [r/m + d32]  |
 *  11 |        r/ m                          |
 */

/* These test the modrm_mod/modrm_rm/sib_* locals of arch_decode_instruction(). */
#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

/* mod 00 with r/m 101 selects RIP-relative addressing in 64-bit mode. */
#define is_RIP()	((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
/* a memory form with r/m 100 means a SIB byte follows */
#define have_SIB()	((modrm_rm & 7) == CFI_SP && mod_is_mem())

/* r/m names @reg, either directly or via a trivial (index-less) SIB */
#define rm_is(reg)	(have_SIB() ? \
			 sib_base == (reg) && sib_index == CFI_SP : \
			 modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
104
/*
 * Decode the single instruction at @sec + @offset (at most @maxlen bytes)
 * and classify it for objtool's stack validation:
 *
 *   - *len gets the decoded instruction length
 *   - *type gets an INSN_* classification (INSN_OTHER if uninteresting)
 *   - *immediate gets the instruction's immediate operand, if any
 *   - every stack-pointer-relevant effect (push/pop/mov/add/and/lea/...)
 *     is appended to @ops_list as a struct stack_op
 *
 * Returns 0 on success (including "nothing interesting"), -1 on decode
 * failure.
 */
int arch_decode_instruction(const struct elf *elf, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate,
			    struct list_head *ops_list)
{
	struct insn insn;
	int x86_64, ret;
	unsigned char op1, op2,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&insn, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	/* VEX-encoded instructions are not tracked; report INSN_OTHER. */
	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];

	/* Extract the REX.W/R/X/B extension bits, if present. */
	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	/* REX-extended (0-15) ModRM register numbers. */
	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm = X86_MODRM_RM(modrm) + 8*rex_b;
	}

	if (insn.sib.nbytes) {
		sib = insn.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base = X86_SIB_BASE(sib) + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = insn.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			/* SUB is tracked as an ADD of the negated immediate. */
			imm = -imm;
			/* fallthrough */
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = insn.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = insn.displacement.value;
				}
				break;
			}

			break;	/* not reached: both branches above break */
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip RIP relative displacement */
		if (is_RIP())
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = insn.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:
		/* two-byte opcodes: dispatch on the second opcode byte */

		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */

	case 0xca: /* retf */
	case 0xcb: /* retf */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe8:
		*type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		*type = INSN_CLD;
		break;

	case 0xfd:
		*type = INSN_STD;
		break;

	case 0xff:
		/* group 5: the ModRM reg field selects the operation */
		if (modrm_reg == 2 || modrm_reg == 3)

			*type = INSN_CALL_DYNAMIC;

		else if (modrm_reg == 4)

			*type = INSN_JUMP_DYNAMIC;

		else if (modrm_reg == 5)

			/* jmpf */
			*type = INSN_CONTEXT_SWITCH;

		else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}
625
626void arch_initial_func_cfi_state(struct cfi_init_state *state)
627{
628 int i;
629
630 for (i = 0; i < CFI_NUM_REGS; i++) {
631 state->regs[i].base = CFI_UNDEFINED;
632 state->regs[i].offset = 0;
633 }
634
635 /* initial CFA (call frame address) */
636 state->cfa.base = CFI_SP;
637 state->cfa.offset = 8;
638
639 /* initial RA (return address) */
640 state->regs[CFI_RA].base = CFI_CFA;
641 state->regs[CFI_RA].offset = -8;
642}
643
644const char *arch_nop_insn(int len)
645{
646 static const char nops[5][5] = {
647 { BYTES_NOP1 },
648 { BYTES_NOP2 },
649 { BYTES_NOP3 },
650 { BYTES_NOP4 },
651 { BYTES_NOP5 },
652 };
653
654 if (len < 1 || len > 5) {
655 WARN("invalid NOP size: %d\n", len);
656 return NULL;
657 }
658
659 return nops[len-1];
660}
661
/* asm/alternative.h ? */

#define ALTINSTR_FLAG_INV (1 << 15)
#define ALT_NOT(feat) ((feat) | ALTINSTR_FLAG_INV)

/*
 * On-disk layout of one .altinstructions entry; must stay in sync with
 * struct alt_instr in arch/x86/include/asm/alternative.h.
 */
struct alt_instr {
	s32 instr_offset;	/* original instruction */
	s32 repl_offset;	/* offset to replacement instruction */
	u16 cpuid;		/* cpuid bit set for replacement */
	u8  instrlen;		/* length of original instruction */
	u8  replacementlen;	/* length of new instruction */
} __packed;
674
/*
 * Append one alt_instr record to the .altinstructions section, creating
 * the section on first use.  The instr_offset and repl_offset fields are
 * emitted as R_X86_64_PC32 relocations against the original instruction
 * and the replacement symbol @sym; the remaining fields are written
 * directly into the zero-initialized record.  Returns 0 on success,
 * -1 on any libelf or allocation failure.
 */
static int elf_add_alternative(struct elf *elf,
			       struct instruction *orig, struct symbol *sym,
			       int cpuid, u8 orig_len, u8 repl_len)
{
	const int size = sizeof(struct alt_instr);
	struct alt_instr *alt;
	struct section *sec;
	Elf_Scn *s;

	sec = find_section_by_name(elf, ".altinstructions");
	if (!sec) {
		sec = elf_create_section(elf, ".altinstructions",
					 SHF_ALLOC, 0, 0);

		if (!sec) {
			WARN_ELF("elf_create_section");
			return -1;
		}
	}

	s = elf_getscn(elf->elf, sec->idx);
	if (!s) {
		WARN_ELF("elf_getscn");
		return -1;
	}

	/* A fresh Elf_Data buffer holds just this one record. */
	sec->data = elf_newdata(s);
	if (!sec->data) {
		WARN_ELF("elf_newdata");
		return -1;
	}

	sec->data->d_size = size;
	sec->data->d_align = 1;

	alt = sec->data->d_buf = malloc(size);
	if (!sec->data->d_buf) {
		perror("malloc");
		return -1;
	}
	memset(sec->data->d_buf, 0, size);

	/* alt_instr::instr_offset is at offset 0 of the new record... */
	if (elf_add_reloc_to_insn(elf, sec, sec->sh.sh_size,
				  R_X86_64_PC32, orig->sec, orig->offset)) {
		WARN("elf_create_reloc: alt_instr::instr_offset");
		return -1;
	}

	/* ...and alt_instr::repl_offset is 4 bytes in. */
	if (elf_add_reloc(elf, sec, sec->sh.sh_size + 4,
			  R_X86_64_PC32, sym, 0)) {
		WARN("elf_create_reloc: alt_instr::repl_offset");
		return -1;
	}

	/* Swapped as needed so the field lands in target byte order. */
	alt->cpuid = bswap_if_needed(cpuid);
	alt->instrlen = orig_len;
	alt->replacementlen = repl_len;

	sec->sh.sh_size += size;
	sec->changed = true;

	return 0;
}
738
#define X86_FEATURE_RETPOLINE ( 7*32+12)

/*
 * Turn each retpoline call/jump site into an alternatives entry: when
 * X86_FEATURE_RETPOLINE is NOT set at boot, the thunk call is patched
 * to the matching "__x86_indirect_alt_{jmp,call}_<reg>" replacement.
 * Returns 0 on success, -1 on failure.
 */
int arch_rewrite_retpolines(struct objtool_file *file)
{
	struct instruction *insn;
	struct reloc *reloc;
	struct symbol *sym;
	char name[32] = "";

	list_for_each_entry(insn, &file->retpoline_call_list, call_node) {

		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC)
			continue;

		/* The thunks themselves are left alone. */
		if (!strcmp(insn->sec->name, ".text.__x86.indirect_thunk"))
			continue;

		reloc = insn->reloc;

		/* +21 skips "__x86_indirect_thunk_", leaving the register name. */
		sprintf(name, "__x86_indirect_alt_%s_%s",
			insn->type == INSN_JUMP_DYNAMIC ? "jmp" : "call",
			reloc->sym->name + 21);

		sym = find_symbol_by_name(file->elf, name);
		if (!sym) {
			sym = elf_create_undef_symbol(file->elf, name);
			if (!sym) {
				WARN("elf_create_undef_symbol");
				return -1;
			}
		}

		if (elf_add_alternative(file->elf, insn, sym,
					ALT_NOT(X86_FEATURE_RETPOLINE), 5, 5)) {
			WARN("elf_add_alternative");
			return -1;
		}
	}

	return 0;
}
781
782int arch_decode_hint_reg(struct instruction *insn, u8 sp_reg)
783{
784 struct cfi_reg *cfa = &insn->cfi.cfa;
785
786 switch (sp_reg) {
787 case ORC_REG_UNDEFINED:
788 cfa->base = CFI_UNDEFINED;
789 break;
790 case ORC_REG_SP:
791 cfa->base = CFI_SP;
792 break;
793 case ORC_REG_BP:
794 cfa->base = CFI_BP;
795 break;
796 case ORC_REG_SP_INDIRECT:
797 cfa->base = CFI_SP_INDIRECT;
798 break;
799 case ORC_REG_R10:
800 cfa->base = CFI_R10;
801 break;
802 case ORC_REG_R13:
803 cfa->base = CFI_R13;
804 break;
805 case ORC_REG_DI:
806 cfa->base = CFI_DI;
807 break;
808 case ORC_REG_DX:
809 cfa->base = CFI_DX;
810 break;
811 default:
812 return -1;
813 }
814
815 return 0;
816}
817
818bool arch_is_retpoline(struct symbol *sym)
819{
820 return !strncmp(sym->name, "__x86_indirect_", 15);
821}
1// SPDX-License-Identifier: GPL-2.0-or-later
2/*
3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
4 */
5
6#include <stdio.h>
7#include <stdlib.h>
8
9#define unlikely(cond) (cond)
10#include <asm/insn.h>
11#include "../../../arch/x86/lib/inat.c"
12#include "../../../arch/x86/lib/insn.c"
13
14#include "../../check.h"
15#include "../../elf.h"
16#include "../../arch.h"
17#include "../../warn.h"
18
/*
 * Map a 3-bit ModRM/opcode register number to its CFI register id:
 * column 0 without the relevant REX extension bit, column 1 with it set
 * (i.e. the r8-r15 bank).
 */
static unsigned char op_to_cfi_reg[][2] = {
	{CFI_AX, CFI_R8},
	{CFI_CX, CFI_R9},
	{CFI_DX, CFI_R10},
	{CFI_BX, CFI_R11},
	{CFI_SP, CFI_R12},
	{CFI_BP, CFI_R13},
	{CFI_SI, CFI_R14},
	{CFI_DI, CFI_R15},
};
29
30static int is_x86_64(const struct elf *elf)
31{
32 switch (elf->ehdr.e_machine) {
33 case EM_X86_64:
34 return 1;
35 case EM_386:
36 return 0;
37 default:
38 WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
39 return -1;
40 }
41}
42
43bool arch_callee_saved_reg(unsigned char reg)
44{
45 switch (reg) {
46 case CFI_BP:
47 case CFI_BX:
48 case CFI_R12:
49 case CFI_R13:
50 case CFI_R14:
51 case CFI_R15:
52 return true;
53
54 case CFI_AX:
55 case CFI_CX:
56 case CFI_DX:
57 case CFI_SI:
58 case CFI_DI:
59 case CFI_SP:
60 case CFI_R8:
61 case CFI_R9:
62 case CFI_R10:
63 case CFI_R11:
64 case CFI_RA:
65 default:
66 return false;
67 }
68}
69
/*
 * A relocation on a 4-byte immediate/displacement is applied 4 bytes
 * before the end of the instruction; compensate for that to recover
 * the real destination offset.
 */
unsigned long arch_dest_reloc_offset(int addend)
{
	int dest = addend + 4;

	return dest;
}
74
75unsigned long arch_jump_destination(struct instruction *insn)
76{
77 return insn->offset + insn->len + insn->immediate;
78}
79
/*
 * Allocate a zeroed stack_op, queue it on ops_list, and execute the
 * attached statement block exactly once to fill it in; returns -1 from
 * the enclosing function if the allocation fails.
 */
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (list_add_tail(&op->list, ops_list); op; op = NULL)
84
/*
 * Decode the single instruction at @sec + @offset (at most @maxlen
 * bytes) using the legacy insn_init()/insn_get_length() decoder API,
 * and classify it for objtool's stack validation:
 *
 *   - *len gets the decoded instruction length
 *   - *type gets an INSN_* classification (INSN_OTHER if uninteresting)
 *   - *immediate gets the instruction's immediate operand, if any
 *   - every stack-pointer-relevant effect (push/pop/mov/add/and/lea/...)
 *     is appended to @ops_list as a struct stack_op
 *
 * Returns 0 on success, -1 on decode failure.
 */
int arch_decode_instruction(const struct elf *elf, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate,
			    struct list_head *ops_list)
{
	struct insn insn;
	int x86_64, sign;
	unsigned char op1, op2, rex = 0, rex_b = 0, rex_r = 0, rex_w = 0,
		      rex_x = 0, modrm = 0, modrm_mod = 0, modrm_rm = 0,
		      modrm_reg = 0, sib = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	insn_init(&insn, sec->data->d_buf + offset, maxlen, x86_64);
	insn_get_length(&insn);

	if (!insn_complete(&insn)) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	/* VEX-encoded instructions are not tracked; report INSN_OTHER. */
	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];

	/* Extract the REX.W/R/X/B extension bits, if present. */
	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	/* Raw 3-bit fields here; REX extension happens via op_to_cfi_reg. */
	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm);
		modrm_rm = X86_MODRM_RM(modrm);
	}

	if (insn.sib.nbytes)
		sib = insn.sib.bytes[0];

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x81:
	case 0x83:
		/* only REX.W-only forms (no REX.R/X/B) are tracked here */
		if (rex != 0x48)
			break;

		if (modrm == 0xe4) {
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = insn.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* SUB is tracked as an ADD of the negated immediate. */
		if (modrm == 0xc4)
			sign = 1;
		else if (modrm == 0xec)
			sign = -1;
		else
			break;

		/* add/sub imm, %rsp */
		ADD_OP(op) {
			op->src.type = OP_SRC_ADD;
			op->src.reg = CFI_SP;
			op->src.offset = insn.immediate.value * sign;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		break;

	case 0x89:
		if (rex_w && !rex_r && modrm_mod == 3 && modrm_reg == 4) {

			/* mov %rsp, reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = CFI_SP;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_rm][rex_b];
			}
			break;
		}

		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_b &&
		    (modrm_mod == 1 || modrm_mod == 2) && modrm_rm == 5) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = insn.displacement.value;
			}

		} else if (rex_w && !rex_b && modrm_rm == 4 && sib == 0x24) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = insn.displacement.value;
			}
		}

		break;

	case 0x8b:
		if (rex_w && !rex_b && modrm_mod == 1 && modrm_rm == 5) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
			}

		} else if (rex_w && !rex_b && sib == 0x24 &&
			   modrm_mod != 3 && modrm_rm == 4) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
			}
		}

		break;

	case 0x8d:
		if (sib == 0x24 && rex_w && !rex_b && !rex_x) {

			ADD_OP(op) {
				if (!insn.displacement.value) {
					/* lea (%rsp), reg */
					op->src.type = OP_SRC_REG;
				} else {
					/* lea disp(%rsp), reg */
					op->src.type = OP_SRC_ADD;
					op->src.offset = insn.displacement.value;
				}
				op->src.reg = CFI_SP;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
			}

		} else if (rex == 0x48 && modrm == 0x65) {

			/* lea disp(%rbp), %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}

		} else if (rex == 0x49 && modrm == 0x62 &&
			   insn.displacement.value == -8) {

			/*
			 * lea -0x8(%r10), %rsp
			 *
			 * Restoring rsp back to its original value after a
			 * stack realignment.
			 */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_R10;
				op->src.offset = -8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}

		} else if (rex == 0x49 && modrm == 0x65 &&
			   insn.displacement.value == -16) {

			/*
			 * lea -0x10(%r13), %rsp
			 *
			 * Restoring rsp back to its original value after a
			 * stack realignment.
			 */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_R13;
				op->src.offset = -16;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}

		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:
		/* two-byte opcodes: dispatch on the second opcode byte */

		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op)
			op->dest.type = OP_DEST_LEAVE;

		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */

	case 0xca: /* retf */
	case 0xcb: /* retf */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe8:
		*type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		*type = INSN_CLD;
		break;

	case 0xfd:
		*type = INSN_STD;
		break;

	case 0xff:
		/* group 5: the ModRM reg field selects the operation */
		if (modrm_reg == 2 || modrm_reg == 3)

			*type = INSN_CALL_DYNAMIC;

		else if (modrm_reg == 4)

			*type = INSN_JUMP_DYNAMIC;

		else if (modrm_reg == 5)

			/* jmpf */
			*type = INSN_CONTEXT_SWITCH;

		else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}
550
/*
 * Set up the CFI state at a function's entry point: no registers are
 * tracked yet, the CFA sits 8 bytes above %rsp, and the return address
 * (pushed by CALL) lives at CFA - 8.
 */
void arch_initial_func_cfi_state(struct cfi_init_state *state)
{
	int i;

	for (i = 0; i < CFI_NUM_REGS; i++) {
		state->regs[i].base = CFI_UNDEFINED;
		state->regs[i].offset = 0;
	}

	/* initial CFA (call frame address) */
	state->cfa.base = CFI_SP;
	state->cfa.offset = 8;

	/* initial RA (return address) */
	/* NOTE(review): 16 looks like the RA pseudo-register slot (CFI_RA in
	 * later versions) -- confirm against this tree's cfi.h */
	state->regs[16].base = CFI_CFA;
	state->regs[16].offset = -8;
}
568
569const char *arch_nop_insn(int len)
570{
571 static const char nops[5][5] = {
572 /* 1 */ { 0x90 },
573 /* 2 */ { 0x66, 0x90 },
574 /* 3 */ { 0x0f, 0x1f, 0x00 },
575 /* 4 */ { 0x0f, 0x1f, 0x40, 0x00 },
576 /* 5 */ { 0x0f, 0x1f, 0x44, 0x00, 0x00 },
577 };
578
579 if (len < 1 || len > 5) {
580 WARN("invalid NOP size: %d\n", len);
581 return NULL;
582 }
583
584 return nops[len-1];
585}