Linux Audio

Check our new training course

Loading...
v6.8
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
 
 
 
 
 
 
 
 
 
 
 
 
 
  4 */
  5
  6#include <stdio.h>
  7#include <stdlib.h>
  8
  9#define unlikely(cond) (cond)
 10#include <asm/insn.h>
 11#include "../../../arch/x86/lib/inat.c"
 12#include "../../../arch/x86/lib/insn.c"
 13
 14#define CONFIG_64BIT 1
 15#include <asm/nops.h>
 16
 17#include <asm/orc_types.h>
 18#include <objtool/check.h>
 19#include <objtool/elf.h>
 20#include <objtool/arch.h>
 21#include <objtool/warn.h>
 22#include <objtool/endianness.h>
 23#include <objtool/builtin.h>
 24#include <arch/elf.h>
 25
 26int arch_ftrace_match(char *name)
 27{
 28	return !strcmp(name, "__fentry__");
 29}
 30
 31static int is_x86_64(const struct elf *elf)
 32{
 33	switch (elf->ehdr.e_machine) {
 34	case EM_X86_64:
 35		return 1;
 36	case EM_386:
 37		return 0;
 38	default:
 39		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
 40		return -1;
 41	}
 42}
 43
 44bool arch_callee_saved_reg(unsigned char reg)
 45{
 46	switch (reg) {
 47	case CFI_BP:
 48	case CFI_BX:
 49	case CFI_R12:
 50	case CFI_R13:
 51	case CFI_R14:
 52	case CFI_R15:
 53		return true;
 54
 55	case CFI_AX:
 56	case CFI_CX:
 57	case CFI_DX:
 58	case CFI_SI:
 59	case CFI_DI:
 60	case CFI_SP:
 61	case CFI_R8:
 62	case CFI_R9:
 63	case CFI_R10:
 64	case CFI_R11:
 65	case CFI_RA:
 66	default:
 67		return false;
 68	}
 69}
 70
 71unsigned long arch_dest_reloc_offset(int addend)
 72{
 73	return addend + 4;
 74}
 75
/*
 * Absolute target of a direct jump/call: the relative immediate is taken
 * from the end of the instruction (offset + len).
 */
unsigned long arch_jump_destination(struct instruction *insn)
{
	return insn->offset + insn->len + insn->immediate;
}
 80
 81bool arch_pc_relative_reloc(struct reloc *reloc)
 82{
 83	/*
 84	 * All relocation types where P (the address of the target)
 85	 * is included in the computation.
 86	 */
 87	switch (reloc_type(reloc)) {
 88	case R_X86_64_PC8:
 89	case R_X86_64_PC16:
 90	case R_X86_64_PC32:
 91	case R_X86_64_PC64:
 92
 93	case R_X86_64_PLT32:
 94	case R_X86_64_GOTPC32:
 95	case R_X86_64_GOTPCREL:
 96		return true;
 97
 98	default:
 99		break;
100	}
101
102	return false;
103}
104
/*
 * Allocate a new stack_op, link it onto the instruction's ops list, and
 * expose it as a one-shot block: ADD_OP(op) { ...init op... }.  The
 * else-for idiom runs the attached block exactly once and returns -1 from
 * the enclosing function on allocation failure.
 */
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (*ops_list = op, ops_list = &op->next; op; op = NULL)

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 * 00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 * 01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 * 10 |  [r/m + d32]   |[S+D]|   [r/m + d32] |
 * 11 |                   r/ m               |
 */

/* mod values 0-2 address memory; mod == 3 names a register operand. */
#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

/* mod=00, r/m=101 encodes RIP-relative; r/m=100 with a memory mod needs a SIB byte. */
#define is_RIP()   ((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
#define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem())

/*
 * When a SIB byte is present, match against its base register, and only
 * for the "no index" encoding (index == SP).
 */
#define rm_is(reg) (have_SIB() ? \
		    sib_base == (reg) && sib_index == CFI_SP : \
		    modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
134
135static bool has_notrack_prefix(struct insn *insn)
136{
137	int i;
138
139	for (i = 0; i < insn->prefixes.nbytes; i++) {
140		if (insn->prefixes.bytes[i] == 0x3e)
141			return true;
142	}
143
144	return false;
145}
146
/*
 * Decode the x86 instruction at sec:offset and translate it into objtool's
 * arch-independent model: fills in insn->type, insn->len and insn->immediate,
 * and appends stack_op entries describing any effect the instruction has on
 * the stack / frame registers.  Returns 0 on success, -1 on decode failure.
 */
int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    struct instruction *insn)
{
	struct stack_op **ops_list = &insn->stack_ops;
	const struct elf *elf = file->elf;
	struct insn ins;
	int x86_64, ret;
	unsigned char op1, op2, op3, prefix,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&ins, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	insn->len = ins.length;
	insn->type = INSN_OTHER;

	/* VEX-encoded (AVX etc.) instructions don't affect the stack model. */
	if (ins.vex_prefix.nbytes)
		return 0;

	prefix = ins.prefixes.bytes[0];

	op1 = ins.opcode.bytes[0];
	op2 = ins.opcode.bytes[1];
	op3 = ins.opcode.bytes[2];

	/* Unpack the REX bits that extend register fields / operand size. */
	if (ins.rex_prefix.nbytes) {
		rex = ins.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	if (ins.modrm.nbytes) {
		modrm = ins.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm  = X86_MODRM_RM(modrm)  + 8*rex_b;
	}

	if (ins.sib.nbytes) {
		sib = ins.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base  = X86_SIB_BASE(sib)  + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = ins.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			imm = -imm;
			fallthrough;
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = ins.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = ins.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* Remaining 0x89 forms share the store-to-memory handling below. */
		fallthrough;
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip RIP relative displacement */
		if (is_RIP())
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = ins.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		insn->type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:

		if (op2 == 0x01) {

			/* clac/stac are 0f 01 with fixed ModRM bytes */
			if (modrm == 0xca)
				insn->type = INSN_CLAC;
			else if (modrm == 0xcb)
				insn->type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			insn->type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			insn->type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			insn->type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			insn->type = INSN_NOP;

		} else if (op2 == 0x1e) {

			/* f3 0f 1e fa/fb: endbr64/endbr32 */
			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
				insn->type = INSN_ENDBR;


		} else if (op2 == 0x38 && op3 == 0xf8) {
			if (ins.prefixes.nbytes == 1 &&
			    ins.prefixes.bytes[0] == 0xf2) {
				/* ENQCMD cannot be used in the kernel. */
				WARN("ENQCMD instruction at %s:%lx", sec->name,
				     offset);
			}

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xcc:
		/* int3 */
		insn->type = INSN_TRAP;
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		insn->type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		insn->type = INSN_RETURN;
		break;

	case 0xc7: /* mov imm, r/m */
		if (!opts.noinstr)
			break;

		/* Record .init.text writes to pv_ops[] for paravirt tracking. */
		if (ins.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
			struct reloc *immr, *disp;
			struct symbol *func;
			int idx;

			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);

			if (!immr || strcmp(immr->sym->name, "pv_ops"))
				break;

			idx = (reloc_addend(immr) + 8) / sizeof(void *);

			/* NOTE(review): disp is dereferenced without a NULL check;
			 * presumably a pv_ops store always has both relocs — confirm. */
			func = disp->sym;
			if (disp->sym->type == STT_SECTION)
				func = find_symbol_by_offset(disp->sym->sec, reloc_addend(disp));
			if (!func) {
				WARN("no func for pv_ops[]");
				return -1;
			}

			objtool_pv_add(file, idx, func);
		}

		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;

	case 0xca: /* retf */
	case 0xcb: /* retf */
		insn->type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe0: /* loopne */
	case 0xe1: /* loope */
	case 0xe2: /* loop */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe8:
		insn->type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		insn->type = INSN_CLD;
		break;

	case 0xfd:
		insn->type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3) {

			insn->type = INSN_CALL_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 4) {

			insn->type = INSN_JUMP_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 5) {

			/* jmpf */
			insn->type = INSN_CONTEXT_SWITCH;

		} else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	insn->immediate = ins.immediate.nbytes ? ins.immediate.value : 0;

	return 0;
}
729
730void arch_initial_func_cfi_state(struct cfi_init_state *state)
731{
732	int i;
733
734	for (i = 0; i < CFI_NUM_REGS; i++) {
735		state->regs[i].base = CFI_UNDEFINED;
736		state->regs[i].offset = 0;
737	}
738
739	/* initial CFA (call frame address) */
740	state->cfa.base = CFI_SP;
741	state->cfa.offset = 8;
742
743	/* initial RA (return address) */
744	state->regs[CFI_RA].base = CFI_CFA;
745	state->regs[CFI_RA].offset = -8;
746}
747
748const char *arch_nop_insn(int len)
749{
750	static const char nops[5][5] = {
751		{ BYTES_NOP1 },
752		{ BYTES_NOP2 },
753		{ BYTES_NOP3 },
754		{ BYTES_NOP4 },
755		{ BYTES_NOP5 },
756	};
757
758	if (len < 1 || len > 5) {
759		WARN("invalid NOP size: %d\n", len);
760		return NULL;
761	}
762
763	return nops[len-1];
764}
765
766#define BYTE_RET	0xC3
767
768const char *arch_ret_insn(int len)
769{
770	static const char ret[5][5] = {
771		{ BYTE_RET },
772		{ BYTE_RET, 0xcc },
773		{ BYTE_RET, 0xcc, BYTES_NOP1 },
774		{ BYTE_RET, 0xcc, BYTES_NOP2 },
775		{ BYTE_RET, 0xcc, BYTES_NOP3 },
776	};
777
778	if (len < 1 || len > 5) {
779		WARN("invalid RET size: %d\n", len);
780		return NULL;
781	}
782
783	return ret[len-1];
784}
785
786int arch_decode_hint_reg(u8 sp_reg, int *base)
787{
788	switch (sp_reg) {
789	case ORC_REG_UNDEFINED:
790		*base = CFI_UNDEFINED;
791		break;
792	case ORC_REG_SP:
793		*base = CFI_SP;
794		break;
795	case ORC_REG_BP:
796		*base = CFI_BP;
797		break;
798	case ORC_REG_SP_INDIRECT:
799		*base = CFI_SP_INDIRECT;
800		break;
801	case ORC_REG_R10:
802		*base = CFI_R10;
803		break;
804	case ORC_REG_R13:
805		*base = CFI_R13;
806		break;
807	case ORC_REG_DI:
808		*base = CFI_DI;
809		break;
810	case ORC_REG_DX:
811		*base = CFI_DX;
812		break;
813	default:
814		return -1;
815	}
816
817	return 0;
818}
819
820bool arch_is_retpoline(struct symbol *sym)
821{
822	return !strncmp(sym->name, "__x86_indirect_", 15);
823}
824
825bool arch_is_rethunk(struct symbol *sym)
826{
827	return !strcmp(sym->name, "__x86_return_thunk");
828}
829
830bool arch_is_embedded_insn(struct symbol *sym)
831{
832	return !strcmp(sym->name, "retbleed_return_thunk") ||
833	       !strcmp(sym->name, "srso_safe_ret");
834}
v4.10.11
 
  1/*
  2 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
  3 *
  4 * This program is free software; you can redistribute it and/or
  5 * modify it under the terms of the GNU General Public License
  6 * as published by the Free Software Foundation; either version 2
  7 * of the License, or (at your option) any later version.
  8 *
  9 * This program is distributed in the hope that it will be useful,
 10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 12 * GNU General Public License for more details.
 13 *
 14 * You should have received a copy of the GNU General Public License
 15 * along with this program; if not, see <http://www.gnu.org/licenses/>.
 16 */
 17
 18#include <stdio.h>
 19#include <stdlib.h>
 20
 21#define unlikely(cond) (cond)
 22#include "insn/insn.h"
 23#include "insn/inat.c"
 24#include "insn/insn.c"
 25
 26#include "../../elf.h"
 27#include "../../arch.h"
 28#include "../../warn.h"
 
 
 
 
 
 
 
 
 
 
 
 
 
 29
/* Classify the object's machine type: 1 = x86-64, 0 = i386, -1 = unsupported. */
static int is_x86_64(struct elf *elf)
{
	switch (elf->ehdr.e_machine) {
	case EM_X86_64:
		return 1;
	case EM_386:
		return 0;
	default:
		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
		return -1;
	}
}
 42
/*
 * Decode the x86 instruction at sec:offset and classify it for objtool:
 * writes the instruction length to *len, an INSN_* category to *type, and
 * the immediate operand (0 if none) to *immediate.  Returns 0 on success,
 * -1 if the bytes can't be decoded.
 */
int arch_decode_instruction(struct elf *elf, struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, unsigned char *type,
			    unsigned long *immediate)
{
	struct insn insn;
	int x86_64;
	unsigned char op1, op2, ext;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	/* Decode incrementally: length, opcode, ModRM and immediate fields. */
	insn_init(&insn, (void *)(sec->data + offset), maxlen, x86_64);
	insn_get_length(&insn);
	insn_get_opcode(&insn);
	insn_get_modrm(&insn);
	insn_get_immediate(&insn);

	if (!insn_complete(&insn)) {
		WARN_FUNC("can't decode instruction", sec, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	/* VEX-encoded instructions aren't classified further. */
	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];

	switch (op1) {
	case 0x55:
		if (!insn.rex_prefix.nbytes)
			/* push rbp */
			*type = INSN_FP_SAVE;
		break;

	case 0x5d:
		if (!insn.rex_prefix.nbytes)
			/* pop rbp */
			*type = INSN_FP_RESTORE;
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x89:
		/* REX.W=0x48 + ModRM 0xe5 is specifically "mov %rsp, %rbp". */
		if (insn.rex_prefix.nbytes == 1 &&
		    insn.rex_prefix.bytes[0] == 0x48 &&
		    insn.modrm.nbytes && insn.modrm.bytes[0] == 0xe5)
			/* mov rsp, rbp */
			*type = INSN_FP_SETUP;
		break;

	case 0x8d:
		/* REX.W lea with ModRM 0x2c + SIB 0x24: rbp computed from rsp. */
		if (insn.rex_prefix.nbytes &&
		    insn.rex_prefix.bytes[0] == 0x48 &&
		    insn.modrm.nbytes && insn.modrm.bytes[0] == 0x2c &&
		    insn.sib.nbytes && insn.sib.bytes[0] == 0x24)
			/* lea %(rsp), %rbp */
			*type = INSN_FP_SETUP;
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x0f:
		if (op2 >= 0x80 && op2 <= 0x8f)
			*type = INSN_JUMP_CONDITIONAL;
		else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			 op2 == 0x35)
			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;
		else if (op2 == 0x0b || op2 == 0xb9)
			/* ud2 */
			*type = INSN_BUG;
		else if (op2 == 0x0d || op2 == 0x1f)
			/* nopl/nopw */
			*type = INSN_NOP;
		else if (op2 == 0x01 && insn.modrm.nbytes &&
			 (insn.modrm.bytes[0] == 0xc2 ||
			  insn.modrm.bytes[0] == 0xd8))
			/* vmlaunch, vmrun */
			*type = INSN_CONTEXT_SWITCH;

		break;

	case 0xc9: /* leave */
		*type = INSN_FP_RESTORE;
		break;

	case 0xe3: /* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xca: /* retf */
	case 0xcb: /* retf */
	case 0xcf: /* iret */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe8:
		*type = INSN_CALL;
		break;

	case 0xff:
		/* Group 5: the ModRM reg field selects the operation. */
		ext = X86_MODRM_REG(insn.modrm.bytes[0]);
		if (ext == 2 || ext == 3)
			*type = INSN_CALL_DYNAMIC;
		else if (ext == 4)
			*type = INSN_JUMP_DYNAMIC;
		else if (ext == 5) /*jmpf */
			*type = INSN_CONTEXT_SWITCH;

		break;

	default:
		break;
	}

	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}