v6.2
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
  4 */
  5
  6#include <stdio.h>
  7#include <stdlib.h>
  8
  9#define unlikely(cond) (cond)
 10#include <asm/insn.h>
 11#include "../../../arch/x86/lib/inat.c"
 12#include "../../../arch/x86/lib/insn.c"
 13
 14#define CONFIG_64BIT 1
 15#include <asm/nops.h>
 16
 17#include <asm/orc_types.h>
 18#include <objtool/check.h>
 19#include <objtool/elf.h>
 20#include <objtool/arch.h>
 21#include <objtool/warn.h>
 22#include <objtool/endianness.h>
 23#include <objtool/builtin.h>
 24#include <arch/elf.h>
 25
 26int arch_ftrace_match(char *name)
 27{
 28	return !strcmp(name, "__fentry__");
 29}
 30
 31static int is_x86_64(const struct elf *elf)
 32{
 33	switch (elf->ehdr.e_machine) {
 34	case EM_X86_64:
 35		return 1;
 36	case EM_386:
 37		return 0;
 38	default:
 39		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
 40		return -1;
 41	}
 42}
 43
 44bool arch_callee_saved_reg(unsigned char reg)
 45{
 46	switch (reg) {
 47	case CFI_BP:
 48	case CFI_BX:
 49	case CFI_R12:
 50	case CFI_R13:
 51	case CFI_R14:
 52	case CFI_R15:
 53		return true;
 54
 55	case CFI_AX:
 56	case CFI_CX:
 57	case CFI_DX:
 58	case CFI_SI:
 59	case CFI_DI:
 60	case CFI_SP:
 61	case CFI_R8:
 62	case CFI_R9:
 63	case CFI_R10:
 64	case CFI_R11:
 65	case CFI_RA:
 66	default:
 67		return false;
 68	}
 69}
 70
 71unsigned long arch_dest_reloc_offset(int addend)
 72{
 73	return addend + 4;
 74}
 75
 76unsigned long arch_jump_destination(struct instruction *insn)
 77{
 78	return insn->offset + insn->len + insn->immediate;
 79}
 80
 81bool arch_pc_relative_reloc(struct reloc *reloc)
 82{
 83	/*
 84	 * All relocation types where P (the address of the target)
 85	 * is included in the computation.
 86	 */
 87	switch (reloc->type) {
 88	case R_X86_64_PC8:
 89	case R_X86_64_PC16:
 90	case R_X86_64_PC32:
 91	case R_X86_64_PC64:
 92
 93	case R_X86_64_PLT32:
 94	case R_X86_64_GOTPC32:
 95	case R_X86_64_GOTPCREL:
 96		return true;
 97
 98	default:
 99		break;
100	}
101
102	return false;
103}
104
105#define ADD_OP(op) \
106	if (!(op = calloc(1, sizeof(*op)))) \
107		return -1; \
108	else for (list_add_tail(&op->list, ops_list); op; op = NULL)
109
110/*
111 * Helpers to decode ModRM/SIB:
112 *
113 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
114 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
115 * Mod+----------------+-----+-----+---------+
116 * 00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
117 * 01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
118 * 10 |  [r/m + d32]   |[S+D]|   [r/m + d32] |
119 * 11 |                   r/ m               |
120 */
121
122#define mod_is_mem()	(modrm_mod != 3)
123#define mod_is_reg()	(modrm_mod == 3)
124
125#define is_RIP()   ((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
126#define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem())
127
128#define rm_is(reg) (have_SIB() ? \
129		    sib_base == (reg) && sib_index == CFI_SP : \
130		    modrm_rm == (reg))
131
132#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
133#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
134
135static bool has_notrack_prefix(struct insn *insn)
136{
137	int i;
138
139	for (i = 0; i < insn->prefixes.nbytes; i++) {
140		if (insn->prefixes.bytes[i] == 0x3e)
141			return true;
142	}
143
144	return false;
145}
146
147int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
148			    unsigned long offset, unsigned int maxlen,
149			    unsigned int *len, enum insn_type *type,
150			    unsigned long *immediate,
151			    struct list_head *ops_list)
152{
153	const struct elf *elf = file->elf;
154	struct insn insn;
155	int x86_64, ret;
156	unsigned char op1, op2, op3, prefix,
157		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
158		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
159		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
160	struct stack_op *op = NULL;
161	struct symbol *sym;
162	u64 imm;
163
164	x86_64 = is_x86_64(elf);
165	if (x86_64 == -1)
166		return -1;
167
168	ret = insn_decode(&insn, sec->data->d_buf + offset, maxlen,
169			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
170	if (ret < 0) {
171		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
172		return -1;
173	}
174
175	*len = insn.length;
176	*type = INSN_OTHER;
177
178	if (insn.vex_prefix.nbytes)
179		return 0;
180
181	prefix = insn.prefixes.bytes[0];
182
183	op1 = insn.opcode.bytes[0];
184	op2 = insn.opcode.bytes[1];
185	op3 = insn.opcode.bytes[2];
186
187	if (insn.rex_prefix.nbytes) {
188		rex = insn.rex_prefix.bytes[0];
189		rex_w = X86_REX_W(rex) >> 3;
190		rex_r = X86_REX_R(rex) >> 2;
191		rex_x = X86_REX_X(rex) >> 1;
192		rex_b = X86_REX_B(rex);
193	}
194
195	if (insn.modrm.nbytes) {
196		modrm = insn.modrm.bytes[0];
197		modrm_mod = X86_MODRM_MOD(modrm);
198		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
199		modrm_rm  = X86_MODRM_RM(modrm)  + 8*rex_b;
200	}
201
202	if (insn.sib.nbytes) {
203		sib = insn.sib.bytes[0];
204		/* sib_scale = X86_SIB_SCALE(sib); */
205		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
206		sib_base  = X86_SIB_BASE(sib)  + 8*rex_b;
207	}
208
209	switch (op1) {
210
211	case 0x1:
212	case 0x29:
213		if (rex_w && rm_is_reg(CFI_SP)) {
214
215			/* add/sub reg, %rsp */
216			ADD_OP(op) {
217				op->src.type = OP_SRC_ADD;
218				op->src.reg = modrm_reg;
219				op->dest.type = OP_DEST_REG;
220				op->dest.reg = CFI_SP;
221			}
222		}
223		break;
224
225	case 0x50 ... 0x57:
226
227		/* push reg */
228		ADD_OP(op) {
229			op->src.type = OP_SRC_REG;
230			op->src.reg = (op1 & 0x7) + 8*rex_b;
231			op->dest.type = OP_DEST_PUSH;
232		}
233
234		break;
235
236	case 0x58 ... 0x5f:
237
238		/* pop reg */
239		ADD_OP(op) {
240			op->src.type = OP_SRC_POP;
241			op->dest.type = OP_DEST_REG;
242			op->dest.reg = (op1 & 0x7) + 8*rex_b;
243		}
244
245		break;
246
247	case 0x68:
248	case 0x6a:
249		/* push immediate */
250		ADD_OP(op) {
251			op->src.type = OP_SRC_CONST;
252			op->dest.type = OP_DEST_PUSH;
253		}
254		break;
255
256	case 0x70 ... 0x7f:
257		*type = INSN_JUMP_CONDITIONAL;
258		break;
259
260	case 0x80 ... 0x83:
261		/*
262		 * 1000 00sw : mod OP r/m : immediate
263		 *
264		 * s - sign extend immediate
265		 * w - imm8 / imm32
266		 *
267		 * OP: 000 ADD    100 AND
268		 *     001 OR     101 SUB
269		 *     010 ADC    110 XOR
270		 *     011 SBB    111 CMP
271		 */
272
273		/* 64bit only */
274		if (!rex_w)
275			break;
276
277		/* %rsp target only */
278		if (!rm_is_reg(CFI_SP))
279			break;
280
281		imm = insn.immediate.value;
282		if (op1 & 2) { /* sign extend */
283			if (op1 & 1) { /* imm32 */
284				imm <<= 32;
285				imm = (s64)imm >> 32;
286			} else { /* imm8 */
287				imm <<= 56;
288				imm = (s64)imm >> 56;
289			}
290		}
291
292		switch (modrm_reg & 7) {
293		case 5:
294			imm = -imm;
295			/* fallthrough */
296		case 0:
297			/* add/sub imm, %rsp */
298			ADD_OP(op) {
299				op->src.type = OP_SRC_ADD;
300				op->src.reg = CFI_SP;
301				op->src.offset = imm;
302				op->dest.type = OP_DEST_REG;
303				op->dest.reg = CFI_SP;
304			}
305			break;
306
307		case 4:
308			/* and imm, %rsp */
309			ADD_OP(op) {
310				op->src.type = OP_SRC_AND;
311				op->src.reg = CFI_SP;
312				op->src.offset = insn.immediate.value;
313				op->dest.type = OP_DEST_REG;
314				op->dest.reg = CFI_SP;
315			}
316			break;
317
318		default:
319			/* WARN ? */
320			break;
321		}
322
323		break;
324
325	case 0x89:
326		if (!rex_w)
327			break;
328
329		if (modrm_reg == CFI_SP) {
330
331			if (mod_is_reg()) {
332				/* mov %rsp, reg */
333				ADD_OP(op) {
334					op->src.type = OP_SRC_REG;
335					op->src.reg = CFI_SP;
336					op->dest.type = OP_DEST_REG;
337					op->dest.reg = modrm_rm;
338				}
339				break;
340
341			} else {
342				/* skip RIP relative displacement */
343				if (is_RIP())
344					break;
345
346				/* skip nontrivial SIB */
347				if (have_SIB()) {
348					modrm_rm = sib_base;
349					if (sib_index != CFI_SP)
350						break;
351				}
352
353				/* mov %rsp, disp(%reg) */
354				ADD_OP(op) {
355					op->src.type = OP_SRC_REG;
356					op->src.reg = CFI_SP;
357					op->dest.type = OP_DEST_REG_INDIRECT;
358					op->dest.reg = modrm_rm;
359					op->dest.offset = insn.displacement.value;
360				}
361				break;
362			}
363
364			break;
365		}
366
367		if (rm_is_reg(CFI_SP)) {
368
369			/* mov reg, %rsp */
370			ADD_OP(op) {
371				op->src.type = OP_SRC_REG;
372				op->src.reg = modrm_reg;
373				op->dest.type = OP_DEST_REG;
374				op->dest.reg = CFI_SP;
375			}
376			break;
377		}
378
379		/* fallthrough */
380	case 0x88:
381		if (!rex_w)
382			break;
383
384		if (rm_is_mem(CFI_BP)) {
385
386			/* mov reg, disp(%rbp) */
387			ADD_OP(op) {
388				op->src.type = OP_SRC_REG;
389				op->src.reg = modrm_reg;
390				op->dest.type = OP_DEST_REG_INDIRECT;
391				op->dest.reg = CFI_BP;
392				op->dest.offset = insn.displacement.value;
393			}
394			break;
395		}
396
397		if (rm_is_mem(CFI_SP)) {
398
399			/* mov reg, disp(%rsp) */
400			ADD_OP(op) {
401				op->src.type = OP_SRC_REG;
402				op->src.reg = modrm_reg;
403				op->dest.type = OP_DEST_REG_INDIRECT;
404				op->dest.reg = CFI_SP;
405				op->dest.offset = insn.displacement.value;
406			}
407			break;
408		}
409
410		break;
411
412	case 0x8b:
413		if (!rex_w)
414			break;
415
416		if (rm_is_mem(CFI_BP)) {
417
418			/* mov disp(%rbp), reg */
419			ADD_OP(op) {
420				op->src.type = OP_SRC_REG_INDIRECT;
421				op->src.reg = CFI_BP;
422				op->src.offset = insn.displacement.value;
423				op->dest.type = OP_DEST_REG;
424				op->dest.reg = modrm_reg;
425			}
426			break;
427		}
428
429		if (rm_is_mem(CFI_SP)) {
430
431			/* mov disp(%rsp), reg */
432			ADD_OP(op) {
433				op->src.type = OP_SRC_REG_INDIRECT;
434				op->src.reg = CFI_SP;
435				op->src.offset = insn.displacement.value;
436				op->dest.type = OP_DEST_REG;
437				op->dest.reg = modrm_reg;
438			}
439			break;
440		}
441
442		break;
443
444	case 0x8d:
445		if (mod_is_reg()) {
446			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
447			break;
448		}
449
450		/* skip non 64bit ops */
451		if (!rex_w)
452			break;
453
454		/* skip RIP relative displacement */
455		if (is_RIP())
456			break;
457
458		/* skip nontrivial SIB */
459		if (have_SIB()) {
460			modrm_rm = sib_base;
461			if (sib_index != CFI_SP)
462				break;
463		}
464
465		/* lea disp(%src), %dst */
466		ADD_OP(op) {
467			op->src.offset = insn.displacement.value;
468			if (!op->src.offset) {
469				/* lea (%src), %dst */
470				op->src.type = OP_SRC_REG;
471			} else {
472				/* lea disp(%src), %dst */
473				op->src.type = OP_SRC_ADD;
474			}
475			op->src.reg = modrm_rm;
476			op->dest.type = OP_DEST_REG;
477			op->dest.reg = modrm_reg;
478		}
479		break;
480
481	case 0x8f:
482		/* pop to mem */
483		ADD_OP(op) {
484			op->src.type = OP_SRC_POP;
485			op->dest.type = OP_DEST_MEM;
486		}
487		break;
488
489	case 0x90:
490		*type = INSN_NOP;
491		break;
492
493	case 0x9c:
494		/* pushf */
495		ADD_OP(op) {
496			op->src.type = OP_SRC_CONST;
497			op->dest.type = OP_DEST_PUSHF;
498		}
499		break;
500
501	case 0x9d:
502		/* popf */
503		ADD_OP(op) {
504			op->src.type = OP_SRC_POPF;
505			op->dest.type = OP_DEST_MEM;
506		}
507		break;
508
509	case 0x0f:
510
511		if (op2 == 0x01) {
512
513			if (modrm == 0xca)
514				*type = INSN_CLAC;
515			else if (modrm == 0xcb)
516				*type = INSN_STAC;
517
518		} else if (op2 >= 0x80 && op2 <= 0x8f) {
519
520			*type = INSN_JUMP_CONDITIONAL;
521
522		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
523			   op2 == 0x35) {
524
525			/* sysenter, sysret */
526			*type = INSN_CONTEXT_SWITCH;
527
528		} else if (op2 == 0x0b || op2 == 0xb9) {
529
530			/* ud2 */
531			*type = INSN_BUG;
532
533		} else if (op2 == 0x0d || op2 == 0x1f) {
534
535			/* nopl/nopw */
536			*type = INSN_NOP;
537
538		} else if (op2 == 0x1e) {
539
540			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
541				*type = INSN_ENDBR;
542
543
544		} else if (op2 == 0x38 && op3 == 0xf8) {
545			if (insn.prefixes.nbytes == 1 &&
546			    insn.prefixes.bytes[0] == 0xf2) {
547				/* ENQCMD cannot be used in the kernel. */
548				WARN("ENQCMD instruction at %s:%lx", sec->name,
549				     offset);
550			}
551
552		} else if (op2 == 0xa0 || op2 == 0xa8) {
553
554			/* push fs/gs */
555			ADD_OP(op) {
556				op->src.type = OP_SRC_CONST;
557				op->dest.type = OP_DEST_PUSH;
558			}
559
560		} else if (op2 == 0xa1 || op2 == 0xa9) {
561
562			/* pop fs/gs */
563			ADD_OP(op) {
564				op->src.type = OP_SRC_POP;
565				op->dest.type = OP_DEST_MEM;
566			}
567		}
568
569		break;
570
571	case 0xc9:
572		/*
573		 * leave
574		 *
575		 * equivalent to:
576		 * mov bp, sp
577		 * pop bp
578		 */
579		ADD_OP(op) {
580			op->src.type = OP_SRC_REG;
581			op->src.reg = CFI_BP;
582			op->dest.type = OP_DEST_REG;
583			op->dest.reg = CFI_SP;
584		}
585		ADD_OP(op) {
586			op->src.type = OP_SRC_POP;
587			op->dest.type = OP_DEST_REG;
588			op->dest.reg = CFI_BP;
589		}
590		break;
591
592	case 0xcc:
593		/* int3 */
594		*type = INSN_TRAP;
595		break;
596
597	case 0xe3:
598		/* jecxz/jrcxz */
599		*type = INSN_JUMP_CONDITIONAL;
600		break;
601
602	case 0xe9:
603	case 0xeb:
604		*type = INSN_JUMP_UNCONDITIONAL;
605		break;
606
607	case 0xc2:
608	case 0xc3:
609		*type = INSN_RETURN;
610		break;
611
612	case 0xc7: /* mov imm, r/m */
613		if (!opts.noinstr)
614			break;
615
616		if (insn.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
617			struct reloc *immr, *disp;
618			struct symbol *func;
619			int idx;
620
621			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
622			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);
623
624			if (!immr || strcmp(immr->sym->name, "pv_ops"))
625				break;
626
627			idx = (immr->addend + 8) / sizeof(void *);
628
629			func = disp->sym;
630			if (disp->sym->type == STT_SECTION)
631				func = find_symbol_by_offset(disp->sym->sec, disp->addend);
632			if (!func) {
633				WARN("no func for pv_ops[]");
634				return -1;
635			}
636
637			objtool_pv_add(file, idx, func);
638		}
639
640		break;
641
642	case 0xcf: /* iret */
643		/*
644		 * Handle sync_core(), which has an IRET to self.
645		 * All other IRET are in STT_NONE entry code.
646		 */
647		sym = find_symbol_containing(sec, offset);
648		if (sym && sym->type == STT_FUNC) {
649			ADD_OP(op) {
650				/* add $40, %rsp */
651				op->src.type = OP_SRC_ADD;
652				op->src.reg = CFI_SP;
653				op->src.offset = 5*8;
654				op->dest.type = OP_DEST_REG;
655				op->dest.reg = CFI_SP;
656			}
657			break;
658		}
659
660		/* fallthrough */
661
662	case 0xca: /* retf */
663	case 0xcb: /* retf */
664		*type = INSN_CONTEXT_SWITCH;
665		break;
666
667	case 0xe0: /* loopne */
668	case 0xe1: /* loope */
669	case 0xe2: /* loop */
670		*type = INSN_JUMP_CONDITIONAL;
671		break;
672
673	case 0xe8:
674		*type = INSN_CALL;
675		/*
676		 * For the impact on the stack, a CALL behaves like
677		 * a PUSH of an immediate value (the return address).
678		 */
679		ADD_OP(op) {
680			op->src.type = OP_SRC_CONST;
681			op->dest.type = OP_DEST_PUSH;
682		}
683		break;
684
685	case 0xfc:
686		*type = INSN_CLD;
687		break;
688
689	case 0xfd:
690		*type = INSN_STD;
691		break;
692
693	case 0xff:
694		if (modrm_reg == 2 || modrm_reg == 3) {
695
696			*type = INSN_CALL_DYNAMIC;
697			if (has_notrack_prefix(&insn))
698				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);
699
700		} else if (modrm_reg == 4) {
701
702			*type = INSN_JUMP_DYNAMIC;
703			if (has_notrack_prefix(&insn))
704				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);
705
706		} else if (modrm_reg == 5) {
707
708			/* jmpf */
709			*type = INSN_CONTEXT_SWITCH;
710
711		} else if (modrm_reg == 6) {
712
713			/* push from mem */
714			ADD_OP(op) {
715				op->src.type = OP_SRC_CONST;
716				op->dest.type = OP_DEST_PUSH;
717			}
718		}
719
720		break;
721
722	default:
723		break;
724	}
725
726	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;
727
728	return 0;
729}
730
731void arch_initial_func_cfi_state(struct cfi_init_state *state)
732{
733	int i;
734
735	for (i = 0; i < CFI_NUM_REGS; i++) {
736		state->regs[i].base = CFI_UNDEFINED;
737		state->regs[i].offset = 0;
738	}
739
740	/* initial CFA (call frame address) */
741	state->cfa.base = CFI_SP;
742	state->cfa.offset = 8;
743
744	/* initial RA (return address) */
745	state->regs[CFI_RA].base = CFI_CFA;
746	state->regs[CFI_RA].offset = -8;
747}
748
749const char *arch_nop_insn(int len)
750{
751	static const char nops[5][5] = {
752		{ BYTES_NOP1 },
753		{ BYTES_NOP2 },
754		{ BYTES_NOP3 },
755		{ BYTES_NOP4 },
756		{ BYTES_NOP5 },
757	};
758
759	if (len < 1 || len > 5) {
760		WARN("invalid NOP size: %d\n", len);
761		return NULL;
762	}
763
764	return nops[len-1];
765}
766
767#define BYTE_RET	0xC3
768
769const char *arch_ret_insn(int len)
770{
771	static const char ret[5][5] = {
772		{ BYTE_RET },
773		{ BYTE_RET, 0xcc },
774		{ BYTE_RET, 0xcc, BYTES_NOP1 },
775		{ BYTE_RET, 0xcc, BYTES_NOP2 },
776		{ BYTE_RET, 0xcc, BYTES_NOP3 },
777	};
778
779	if (len < 1 || len > 5) {
780		WARN("invalid RET size: %d\n", len);
781		return NULL;
782	}
783
784	return ret[len-1];
785}
786
787int arch_decode_hint_reg(u8 sp_reg, int *base)
788{
789	switch (sp_reg) {
790	case ORC_REG_UNDEFINED:
791		*base = CFI_UNDEFINED;
792		break;
793	case ORC_REG_SP:
794		*base = CFI_SP;
795		break;
796	case ORC_REG_BP:
797		*base = CFI_BP;
798		break;
799	case ORC_REG_SP_INDIRECT:
800		*base = CFI_SP_INDIRECT;
801		break;
802	case ORC_REG_R10:
803		*base = CFI_R10;
804		break;
805	case ORC_REG_R13:
806		*base = CFI_R13;
807		break;
808	case ORC_REG_DI:
809		*base = CFI_DI;
810		break;
811	case ORC_REG_DX:
812		*base = CFI_DX;
813		break;
814	default:
815		return -1;
816	}
817
818	return 0;
819}
820
821bool arch_is_retpoline(struct symbol *sym)
822{
823	return !strncmp(sym->name, "__x86_indirect_", 15);
824}
825
826bool arch_is_rethunk(struct symbol *sym)
827{
828	return !strcmp(sym->name, "__x86_return_thunk");
829}
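
For readers following the ModRM/SIB table near the top of the file: below is a minimal, self-contained sketch (not part of decode.c; the macro names and values are illustrative) showing how those field extractions play out for one concrete instruction, "mov %rsp,-0x8(%rbp)" (48 89 65 f8), which case 0x89 above records as a "mov %rsp, disp(%reg)" stack op.

/*
 * Standalone sketch: split the ModRM byte the way the helpers above do.
 * The REX prefix 0x48 only sets W, so rex_r and rex_b are 0 here.
 */
#include <stdio.h>

#define MODRM_MOD(m) (((m) & 0xc0) >> 6)
#define MODRM_REG(m) (((m) & 0x38) >> 3)
#define MODRM_RM(m)  ((m) & 0x07)

int main(void)
{
	unsigned char modrm = 0x65;           /* from 48 89 65 f8          */
	unsigned char mod = MODRM_MOD(modrm); /* 1: [r/m + d8]             */
	unsigned char reg = MODRM_REG(modrm); /* 4: %rsp is the source     */
	unsigned char rm  = MODRM_RM(modrm);  /* 5: %rbp is the base       */

	/* mod != 3 -> memory operand; rm != 4 -> no SIB; mod != 0 -> not RIP */
	printf("mod=%u reg=%u rm=%u -> mov %%rsp, disp8(%%rbp)\n", mod, reg, rm);
	return 0;
}
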
v6.13.7
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
  4 */
  5
  6#include <stdio.h>
  7#include <stdlib.h>
  8
  9#define unlikely(cond) (cond)
 10#include <asm/insn.h>
 11#include "../../../arch/x86/lib/inat.c"
 12#include "../../../arch/x86/lib/insn.c"
 13
 14#define CONFIG_64BIT 1
 15#include <asm/nops.h>
 16
 17#include <asm/orc_types.h>
 18#include <objtool/check.h>
 19#include <objtool/elf.h>
 20#include <objtool/arch.h>
 21#include <objtool/warn.h>
 22#include <objtool/endianness.h>
 23#include <objtool/builtin.h>
 24#include <arch/elf.h>
 25
 26int arch_ftrace_match(char *name)
 27{
 28	return !strcmp(name, "__fentry__");
 29}
 30
 31static int is_x86_64(const struct elf *elf)
 32{
 33	switch (elf->ehdr.e_machine) {
 34	case EM_X86_64:
 35		return 1;
 36	case EM_386:
 37		return 0;
 38	default:
 39		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
 40		return -1;
 41	}
 42}
 43
 44bool arch_callee_saved_reg(unsigned char reg)
 45{
 46	switch (reg) {
 47	case CFI_BP:
 48	case CFI_BX:
 49	case CFI_R12:
 50	case CFI_R13:
 51	case CFI_R14:
 52	case CFI_R15:
 53		return true;
 54
 55	case CFI_AX:
 56	case CFI_CX:
 57	case CFI_DX:
 58	case CFI_SI:
 59	case CFI_DI:
 60	case CFI_SP:
 61	case CFI_R8:
 62	case CFI_R9:
 63	case CFI_R10:
 64	case CFI_R11:
 65	case CFI_RA:
 66	default:
 67		return false;
 68	}
 69}
 70
 71unsigned long arch_dest_reloc_offset(int addend)
 72{
 73	return addend + 4;
 74}
 75
 76unsigned long arch_jump_destination(struct instruction *insn)
 77{
 78	return insn->offset + insn->len + insn->immediate;
 79}
 80
 81bool arch_pc_relative_reloc(struct reloc *reloc)
 82{
 83	/*
 84	 * All relocation types where P (the address of the target)
 85	 * is included in the computation.
 86	 */
 87	switch (reloc_type(reloc)) {
 88	case R_X86_64_PC8:
 89	case R_X86_64_PC16:
 90	case R_X86_64_PC32:
 91	case R_X86_64_PC64:
 92
 93	case R_X86_64_PLT32:
 94	case R_X86_64_GOTPC32:
 95	case R_X86_64_GOTPCREL:
 96		return true;
 97
 98	default:
 99		break;
100	}
101
102	return false;
103}
104
105#define ADD_OP(op) \
106	if (!(op = calloc(1, sizeof(*op)))) \
107		return -1; \
108	else for (*ops_list = op, ops_list = &op->next; op; op = NULL)
109
110/*
111 * Helpers to decode ModRM/SIB:
112 *
113 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
114 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
115 * Mod+----------------+-----+-----+---------+
116 * 00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
117 * 01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
118 * 10 |  [r/m + d32]   |[S+D]|   [r/m + d32] |
119 * 11 |                   r/ m               |
120 */
121
122#define mod_is_mem()	(modrm_mod != 3)
123#define mod_is_reg()	(modrm_mod == 3)
124
125#define is_RIP()   ((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
126#define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem())
127
128/*
129 * Check the ModRM register. If there is a SIB byte then check with
130 * the SIB base register. But if the SIB base is 5 (i.e. CFI_BP) and
131 * ModRM mod is 0 then there is no base register.
132 */
133#define rm_is(reg) (have_SIB() ? \
134		    sib_base == (reg) && sib_index == CFI_SP && \
135		    (sib_base != CFI_BP || modrm_mod != 0) :	\
136		    modrm_rm == (reg))
137
138#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
139#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
140
141static bool has_notrack_prefix(struct insn *insn)
142{
143	int i;
144
145	for (i = 0; i < insn->prefixes.nbytes; i++) {
146		if (insn->prefixes.bytes[i] == 0x3e)
147			return true;
148	}
149
150	return false;
151}
152
153int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
154			    unsigned long offset, unsigned int maxlen,
155			    struct instruction *insn)
156{
157	struct stack_op **ops_list = &insn->stack_ops;
158	const struct elf *elf = file->elf;
159	struct insn ins;
160	int x86_64, ret;
161	unsigned char op1, op2, op3, prefix,
162		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
163		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
164		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
165	struct stack_op *op = NULL;
166	struct symbol *sym;
167	u64 imm;
168
169	x86_64 = is_x86_64(elf);
170	if (x86_64 == -1)
171		return -1;
172
173	ret = insn_decode(&ins, sec->data->d_buf + offset, maxlen,
174			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
175	if (ret < 0) {
176		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
177		return -1;
178	}
179
180	insn->len = ins.length;
181	insn->type = INSN_OTHER;
182
183	if (ins.vex_prefix.nbytes)
184		return 0;
185
186	prefix = ins.prefixes.bytes[0];
187
188	op1 = ins.opcode.bytes[0];
189	op2 = ins.opcode.bytes[1];
190	op3 = ins.opcode.bytes[2];
191
192	if (ins.rex_prefix.nbytes) {
193		rex = ins.rex_prefix.bytes[0];
194		rex_w = X86_REX_W(rex) >> 3;
195		rex_r = X86_REX_R(rex) >> 2;
196		rex_x = X86_REX_X(rex) >> 1;
197		rex_b = X86_REX_B(rex);
198	}
199
200	if (ins.modrm.nbytes) {
201		modrm = ins.modrm.bytes[0];
202		modrm_mod = X86_MODRM_MOD(modrm);
203		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
204		modrm_rm  = X86_MODRM_RM(modrm)  + 8*rex_b;
205	}
206
207	if (ins.sib.nbytes) {
208		sib = ins.sib.bytes[0];
209		/* sib_scale = X86_SIB_SCALE(sib); */
210		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
211		sib_base  = X86_SIB_BASE(sib)  + 8*rex_b;
212	}
213
214	switch (op1) {
215
216	case 0x1:
217	case 0x29:
218		if (rex_w && rm_is_reg(CFI_SP)) {
219
220			/* add/sub reg, %rsp */
221			ADD_OP(op) {
222				op->src.type = OP_SRC_ADD;
223				op->src.reg = modrm_reg;
224				op->dest.type = OP_DEST_REG;
225				op->dest.reg = CFI_SP;
226			}
227		}
228		break;
229
230	case 0x50 ... 0x57:
231
232		/* push reg */
233		ADD_OP(op) {
234			op->src.type = OP_SRC_REG;
235			op->src.reg = (op1 & 0x7) + 8*rex_b;
236			op->dest.type = OP_DEST_PUSH;
237		}
238
239		break;
240
241	case 0x58 ... 0x5f:
242
243		/* pop reg */
244		ADD_OP(op) {
245			op->src.type = OP_SRC_POP;
246			op->dest.type = OP_DEST_REG;
247			op->dest.reg = (op1 & 0x7) + 8*rex_b;
248		}
249
250		break;
251
252	case 0x68:
253	case 0x6a:
254		/* push immediate */
255		ADD_OP(op) {
256			op->src.type = OP_SRC_CONST;
257			op->dest.type = OP_DEST_PUSH;
258		}
259		break;
260
261	case 0x70 ... 0x7f:
262		insn->type = INSN_JUMP_CONDITIONAL;
263		break;
264
265	case 0x80 ... 0x83:
266		/*
267		 * 1000 00sw : mod OP r/m : immediate
268		 *
269		 * s - sign extend immediate
270		 * w - imm8 / imm32
271		 *
272		 * OP: 000 ADD    100 AND
273		 *     001 OR     101 SUB
274		 *     010 ADC    110 XOR
275		 *     011 SBB    111 CMP
276		 */
277
278		/* 64bit only */
279		if (!rex_w)
280			break;
281
282		/* %rsp target only */
283		if (!rm_is_reg(CFI_SP))
284			break;
285
286		imm = ins.immediate.value;
287		if (op1 & 2) { /* sign extend */
288			if (op1 & 1) { /* imm32 */
289				imm <<= 32;
290				imm = (s64)imm >> 32;
291			} else { /* imm8 */
292				imm <<= 56;
293				imm = (s64)imm >> 56;
294			}
295		}
296
297		switch (modrm_reg & 7) {
298		case 5:
299			imm = -imm;
300			fallthrough;
301		case 0:
302			/* add/sub imm, %rsp */
303			ADD_OP(op) {
304				op->src.type = OP_SRC_ADD;
305				op->src.reg = CFI_SP;
306				op->src.offset = imm;
307				op->dest.type = OP_DEST_REG;
308				op->dest.reg = CFI_SP;
309			}
310			break;
311
312		case 4:
313			/* and imm, %rsp */
314			ADD_OP(op) {
315				op->src.type = OP_SRC_AND;
316				op->src.reg = CFI_SP;
317				op->src.offset = ins.immediate.value;
318				op->dest.type = OP_DEST_REG;
319				op->dest.reg = CFI_SP;
320			}
321			break;
322
323		default:
324			/* WARN ? */
325			break;
326		}
327
328		break;
329
330	case 0x89:
331		if (!rex_w)
332			break;
333
334		if (modrm_reg == CFI_SP) {
335
336			if (mod_is_reg()) {
337				/* mov %rsp, reg */
338				ADD_OP(op) {
339					op->src.type = OP_SRC_REG;
340					op->src.reg = CFI_SP;
341					op->dest.type = OP_DEST_REG;
342					op->dest.reg = modrm_rm;
343				}
344				break;
345
346			} else {
347				/* skip RIP relative displacement */
348				if (is_RIP())
349					break;
350
351				/* skip nontrivial SIB */
352				if (have_SIB()) {
353					modrm_rm = sib_base;
354					if (sib_index != CFI_SP)
355						break;
356				}
357
358				/* mov %rsp, disp(%reg) */
359				ADD_OP(op) {
360					op->src.type = OP_SRC_REG;
361					op->src.reg = CFI_SP;
362					op->dest.type = OP_DEST_REG_INDIRECT;
363					op->dest.reg = modrm_rm;
364					op->dest.offset = ins.displacement.value;
365				}
366				break;
367			}
368
369			break;
370		}
371
372		if (rm_is_reg(CFI_SP)) {
373
374			/* mov reg, %rsp */
375			ADD_OP(op) {
376				op->src.type = OP_SRC_REG;
377				op->src.reg = modrm_reg;
378				op->dest.type = OP_DEST_REG;
379				op->dest.reg = CFI_SP;
380			}
381			break;
382		}
383
384		fallthrough;
385	case 0x88:
386		if (!rex_w)
387			break;
388
389		if (rm_is_mem(CFI_BP)) {
390
391			/* mov reg, disp(%rbp) */
392			ADD_OP(op) {
393				op->src.type = OP_SRC_REG;
394				op->src.reg = modrm_reg;
395				op->dest.type = OP_DEST_REG_INDIRECT;
396				op->dest.reg = CFI_BP;
397				op->dest.offset = ins.displacement.value;
398			}
399			break;
400		}
401
402		if (rm_is_mem(CFI_SP)) {
403
404			/* mov reg, disp(%rsp) */
405			ADD_OP(op) {
406				op->src.type = OP_SRC_REG;
407				op->src.reg = modrm_reg;
408				op->dest.type = OP_DEST_REG_INDIRECT;
409				op->dest.reg = CFI_SP;
410				op->dest.offset = ins.displacement.value;
411			}
412			break;
413		}
414
415		break;
416
417	case 0x8b:
418		if (!rex_w)
419			break;
420
421		if (rm_is_mem(CFI_BP)) {
422
423			/* mov disp(%rbp), reg */
424			ADD_OP(op) {
425				op->src.type = OP_SRC_REG_INDIRECT;
426				op->src.reg = CFI_BP;
427				op->src.offset = ins.displacement.value;
428				op->dest.type = OP_DEST_REG;
429				op->dest.reg = modrm_reg;
430			}
431			break;
432		}
433
434		if (rm_is_mem(CFI_SP)) {
435
436			/* mov disp(%rsp), reg */
437			ADD_OP(op) {
438				op->src.type = OP_SRC_REG_INDIRECT;
439				op->src.reg = CFI_SP;
440				op->src.offset = ins.displacement.value;
441				op->dest.type = OP_DEST_REG;
442				op->dest.reg = modrm_reg;
443			}
444			break;
445		}
446
447		break;
448
449	case 0x8d:
450		if (mod_is_reg()) {
451			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
452			break;
453		}
454
455		/* skip non 64bit ops */
456		if (!rex_w)
457			break;
458
459		/* skip nontrivial SIB */
460		if (have_SIB()) {
461			modrm_rm = sib_base;
462			if (sib_index != CFI_SP)
463				break;
464		}
465
466		/* lea disp(%rip), %dst */
467		if (is_RIP()) {
468			insn->type = INSN_LEA_RIP;
469			break;
470		}
471
472		/* lea disp(%src), %dst */
473		ADD_OP(op) {
474			op->src.offset = ins.displacement.value;
475			if (!op->src.offset) {
476				/* lea (%src), %dst */
477				op->src.type = OP_SRC_REG;
478			} else {
479				/* lea disp(%src), %dst */
480				op->src.type = OP_SRC_ADD;
481			}
482			op->src.reg = modrm_rm;
483			op->dest.type = OP_DEST_REG;
484			op->dest.reg = modrm_reg;
485		}
486		break;
487
488	case 0x8f:
489		/* pop to mem */
490		ADD_OP(op) {
491			op->src.type = OP_SRC_POP;
492			op->dest.type = OP_DEST_MEM;
493		}
494		break;
495
496	case 0x90:
497		insn->type = INSN_NOP;
498		break;
499
500	case 0x9c:
501		/* pushf */
502		ADD_OP(op) {
503			op->src.type = OP_SRC_CONST;
504			op->dest.type = OP_DEST_PUSHF;
505		}
506		break;
507
508	case 0x9d:
509		/* popf */
510		ADD_OP(op) {
511			op->src.type = OP_SRC_POPF;
512			op->dest.type = OP_DEST_MEM;
513		}
514		break;
515
516	case 0x0f:
517
518		if (op2 == 0x01) {
519
520			switch (insn_last_prefix_id(&ins)) {
521			case INAT_PFX_REPE:
522			case INAT_PFX_REPNE:
523				if (modrm == 0xca)
524					/* eretu/erets */
525					insn->type = INSN_CONTEXT_SWITCH;
526				break;
527			default:
528				if (modrm == 0xca)
529					insn->type = INSN_CLAC;
530				else if (modrm == 0xcb)
531					insn->type = INSN_STAC;
532				break;
533			}
534		} else if (op2 >= 0x80 && op2 <= 0x8f) {
535
536			insn->type = INSN_JUMP_CONDITIONAL;
537
538		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
539			   op2 == 0x35) {
540
541			/* sysenter, sysret */
542			insn->type = INSN_CONTEXT_SWITCH;
543
544		} else if (op2 == 0x0b || op2 == 0xb9) {
545
546			/* ud2 */
547			insn->type = INSN_BUG;
548
549		} else if (op2 == 0x0d || op2 == 0x1f) {
550
551			/* nopl/nopw */
552			insn->type = INSN_NOP;
553
554		} else if (op2 == 0x1e) {
555
556			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
557				insn->type = INSN_ENDBR;
558
559
560		} else if (op2 == 0x38 && op3 == 0xf8) {
561			if (ins.prefixes.nbytes == 1 &&
562			    ins.prefixes.bytes[0] == 0xf2) {
563				/* ENQCMD cannot be used in the kernel. */
564				WARN("ENQCMD instruction at %s:%lx", sec->name,
565				     offset);
566			}
567
568		} else if (op2 == 0xa0 || op2 == 0xa8) {
569
570			/* push fs/gs */
571			ADD_OP(op) {
572				op->src.type = OP_SRC_CONST;
573				op->dest.type = OP_DEST_PUSH;
574			}
575
576		} else if (op2 == 0xa1 || op2 == 0xa9) {
577
578			/* pop fs/gs */
579			ADD_OP(op) {
580				op->src.type = OP_SRC_POP;
581				op->dest.type = OP_DEST_MEM;
582			}
583		}
584
585		break;
586
587	case 0xc9:
588		/*
589		 * leave
590		 *
591		 * equivalent to:
592		 * mov bp, sp
593		 * pop bp
594		 */
595		ADD_OP(op) {
596			op->src.type = OP_SRC_REG;
597			op->src.reg = CFI_BP;
598			op->dest.type = OP_DEST_REG;
599			op->dest.reg = CFI_SP;
600		}
601		ADD_OP(op) {
602			op->src.type = OP_SRC_POP;
603			op->dest.type = OP_DEST_REG;
604			op->dest.reg = CFI_BP;
605		}
606		break;
607
608	case 0xcc:
609		/* int3 */
610		insn->type = INSN_TRAP;
611		break;
612
613	case 0xe3:
614		/* jecxz/jrcxz */
615		insn->type = INSN_JUMP_CONDITIONAL;
616		break;
617
618	case 0xe9:
619	case 0xeb:
620		insn->type = INSN_JUMP_UNCONDITIONAL;
621		break;
622
623	case 0xc2:
624	case 0xc3:
625		insn->type = INSN_RETURN;
626		break;
627
628	case 0xc7: /* mov imm, r/m */
629		if (!opts.noinstr)
630			break;
631
632		if (ins.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
633			struct reloc *immr, *disp;
634			struct symbol *func;
635			int idx;
636
637			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
638			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);
639
640			if (!immr || strcmp(immr->sym->name, "pv_ops"))
641				break;
642
643			idx = (reloc_addend(immr) + 8) / sizeof(void *);
644
645			func = disp->sym;
646			if (disp->sym->type == STT_SECTION)
647				func = find_symbol_by_offset(disp->sym->sec, reloc_addend(disp));
648			if (!func) {
649				WARN("no func for pv_ops[]");
650				return -1;
651			}
652
653			objtool_pv_add(file, idx, func);
654		}
655
656		break;
657
658	case 0xcf: /* iret */
659		/*
660		 * Handle sync_core(), which has an IRET to self.
661		 * All other IRET are in STT_NONE entry code.
662		 */
663		sym = find_symbol_containing(sec, offset);
664		if (sym && sym->type == STT_FUNC) {
665			ADD_OP(op) {
666				/* add $40, %rsp */
667				op->src.type = OP_SRC_ADD;
668				op->src.reg = CFI_SP;
669				op->src.offset = 5*8;
670				op->dest.type = OP_DEST_REG;
671				op->dest.reg = CFI_SP;
672			}
673			break;
674		}
675
676		fallthrough;
677
678	case 0xca: /* retf */
679	case 0xcb: /* retf */
680		insn->type = INSN_CONTEXT_SWITCH;
681		break;
682
683	case 0xe0: /* loopne */
684	case 0xe1: /* loope */
685	case 0xe2: /* loop */
686		insn->type = INSN_JUMP_CONDITIONAL;
687		break;
688
689	case 0xe8:
690		insn->type = INSN_CALL;
691		/*
692		 * For the impact on the stack, a CALL behaves like
693		 * a PUSH of an immediate value (the return address).
694		 */
695		ADD_OP(op) {
696			op->src.type = OP_SRC_CONST;
697			op->dest.type = OP_DEST_PUSH;
698		}
699		break;
700
701	case 0xfc:
702		insn->type = INSN_CLD;
703		break;
704
705	case 0xfd:
706		insn->type = INSN_STD;
707		break;
708
709	case 0xff:
710		if (modrm_reg == 2 || modrm_reg == 3) {
711
712			insn->type = INSN_CALL_DYNAMIC;
713			if (has_notrack_prefix(&ins))
714				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);
715
716		} else if (modrm_reg == 4) {
717
718			insn->type = INSN_JUMP_DYNAMIC;
719			if (has_notrack_prefix(&ins))
720				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);
721
722		} else if (modrm_reg == 5) {
723
724			/* jmpf */
725			insn->type = INSN_CONTEXT_SWITCH;
726
727		} else if (modrm_reg == 6) {
728
729			/* push from mem */
730			ADD_OP(op) {
731				op->src.type = OP_SRC_CONST;
732				op->dest.type = OP_DEST_PUSH;
733			}
734		}
735
736		break;
737
738	default:
739		break;
740	}
741
742	if (ins.immediate.nbytes)
743		insn->immediate = ins.immediate.value;
744	else if (ins.displacement.nbytes)
745		insn->immediate = ins.displacement.value;
746
747	return 0;
748}
749
750void arch_initial_func_cfi_state(struct cfi_init_state *state)
751{
752	int i;
753
754	for (i = 0; i < CFI_NUM_REGS; i++) {
755		state->regs[i].base = CFI_UNDEFINED;
756		state->regs[i].offset = 0;
757	}
758
759	/* initial CFA (call frame address) */
760	state->cfa.base = CFI_SP;
761	state->cfa.offset = 8;
762
763	/* initial RA (return address) */
764	state->regs[CFI_RA].base = CFI_CFA;
765	state->regs[CFI_RA].offset = -8;
766}
767
768const char *arch_nop_insn(int len)
769{
770	static const char nops[5][5] = {
771		{ BYTES_NOP1 },
772		{ BYTES_NOP2 },
773		{ BYTES_NOP3 },
774		{ BYTES_NOP4 },
775		{ BYTES_NOP5 },
776	};
777
778	if (len < 1 || len > 5) {
779		WARN("invalid NOP size: %d\n", len);
780		return NULL;
781	}
782
783	return nops[len-1];
784}
785
786#define BYTE_RET	0xC3
787
788const char *arch_ret_insn(int len)
789{
790	static const char ret[5][5] = {
791		{ BYTE_RET },
792		{ BYTE_RET, 0xcc },
793		{ BYTE_RET, 0xcc, BYTES_NOP1 },
794		{ BYTE_RET, 0xcc, BYTES_NOP2 },
795		{ BYTE_RET, 0xcc, BYTES_NOP3 },
796	};
797
798	if (len < 1 || len > 5) {
799		WARN("invalid RET size: %d\n", len);
800		return NULL;
801	}
802
803	return ret[len-1];
804}
805
806int arch_decode_hint_reg(u8 sp_reg, int *base)
807{
808	switch (sp_reg) {
809	case ORC_REG_UNDEFINED:
810		*base = CFI_UNDEFINED;
811		break;
812	case ORC_REG_SP:
813		*base = CFI_SP;
814		break;
815	case ORC_REG_BP:
816		*base = CFI_BP;
817		break;
818	case ORC_REG_SP_INDIRECT:
819		*base = CFI_SP_INDIRECT;
820		break;
821	case ORC_REG_R10:
822		*base = CFI_R10;
823		break;
824	case ORC_REG_R13:
825		*base = CFI_R13;
826		break;
827	case ORC_REG_DI:
828		*base = CFI_DI;
829		break;
830	case ORC_REG_DX:
831		*base = CFI_DX;
832		break;
833	default:
834		return -1;
835	}
836
837	return 0;
838}
839
840bool arch_is_retpoline(struct symbol *sym)
841{
842	return !strncmp(sym->name, "__x86_indirect_", 15);
843}
844
845bool arch_is_rethunk(struct symbol *sym)
846{
847	return !strcmp(sym->name, "__x86_return_thunk");
848}
849
850bool arch_is_embedded_insn(struct symbol *sym)
851{
852	return !strcmp(sym->name, "retbleed_return_thunk") ||
853	       !strcmp(sym->name, "srso_safe_ret");
854}