Linux kernel source listing — tools/objtool/arch/x86/decode.c

Version: v5.14.15
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
  4 */
  5
  6#include <stdio.h>
  7#include <stdlib.h>
  8
  9#define unlikely(cond) (cond)
 10#include <asm/insn.h>
 11#include "../../../arch/x86/lib/inat.c"
 12#include "../../../arch/x86/lib/insn.c"
 13
 14#define CONFIG_64BIT 1
 15#include <asm/nops.h>
 16
 17#include <asm/orc_types.h>
 18#include <objtool/check.h>
 19#include <objtool/elf.h>
 20#include <objtool/arch.h>
 21#include <objtool/warn.h>
 22#include <objtool/endianness.h>
 
 23#include <arch/elf.h>
 24
 
 
 
 
 
 25static int is_x86_64(const struct elf *elf)
 26{
 27	switch (elf->ehdr.e_machine) {
 28	case EM_X86_64:
 29		return 1;
 30	case EM_386:
 31		return 0;
 32	default:
 33		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
 34		return -1;
 35	}
 36}
 37
 38bool arch_callee_saved_reg(unsigned char reg)
 39{
 40	switch (reg) {
 41	case CFI_BP:
 42	case CFI_BX:
 43	case CFI_R12:
 44	case CFI_R13:
 45	case CFI_R14:
 46	case CFI_R15:
 47		return true;
 48
 49	case CFI_AX:
 50	case CFI_CX:
 51	case CFI_DX:
 52	case CFI_SI:
 53	case CFI_DI:
 54	case CFI_SP:
 55	case CFI_R8:
 56	case CFI_R9:
 57	case CFI_R10:
 58	case CFI_R11:
 59	case CFI_RA:
 60	default:
 61		return false;
 62	}
 63}
 64
 65unsigned long arch_dest_reloc_offset(int addend)
 66{
 67	return addend + 4;
 68}
 69
 70unsigned long arch_jump_destination(struct instruction *insn)
 71{
 72	return insn->offset + insn->len + insn->immediate;
 73}
 74
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
/*
 * Allocate a zeroed stack_op, append it to ops_list, and execute the
 * block that follows the macro exactly once with 'op' pointing at the
 * new entry.  Returns -1 from the *enclosing function* on allocation
 * failure.
 */
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (list_add_tail(&op->list, ops_list); op; op = NULL)

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 * 00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 * 01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 * 10 |  [r/m + d32]   |[S+D]|   [r/m + d32] |
 * 11 |                   r/ m               |
 */

/* mod == 3 selects a register operand; any other mod is a memory operand */
#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

/* rm == 5 (BP) with mod == 0 encodes RIP-relative addressing (see table) */
#define is_RIP()   ((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
/* rm == 4 (SP) with a memory mod means a SIB byte follows */
#define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem())

/* base register of the operand; sib_index == SP encodes "no index" */
#define rm_is(reg) (have_SIB() ? \
		    sib_base == (reg) && sib_index == CFI_SP : \
		    modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
104
/*
 * Decode one x86 instruction at sec:offset and classify it for objtool.
 *
 * Outputs:
 *   *len       - instruction length in bytes
 *   *type      - INSN_* classification (defaults to INSN_OTHER)
 *   *immediate - immediate operand value, or 0 if none
 *   ops_list   - appended stack_op entries describing any effect the
 *                instruction has on the stack / frame registers
 *
 * Returns 0 on success, -1 on decode failure or allocation failure
 * (via ADD_OP).  Only the subset of opcodes that can affect stack
 * state or control flow is modeled; everything else stays INSN_OTHER.
 */
int arch_decode_instruction(const struct elf *elf, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate,
			    struct list_head *ops_list)
{
	struct insn insn;
	int x86_64, ret;
	unsigned char op1, op2,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&insn, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	/* VEX-encoded instructions are left as INSN_OTHER. */
	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];

	/* Reduce each REX bit to a 0/1 flag. */
	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	/* REX.R/REX.B extend the 3-bit ModRM fields to 4 bits. */
	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm  = X86_MODRM_RM(modrm)  + 8*rex_b;
	}

	if (insn.sib.nbytes) {
		sib = insn.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base  = X86_SIB_BASE(sib)  + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = insn.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			imm = -imm;
			/* fallthrough */
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = insn.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = insn.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip RIP relative displacement */
		if (is_RIP())
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = insn.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:
		/* two-byte opcodes: dispatch on the second opcode byte */

		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */

	case 0xca: /* retf */
	case 0xcb: /* retf */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe8:
		*type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		*type = INSN_CLD;
		break;

	case 0xfd:
		*type = INSN_STD;
		break;

	case 0xff:
		/* group 5: reg field of ModRM is the opcode extension */
		if (modrm_reg == 2 || modrm_reg == 3)

			*type = INSN_CALL_DYNAMIC;

		else if (modrm_reg == 4)

			*type = INSN_JUMP_DYNAMIC;

		else if (modrm_reg == 5)

			/* jmpf */
			*type = INSN_CONTEXT_SWITCH;

		else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	/* Report the immediate (0 when the instruction has none). */
	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}
625
626void arch_initial_func_cfi_state(struct cfi_init_state *state)
627{
628	int i;
629
630	for (i = 0; i < CFI_NUM_REGS; i++) {
631		state->regs[i].base = CFI_UNDEFINED;
632		state->regs[i].offset = 0;
633	}
634
635	/* initial CFA (call frame address) */
636	state->cfa.base = CFI_SP;
637	state->cfa.offset = 8;
638
639	/* initial RA (return address) */
640	state->regs[CFI_RA].base = CFI_CFA;
641	state->regs[CFI_RA].offset = -8;
642}
643
644const char *arch_nop_insn(int len)
645{
646	static const char nops[5][5] = {
647		{ BYTES_NOP1 },
648		{ BYTES_NOP2 },
649		{ BYTES_NOP3 },
650		{ BYTES_NOP4 },
651		{ BYTES_NOP5 },
652	};
653
654	if (len < 1 || len > 5) {
655		WARN("invalid NOP size: %d\n", len);
656		return NULL;
657	}
658
659	return nops[len-1];
660}
661
/* asm/alternative.h ? */

/* High bit of the cpuid word inverts the feature test (apply when NOT set). */
#define ALTINSTR_FLAG_INV	(1 << 15)
#define ALT_NOT(feat)		((feat) | ALTINSTR_FLAG_INV)

/*
 * On-disk layout of one .altinstructions entry; must match the
 * kernel's struct alt_instr byte-for-byte, hence __packed.
 */
struct alt_instr {
	s32 instr_offset;	/* original instruction */
	s32 repl_offset;	/* offset to replacement instruction */
	u16 cpuid;		/* cpuid bit set for replacement */
	u8  instrlen;		/* length of original instruction */
	u8  replacementlen;	/* length of new instruction */
} __packed;
674
/*
 * Append one alt_instr record to the .altinstructions section
 * (creating the section on first use).  instr_offset and repl_offset
 * are emitted as PC32 relocations against the original instruction
 * and the replacement symbol respectively; cpuid/instrlen/
 * replacementlen are stored directly.  Returns 0 on success, -1 on
 * any ELF or allocation failure.
 */
static int elf_add_alternative(struct elf *elf,
			       struct instruction *orig, struct symbol *sym,
			       int cpuid, u8 orig_len, u8 repl_len)
{
	const int size = sizeof(struct alt_instr);
	struct alt_instr *alt;
	struct section *sec;
	Elf_Scn *s;

	sec = find_section_by_name(elf, ".altinstructions");
	if (!sec) {
		sec = elf_create_section(elf, ".altinstructions",
					 SHF_ALLOC, 0, 0);

		if (!sec) {
			WARN_ELF("elf_create_section");
			return -1;
		}
	}

	s = elf_getscn(elf->elf, sec->idx);
	if (!s) {
		WARN_ELF("elf_getscn");
		return -1;
	}

	/* New data buffer per record; libelf chains them onto the section. */
	sec->data = elf_newdata(s);
	if (!sec->data) {
		WARN_ELF("elf_newdata");
		return -1;
	}

	sec->data->d_size = size;
	sec->data->d_align = 1;

	alt = sec->data->d_buf = malloc(size);
	if (!sec->data->d_buf) {
		perror("malloc");
		return -1;
	}
	memset(sec->data->d_buf, 0, size);

	/*
	 * The record starts at the current section size; instr_offset is
	 * the first field, repl_offset is 4 bytes in.
	 * NOTE(review): d_buf is not freed on the error paths below —
	 * presumably acceptable since the tool exits on error; confirm.
	 */
	if (elf_add_reloc_to_insn(elf, sec, sec->sh.sh_size,
				  R_X86_64_PC32, orig->sec, orig->offset)) {
		WARN("elf_create_reloc: alt_instr::instr_offset");
		return -1;
	}

	if (elf_add_reloc(elf, sec, sec->sh.sh_size + 4,
			  R_X86_64_PC32, sym, 0)) {
		WARN("elf_create_reloc: alt_instr::repl_offset");
		return -1;
	}

	alt->cpuid = bswap_if_needed(cpuid);
	alt->instrlen = orig_len;
	alt->replacementlen = repl_len;

	sec->sh.sh_size += size;
	sec->changed = true;

	return 0;
}
738
739#define X86_FEATURE_RETPOLINE                ( 7*32+12)
740
741int arch_rewrite_retpolines(struct objtool_file *file)
742{
743	struct instruction *insn;
744	struct reloc *reloc;
745	struct symbol *sym;
746	char name[32] = "";
747
748	list_for_each_entry(insn, &file->retpoline_call_list, call_node) {
749
750		if (insn->type != INSN_JUMP_DYNAMIC &&
751		    insn->type != INSN_CALL_DYNAMIC)
752			continue;
753
754		if (!strcmp(insn->sec->name, ".text.__x86.indirect_thunk"))
755			continue;
756
757		reloc = insn->reloc;
758
759		sprintf(name, "__x86_indirect_alt_%s_%s",
760			insn->type == INSN_JUMP_DYNAMIC ? "jmp" : "call",
761			reloc->sym->name + 21);
762
763		sym = find_symbol_by_name(file->elf, name);
764		if (!sym) {
765			sym = elf_create_undef_symbol(file->elf, name);
766			if (!sym) {
767				WARN("elf_create_undef_symbol");
768				return -1;
769			}
770		}
771
772		if (elf_add_alternative(file->elf, insn, sym,
773					ALT_NOT(X86_FEATURE_RETPOLINE), 5, 5)) {
774			WARN("elf_add_alternative");
775			return -1;
776		}
777	}
778
779	return 0;
780}
781
782int arch_decode_hint_reg(struct instruction *insn, u8 sp_reg)
783{
784	struct cfi_reg *cfa = &insn->cfi.cfa;
785
786	switch (sp_reg) {
787	case ORC_REG_UNDEFINED:
788		cfa->base = CFI_UNDEFINED;
789		break;
790	case ORC_REG_SP:
791		cfa->base = CFI_SP;
792		break;
793	case ORC_REG_BP:
794		cfa->base = CFI_BP;
795		break;
796	case ORC_REG_SP_INDIRECT:
797		cfa->base = CFI_SP_INDIRECT;
798		break;
799	case ORC_REG_R10:
800		cfa->base = CFI_R10;
801		break;
802	case ORC_REG_R13:
803		cfa->base = CFI_R13;
804		break;
805	case ORC_REG_DI:
806		cfa->base = CFI_DI;
807		break;
808	case ORC_REG_DX:
809		cfa->base = CFI_DX;
810		break;
811	default:
812		return -1;
813	}
814
815	return 0;
816}
817
818bool arch_is_retpoline(struct symbol *sym)
819{
820	return !strncmp(sym->name, "__x86_indirect_", 15);
 
 
 
 
 
821}
Version: v6.2 — the same file from a later kernel release (this second listing is truncated at the end of the page).
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
  4 */
  5
  6#include <stdio.h>
  7#include <stdlib.h>
  8
  9#define unlikely(cond) (cond)
 10#include <asm/insn.h>
 11#include "../../../arch/x86/lib/inat.c"
 12#include "../../../arch/x86/lib/insn.c"
 13
 14#define CONFIG_64BIT 1
 15#include <asm/nops.h>
 16
 17#include <asm/orc_types.h>
 18#include <objtool/check.h>
 19#include <objtool/elf.h>
 20#include <objtool/arch.h>
 21#include <objtool/warn.h>
 22#include <objtool/endianness.h>
 23#include <objtool/builtin.h>
 24#include <arch/elf.h>
 25
 26int arch_ftrace_match(char *name)
 27{
 28	return !strcmp(name, "__fentry__");
 29}
 30
/*
 * Classify the object file by ELF machine type: 1 for x86-64, 0 for
 * i386, -1 (with a warning) for anything else.
 */
static int is_x86_64(const struct elf *elf)
{
	switch (elf->ehdr.e_machine) {
	case EM_X86_64:
		return 1;
	case EM_386:
		return 0;
	default:
		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
		return -1;
	}
}
 43
/*
 * System V x86-64 ABI: %rbp, %rbx and %r12-%r15 are preserved across
 * calls; everything else listed below is caller-saved.
 */
bool arch_callee_saved_reg(unsigned char reg)
{
	switch (reg) {
	case CFI_BP:
	case CFI_BX:
	case CFI_R12:
	case CFI_R13:
	case CFI_R14:
	case CFI_R15:
		return true;

	case CFI_AX:
	case CFI_CX:
	case CFI_DX:
	case CFI_SI:
	case CFI_DI:
	case CFI_SP:
	case CFI_R8:
	case CFI_R9:
	case CFI_R10:
	case CFI_R11:
	case CFI_RA:
	default:
		return false;
	}
}
 70
 71unsigned long arch_dest_reloc_offset(int addend)
 72{
 73	return addend + 4;
 74}
 75
/*
 * Target of a direct relative jump/call: the address of the next
 * instruction plus the signed immediate displacement.
 */
unsigned long arch_jump_destination(struct instruction *insn)
{
	return insn->offset + insn->len + insn->immediate;
}
 80
 81bool arch_pc_relative_reloc(struct reloc *reloc)
 82{
 83	/*
 84	 * All relocation types where P (the address of the target)
 85	 * is included in the computation.
 86	 */
 87	switch (reloc->type) {
 88	case R_X86_64_PC8:
 89	case R_X86_64_PC16:
 90	case R_X86_64_PC32:
 91	case R_X86_64_PC64:
 92
 93	case R_X86_64_PLT32:
 94	case R_X86_64_GOTPC32:
 95	case R_X86_64_GOTPCREL:
 96		return true;
 97
 98	default:
 99		break;
100	}
101
102	return false;
103}
104
105#define ADD_OP(op) \
106	if (!(op = calloc(1, sizeof(*op)))) \
107		return -1; \
108	else for (list_add_tail(&op->list, ops_list); op; op = NULL)
109
110/*
111 * Helpers to decode ModRM/SIB:
112 *
113 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
114 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
115 * Mod+----------------+-----+-----+---------+
116 * 00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
117 * 01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
118 * 10 |  [r/m + d32]   |[S+D]|   [r/m + d32] |
119 * 11 |                   r/ m               |
120 */
121
122#define mod_is_mem()	(modrm_mod != 3)
123#define mod_is_reg()	(modrm_mod == 3)
124
125#define is_RIP()   ((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
126#define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem())
127
128#define rm_is(reg) (have_SIB() ? \
129		    sib_base == (reg) && sib_index == CFI_SP : \
130		    modrm_rm == (reg))
131
132#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
133#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
134
135static bool has_notrack_prefix(struct insn *insn)
136{
137	int i;
138
139	for (i = 0; i < insn->prefixes.nbytes; i++) {
140		if (insn->prefixes.bytes[i] == 0x3e)
141			return true;
142	}
143
144	return false;
145}
146
147int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
148			    unsigned long offset, unsigned int maxlen,
149			    unsigned int *len, enum insn_type *type,
150			    unsigned long *immediate,
151			    struct list_head *ops_list)
152{
153	const struct elf *elf = file->elf;
154	struct insn insn;
155	int x86_64, ret;
156	unsigned char op1, op2, op3, prefix,
157		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
158		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
159		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
160	struct stack_op *op = NULL;
161	struct symbol *sym;
162	u64 imm;
163
164	x86_64 = is_x86_64(elf);
165	if (x86_64 == -1)
166		return -1;
167
168	ret = insn_decode(&insn, sec->data->d_buf + offset, maxlen,
169			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
170	if (ret < 0) {
171		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
172		return -1;
173	}
174
175	*len = insn.length;
176	*type = INSN_OTHER;
177
178	if (insn.vex_prefix.nbytes)
179		return 0;
180
181	prefix = insn.prefixes.bytes[0];
182
183	op1 = insn.opcode.bytes[0];
184	op2 = insn.opcode.bytes[1];
185	op3 = insn.opcode.bytes[2];
186
187	if (insn.rex_prefix.nbytes) {
188		rex = insn.rex_prefix.bytes[0];
189		rex_w = X86_REX_W(rex) >> 3;
190		rex_r = X86_REX_R(rex) >> 2;
191		rex_x = X86_REX_X(rex) >> 1;
192		rex_b = X86_REX_B(rex);
193	}
194
195	if (insn.modrm.nbytes) {
196		modrm = insn.modrm.bytes[0];
197		modrm_mod = X86_MODRM_MOD(modrm);
198		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
199		modrm_rm  = X86_MODRM_RM(modrm)  + 8*rex_b;
200	}
201
202	if (insn.sib.nbytes) {
203		sib = insn.sib.bytes[0];
204		/* sib_scale = X86_SIB_SCALE(sib); */
205		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
206		sib_base  = X86_SIB_BASE(sib)  + 8*rex_b;
207	}
208
209	switch (op1) {
210
211	case 0x1:
212	case 0x29:
213		if (rex_w && rm_is_reg(CFI_SP)) {
214
215			/* add/sub reg, %rsp */
216			ADD_OP(op) {
217				op->src.type = OP_SRC_ADD;
218				op->src.reg = modrm_reg;
219				op->dest.type = OP_DEST_REG;
220				op->dest.reg = CFI_SP;
221			}
222		}
223		break;
224
225	case 0x50 ... 0x57:
226
227		/* push reg */
228		ADD_OP(op) {
229			op->src.type = OP_SRC_REG;
230			op->src.reg = (op1 & 0x7) + 8*rex_b;
231			op->dest.type = OP_DEST_PUSH;
232		}
233
234		break;
235
236	case 0x58 ... 0x5f:
237
238		/* pop reg */
239		ADD_OP(op) {
240			op->src.type = OP_SRC_POP;
241			op->dest.type = OP_DEST_REG;
242			op->dest.reg = (op1 & 0x7) + 8*rex_b;
243		}
244
245		break;
246
247	case 0x68:
248	case 0x6a:
249		/* push immediate */
250		ADD_OP(op) {
251			op->src.type = OP_SRC_CONST;
252			op->dest.type = OP_DEST_PUSH;
253		}
254		break;
255
256	case 0x70 ... 0x7f:
257		*type = INSN_JUMP_CONDITIONAL;
258		break;
259
260	case 0x80 ... 0x83:
261		/*
262		 * 1000 00sw : mod OP r/m : immediate
263		 *
264		 * s - sign extend immediate
265		 * w - imm8 / imm32
266		 *
267		 * OP: 000 ADD    100 AND
268		 *     001 OR     101 SUB
269		 *     010 ADC    110 XOR
270		 *     011 SBB    111 CMP
271		 */
272
273		/* 64bit only */
274		if (!rex_w)
275			break;
276
277		/* %rsp target only */
278		if (!rm_is_reg(CFI_SP))
279			break;
280
281		imm = insn.immediate.value;
282		if (op1 & 2) { /* sign extend */
283			if (op1 & 1) { /* imm32 */
284				imm <<= 32;
285				imm = (s64)imm >> 32;
286			} else { /* imm8 */
287				imm <<= 56;
288				imm = (s64)imm >> 56;
289			}
290		}
291
292		switch (modrm_reg & 7) {
293		case 5:
294			imm = -imm;
295			/* fallthrough */
296		case 0:
297			/* add/sub imm, %rsp */
298			ADD_OP(op) {
299				op->src.type = OP_SRC_ADD;
300				op->src.reg = CFI_SP;
301				op->src.offset = imm;
302				op->dest.type = OP_DEST_REG;
303				op->dest.reg = CFI_SP;
304			}
305			break;
306
307		case 4:
308			/* and imm, %rsp */
309			ADD_OP(op) {
310				op->src.type = OP_SRC_AND;
311				op->src.reg = CFI_SP;
312				op->src.offset = insn.immediate.value;
313				op->dest.type = OP_DEST_REG;
314				op->dest.reg = CFI_SP;
315			}
316			break;
317
318		default:
319			/* WARN ? */
320			break;
321		}
322
323		break;
324
325	case 0x89:
326		if (!rex_w)
327			break;
328
329		if (modrm_reg == CFI_SP) {
330
331			if (mod_is_reg()) {
332				/* mov %rsp, reg */
333				ADD_OP(op) {
334					op->src.type = OP_SRC_REG;
335					op->src.reg = CFI_SP;
336					op->dest.type = OP_DEST_REG;
337					op->dest.reg = modrm_rm;
338				}
339				break;
340
341			} else {
342				/* skip RIP relative displacement */
343				if (is_RIP())
344					break;
345
346				/* skip nontrivial SIB */
347				if (have_SIB()) {
348					modrm_rm = sib_base;
349					if (sib_index != CFI_SP)
350						break;
351				}
352
353				/* mov %rsp, disp(%reg) */
354				ADD_OP(op) {
355					op->src.type = OP_SRC_REG;
356					op->src.reg = CFI_SP;
357					op->dest.type = OP_DEST_REG_INDIRECT;
358					op->dest.reg = modrm_rm;
359					op->dest.offset = insn.displacement.value;
360				}
361				break;
362			}
363
364			break;
365		}
366
367		if (rm_is_reg(CFI_SP)) {
368
369			/* mov reg, %rsp */
370			ADD_OP(op) {
371				op->src.type = OP_SRC_REG;
372				op->src.reg = modrm_reg;
373				op->dest.type = OP_DEST_REG;
374				op->dest.reg = CFI_SP;
375			}
376			break;
377		}
378
379		/* fallthrough */
380	case 0x88:
381		if (!rex_w)
382			break;
383
384		if (rm_is_mem(CFI_BP)) {
385
386			/* mov reg, disp(%rbp) */
387			ADD_OP(op) {
388				op->src.type = OP_SRC_REG;
389				op->src.reg = modrm_reg;
390				op->dest.type = OP_DEST_REG_INDIRECT;
391				op->dest.reg = CFI_BP;
392				op->dest.offset = insn.displacement.value;
393			}
394			break;
395		}
396
397		if (rm_is_mem(CFI_SP)) {
398
399			/* mov reg, disp(%rsp) */
400			ADD_OP(op) {
401				op->src.type = OP_SRC_REG;
402				op->src.reg = modrm_reg;
403				op->dest.type = OP_DEST_REG_INDIRECT;
404				op->dest.reg = CFI_SP;
405				op->dest.offset = insn.displacement.value;
406			}
407			break;
408		}
409
410		break;
411
412	case 0x8b:
413		if (!rex_w)
414			break;
415
416		if (rm_is_mem(CFI_BP)) {
417
418			/* mov disp(%rbp), reg */
419			ADD_OP(op) {
420				op->src.type = OP_SRC_REG_INDIRECT;
421				op->src.reg = CFI_BP;
422				op->src.offset = insn.displacement.value;
423				op->dest.type = OP_DEST_REG;
424				op->dest.reg = modrm_reg;
425			}
426			break;
427		}
428
429		if (rm_is_mem(CFI_SP)) {
430
431			/* mov disp(%rsp), reg */
432			ADD_OP(op) {
433				op->src.type = OP_SRC_REG_INDIRECT;
434				op->src.reg = CFI_SP;
435				op->src.offset = insn.displacement.value;
436				op->dest.type = OP_DEST_REG;
437				op->dest.reg = modrm_reg;
438			}
439			break;
440		}
441
442		break;
443
444	case 0x8d:
445		if (mod_is_reg()) {
446			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
447			break;
448		}
449
450		/* skip non 64bit ops */
451		if (!rex_w)
452			break;
453
454		/* skip RIP relative displacement */
455		if (is_RIP())
456			break;
457
458		/* skip nontrivial SIB */
459		if (have_SIB()) {
460			modrm_rm = sib_base;
461			if (sib_index != CFI_SP)
462				break;
463		}
464
465		/* lea disp(%src), %dst */
466		ADD_OP(op) {
467			op->src.offset = insn.displacement.value;
468			if (!op->src.offset) {
469				/* lea (%src), %dst */
470				op->src.type = OP_SRC_REG;
471			} else {
472				/* lea disp(%src), %dst */
473				op->src.type = OP_SRC_ADD;
474			}
475			op->src.reg = modrm_rm;
476			op->dest.type = OP_DEST_REG;
477			op->dest.reg = modrm_reg;
478		}
479		break;
480
481	case 0x8f:
482		/* pop to mem */
483		ADD_OP(op) {
484			op->src.type = OP_SRC_POP;
485			op->dest.type = OP_DEST_MEM;
486		}
487		break;
488
489	case 0x90:
490		*type = INSN_NOP;
491		break;
492
493	case 0x9c:
494		/* pushf */
495		ADD_OP(op) {
496			op->src.type = OP_SRC_CONST;
497			op->dest.type = OP_DEST_PUSHF;
498		}
499		break;
500
501	case 0x9d:
502		/* popf */
503		ADD_OP(op) {
504			op->src.type = OP_SRC_POPF;
505			op->dest.type = OP_DEST_MEM;
506		}
507		break;
508
509	case 0x0f:
510
511		if (op2 == 0x01) {
512
513			if (modrm == 0xca)
514				*type = INSN_CLAC;
515			else if (modrm == 0xcb)
516				*type = INSN_STAC;
517
518		} else if (op2 >= 0x80 && op2 <= 0x8f) {
519
520			*type = INSN_JUMP_CONDITIONAL;
521
522		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
523			   op2 == 0x35) {
524
525			/* sysenter, sysret */
526			*type = INSN_CONTEXT_SWITCH;
527
528		} else if (op2 == 0x0b || op2 == 0xb9) {
529
530			/* ud2 */
531			*type = INSN_BUG;
532
533		} else if (op2 == 0x0d || op2 == 0x1f) {
534
535			/* nopl/nopw */
536			*type = INSN_NOP;
537
538		} else if (op2 == 0x1e) {
539
540			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
541				*type = INSN_ENDBR;
542
543
544		} else if (op2 == 0x38 && op3 == 0xf8) {
545			if (insn.prefixes.nbytes == 1 &&
546			    insn.prefixes.bytes[0] == 0xf2) {
547				/* ENQCMD cannot be used in the kernel. */
548				WARN("ENQCMD instruction at %s:%lx", sec->name,
549				     offset);
550			}
551
552		} else if (op2 == 0xa0 || op2 == 0xa8) {
553
554			/* push fs/gs */
555			ADD_OP(op) {
556				op->src.type = OP_SRC_CONST;
557				op->dest.type = OP_DEST_PUSH;
558			}
559
560		} else if (op2 == 0xa1 || op2 == 0xa9) {
561
562			/* pop fs/gs */
563			ADD_OP(op) {
564				op->src.type = OP_SRC_POP;
565				op->dest.type = OP_DEST_MEM;
566			}
567		}
568
569		break;
570
571	case 0xc9:
572		/*
573		 * leave
574		 *
575		 * equivalent to:
576		 * mov bp, sp
577		 * pop bp
578		 */
579		ADD_OP(op) {
580			op->src.type = OP_SRC_REG;
581			op->src.reg = CFI_BP;
582			op->dest.type = OP_DEST_REG;
583			op->dest.reg = CFI_SP;
584		}
585		ADD_OP(op) {
586			op->src.type = OP_SRC_POP;
587			op->dest.type = OP_DEST_REG;
588			op->dest.reg = CFI_BP;
589		}
590		break;
591
592	case 0xcc:
593		/* int3 */
594		*type = INSN_TRAP;
595		break;
596
597	case 0xe3:
598		/* jecxz/jrcxz */
599		*type = INSN_JUMP_CONDITIONAL;
600		break;
601
602	case 0xe9:
603	case 0xeb:
604		*type = INSN_JUMP_UNCONDITIONAL;
605		break;
606
607	case 0xc2:
608	case 0xc3:
609		*type = INSN_RETURN;
610		break;
611
612	case 0xc7: /* mov imm, r/m */
613		if (!opts.noinstr)
614			break;
615
616		if (insn.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
617			struct reloc *immr, *disp;
618			struct symbol *func;
619			int idx;
620
621			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
622			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);
623
624			if (!immr || strcmp(immr->sym->name, "pv_ops"))
625				break;
626
627			idx = (immr->addend + 8) / sizeof(void *);
628
629			func = disp->sym;
630			if (disp->sym->type == STT_SECTION)
631				func = find_symbol_by_offset(disp->sym->sec, disp->addend);
632			if (!func) {
633				WARN("no func for pv_ops[]");
634				return -1;
635			}
636
637			objtool_pv_add(file, idx, func);
638		}
639
640		break;
641
642	case 0xcf: /* iret */
643		/*
644		 * Handle sync_core(), which has an IRET to self.
645		 * All other IRET are in STT_NONE entry code.
646		 */
647		sym = find_symbol_containing(sec, offset);
648		if (sym && sym->type == STT_FUNC) {
649			ADD_OP(op) {
650				/* add $40, %rsp */
651				op->src.type = OP_SRC_ADD;
652				op->src.reg = CFI_SP;
653				op->src.offset = 5*8;
654				op->dest.type = OP_DEST_REG;
655				op->dest.reg = CFI_SP;
656			}
657			break;
658		}
659
660		/* fallthrough */
661
662	case 0xca: /* retf */
663	case 0xcb: /* retf */
664		*type = INSN_CONTEXT_SWITCH;
665		break;
666
667	case 0xe0: /* loopne */
668	case 0xe1: /* loope */
669	case 0xe2: /* loop */
670		*type = INSN_JUMP_CONDITIONAL;
671		break;
672
673	case 0xe8:
674		*type = INSN_CALL;
675		/*
676		 * For the impact on the stack, a CALL behaves like
677		 * a PUSH of an immediate value (the return address).
678		 */
679		ADD_OP(op) {
680			op->src.type = OP_SRC_CONST;
681			op->dest.type = OP_DEST_PUSH;
682		}
683		break;
684
685	case 0xfc:
686		*type = INSN_CLD;
687		break;
688
689	case 0xfd:
690		*type = INSN_STD;
691		break;
692
693	case 0xff:
694		if (modrm_reg == 2 || modrm_reg == 3) {
695
696			*type = INSN_CALL_DYNAMIC;
697			if (has_notrack_prefix(&insn))
698				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);
699
700		} else if (modrm_reg == 4) {
701
702			*type = INSN_JUMP_DYNAMIC;
703			if (has_notrack_prefix(&insn))
704				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);
705
706		} else if (modrm_reg == 5) {
707
708			/* jmpf */
709			*type = INSN_CONTEXT_SWITCH;
710
711		} else if (modrm_reg == 6) {
712
713			/* push from mem */
714			ADD_OP(op) {
715				op->src.type = OP_SRC_CONST;
716				op->dest.type = OP_DEST_PUSH;
717			}
718		}
719
720		break;
721
722	default:
723		break;
724	}
725
726	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;
727
728	return 0;
729}
730
731void arch_initial_func_cfi_state(struct cfi_init_state *state)
732{
733	int i;
734
735	for (i = 0; i < CFI_NUM_REGS; i++) {
736		state->regs[i].base = CFI_UNDEFINED;
737		state->regs[i].offset = 0;
738	}
739
740	/* initial CFA (call frame address) */
741	state->cfa.base = CFI_SP;
742	state->cfa.offset = 8;
743
744	/* initial RA (return address) */
745	state->regs[CFI_RA].base = CFI_CFA;
746	state->regs[CFI_RA].offset = -8;
747}
748
749const char *arch_nop_insn(int len)
750{
751	static const char nops[5][5] = {
752		{ BYTES_NOP1 },
753		{ BYTES_NOP2 },
754		{ BYTES_NOP3 },
755		{ BYTES_NOP4 },
756		{ BYTES_NOP5 },
757	};
758
759	if (len < 1 || len > 5) {
760		WARN("invalid NOP size: %d\n", len);
761		return NULL;
762	}
763
764	return nops[len-1];
765}
766
767#define BYTE_RET	0xC3
 
 
 
768
769const char *arch_ret_insn(int len)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
770{
771	static const char ret[5][5] = {
772		{ BYTE_RET },
773		{ BYTE_RET, 0xcc },
774		{ BYTE_RET, 0xcc, BYTES_NOP1 },
775		{ BYTE_RET, 0xcc, BYTES_NOP2 },
776		{ BYTE_RET, 0xcc, BYTES_NOP3 },
777	};
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
778
779	if (len < 1 || len > 5) {
780		WARN("invalid RET size: %d\n", len);
781		return NULL;
 
 
782	}
783
784	return ret[len-1];
785}
786
787int arch_decode_hint_reg(u8 sp_reg, int *base)
788{
 
 
789	switch (sp_reg) {
790	case ORC_REG_UNDEFINED:
791		*base = CFI_UNDEFINED;
792		break;
793	case ORC_REG_SP:
794		*base = CFI_SP;
795		break;
796	case ORC_REG_BP:
797		*base = CFI_BP;
798		break;
799	case ORC_REG_SP_INDIRECT:
800		*base = CFI_SP_INDIRECT;
801		break;
802	case ORC_REG_R10:
803		*base = CFI_R10;
804		break;
805	case ORC_REG_R13:
806		*base = CFI_R13;
807		break;
808	case ORC_REG_DI:
809		*base = CFI_DI;
810		break;
811	case ORC_REG_DX:
812		*base = CFI_DX;
813		break;
814	default:
815		return -1;
816	}
817
818	return 0;
819}
820
821bool arch_is_retpoline(struct symbol *sym)
822{
823	return !strncmp(sym->name, "__x86_indirect_", 15);
824}
825
826bool arch_is_rethunk(struct symbol *sym)
827{
828	return !strcmp(sym->name, "__x86_return_thunk");
829}