Linux Audio

Check our new training course

Loading...
Note: File does not exist in v3.1.
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
  4 */
  5
  6#include <stdio.h>
  7#include <stdlib.h>
  8
  9#define unlikely(cond) (cond)
 10#include <asm/insn.h>
 11#include "../../../arch/x86/lib/inat.c"
 12#include "../../../arch/x86/lib/insn.c"
 13
 14#define CONFIG_64BIT 1
 15#include <asm/nops.h>
 16
 17#include <asm/orc_types.h>
 18#include <objtool/check.h>
 19#include <objtool/elf.h>
 20#include <objtool/arch.h>
 21#include <objtool/warn.h>
 22#include <objtool/endianness.h>
 23#include <arch/elf.h>
 24
 25static int is_x86_64(const struct elf *elf)
 26{
 27	switch (elf->ehdr.e_machine) {
 28	case EM_X86_64:
 29		return 1;
 30	case EM_386:
 31		return 0;
 32	default:
 33		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
 34		return -1;
 35	}
 36}
 37
 38bool arch_callee_saved_reg(unsigned char reg)
 39{
 40	switch (reg) {
 41	case CFI_BP:
 42	case CFI_BX:
 43	case CFI_R12:
 44	case CFI_R13:
 45	case CFI_R14:
 46	case CFI_R15:
 47		return true;
 48
 49	case CFI_AX:
 50	case CFI_CX:
 51	case CFI_DX:
 52	case CFI_SI:
 53	case CFI_DI:
 54	case CFI_SP:
 55	case CFI_R8:
 56	case CFI_R9:
 57	case CFI_R10:
 58	case CFI_R11:
 59	case CFI_RA:
 60	default:
 61		return false;
 62	}
 63}
 64
 65unsigned long arch_dest_reloc_offset(int addend)
 66{
 67	return addend + 4;
 68}
 69
 70unsigned long arch_jump_destination(struct instruction *insn)
 71{
 72	return insn->offset + insn->len + insn->immediate;
 73}
 74
/*
 * Allocate a stack_op, queue it on ops_list, and run the attached
 * { ... } block exactly once to fill it in: the for loop iterates a
 * single time (op is non-NULL) and then clears op.  Bails out of the
 * enclosing function with -1 if the allocation fails.
 */
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (list_add_tail(&op->list, ops_list); op; op = NULL)

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 * 00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 * 01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 * 10 |  [r/m + d32]   |[S+D]|   [r/m + d32] |
 * 11 |                   r/ m               |
 *
 * NOTE: these macros reference the modrm_*/sib_* locals of
 * arch_decode_instruction(), so they're only usable from there.
 */

#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

/* mod=00 with r/m=101 selects RIP-relative addressing (see table). */
#define is_RIP()   ((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
/* r/m=100 in any memory form means a SIB byte follows. */
#define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem())

/*
 * Is the effective r/m register 'reg'?  For SIB forms, only the trivial
 * base-with-no-index encoding (index == 100b) qualifies.
 */
#define rm_is(reg) (have_SIB() ? \
		    sib_base == (reg) && sib_index == CFI_SP : \
		    modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
104
/*
 * Decode one instruction at sec:offset.
 *
 * Outputs:
 *   *len       - instruction length in bytes
 *   *type      - classification (INSN_OTHER if not interesting)
 *   *immediate - the instruction's immediate operand, if any
 *   ops_list   - appended with stack_op entries describing any effect
 *                the instruction has on the stack frame
 *
 * Returns 0 on success, -1 on decode or allocation failure.
 */
int arch_decode_instruction(const struct elf *elf, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate,
			    struct list_head *ops_list)
{
	struct insn insn;
	int x86_64, ret;
	unsigned char op1, op2,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&insn, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	/* VEX-encoded instructions are left unclassified (INSN_OTHER). */
	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];

	/* Extract the REX.W/R/X/B bits as individual 0/1 flags. */
	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	/* ModRM reg/rm fields, widened with their REX extension bits. */
	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm  = X86_MODRM_RM(modrm)  + 8*rex_b;
	}

	if (insn.sib.nbytes) {
		sib = insn.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base  = X86_SIB_BASE(sib)  + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		/* Manually sign-extend the immediate to 64 bits when s=1. */
		imm = insn.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			/* SUB is modeled as ADD of the negated immediate. */
			imm = -imm;
			/* fallthrough */
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = insn.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = insn.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		/* LEA with a register (non-memory) operand is malformed. */
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip RIP relative displacement */
		if (is_RIP())
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = insn.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:
		/* two-byte opcodes: dispatch on the second byte */

		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				/* IRET pops 5 quadwords (SS:RSP, flags, CS:RIP). */
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */

	case 0xca: /* retf */
	case 0xcb: /* retf */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe8:
		*type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		*type = INSN_CLD;
		break;

	case 0xfd:
		*type = INSN_STD;
		break;

	case 0xff:
		/* group 5: operation selected by the ModRM reg field */
		if (modrm_reg == 2 || modrm_reg == 3)

			*type = INSN_CALL_DYNAMIC;

		else if (modrm_reg == 4)

			*type = INSN_JUMP_DYNAMIC;

		else if (modrm_reg == 5)

			/* jmpf */
			*type = INSN_CONTEXT_SWITCH;

		else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	/* Report the raw immediate (0 when the insn encodes none). */
	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}
625
626void arch_initial_func_cfi_state(struct cfi_init_state *state)
627{
628	int i;
629
630	for (i = 0; i < CFI_NUM_REGS; i++) {
631		state->regs[i].base = CFI_UNDEFINED;
632		state->regs[i].offset = 0;
633	}
634
635	/* initial CFA (call frame address) */
636	state->cfa.base = CFI_SP;
637	state->cfa.offset = 8;
638
639	/* initial RA (return address) */
640	state->regs[CFI_RA].base = CFI_CFA;
641	state->regs[CFI_RA].offset = -8;
642}
643
644const char *arch_nop_insn(int len)
645{
646	static const char nops[5][5] = {
647		{ BYTES_NOP1 },
648		{ BYTES_NOP2 },
649		{ BYTES_NOP3 },
650		{ BYTES_NOP4 },
651		{ BYTES_NOP5 },
652	};
653
654	if (len < 1 || len > 5) {
655		WARN("invalid NOP size: %d\n", len);
656		return NULL;
657	}
658
659	return nops[len-1];
660}
661
/* asm/alternative.h ? */

/* Flag bit marking an "apply when feature is NOT set" alternative. */
#define ALTINSTR_FLAG_INV	(1 << 15)
#define ALT_NOT(feat)		((feat) | ALTINSTR_FLAG_INV)

/*
 * On-disk layout of one .altinstructions entry.  Presumably mirrors
 * the kernel's struct alt_instr (see the note above) — the two must
 * stay in sync, hence __packed.
 */
struct alt_instr {
	s32 instr_offset;	/* original instruction */
	s32 repl_offset;	/* offset to replacement instruction */
	u16 cpuid;		/* cpuid bit set for replacement */
	u8  instrlen;		/* length of original instruction */
	u8  replacementlen;	/* length of new instruction */
} __packed;
674
/*
 * Append one alt_instr entry to the .altinstructions section (creating
 * the section on first use).  The instr_offset and repl_offset fields
 * are not written directly; instead, PC32 relocations are emitted at
 * their positions (offsets sh_size and sh_size+4) so the linker fills
 * them in.  Returns 0 on success, -1 on any libelf/allocation failure.
 */
static int elf_add_alternative(struct elf *elf,
			       struct instruction *orig, struct symbol *sym,
			       int cpuid, u8 orig_len, u8 repl_len)
{
	const int size = sizeof(struct alt_instr);
	struct alt_instr *alt;
	struct section *sec;
	Elf_Scn *s;

	sec = find_section_by_name(elf, ".altinstructions");
	if (!sec) {
		sec = elf_create_section(elf, ".altinstructions",
					 SHF_ALLOC, 0, 0);

		if (!sec) {
			WARN_ELF("elf_create_section");
			return -1;
		}
	}

	s = elf_getscn(elf->elf, sec->idx);
	if (!s) {
		WARN_ELF("elf_getscn");
		return -1;
	}

	/* A fresh Elf_Data buffer per entry; libelf chains them. */
	sec->data = elf_newdata(s);
	if (!sec->data) {
		WARN_ELF("elf_newdata");
		return -1;
	}

	sec->data->d_size = size;
	sec->data->d_align = 1;

	alt = sec->data->d_buf = malloc(size);
	if (!sec->data->d_buf) {
		perror("malloc");
		return -1;
	}
	memset(sec->data->d_buf, 0, size);

	/* alt_instr::instr_offset -> the original instruction */
	if (elf_add_reloc_to_insn(elf, sec, sec->sh.sh_size,
				  R_X86_64_PC32, orig->sec, orig->offset)) {
		WARN("elf_create_reloc: alt_instr::instr_offset");
		return -1;
	}

	/* alt_instr::repl_offset -> the replacement symbol */
	if (elf_add_reloc(elf, sec, sec->sh.sh_size + 4,
			  R_X86_64_PC32, sym, 0)) {
		WARN("elf_create_reloc: alt_instr::repl_offset");
		return -1;
	}

	/*
	 * NOTE(review): int -> u16 narrows cpuid; the ALT_NOT() flag
	 * (bit 15) still fits, but higher bits would be lost — confirm
	 * callers never pass wider feature values.
	 */
	alt->cpuid = bswap_if_needed(cpuid);
	alt->instrlen = orig_len;
	alt->replacementlen = repl_len;

	sec->sh.sh_size += size;
	sec->changed = true;

	return 0;
}
738
#define X86_FEATURE_RETPOLINE                ( 7*32+12)

/*
 * For every indirect call/jump that goes through a retpoline thunk,
 * emit an .altinstructions entry pairing it with the corresponding
 * __x86_indirect_alt_{jmp,call}_<reg> replacement symbol, flagged
 * ALT_NOT(X86_FEATURE_RETPOLINE).  Returns 0 on success, -1 on error.
 */
int arch_rewrite_retpolines(struct objtool_file *file)
{
	struct instruction *insn;
	struct reloc *reloc;
	struct symbol *sym;
	char name[32] = "";

	list_for_each_entry(insn, &file->retpoline_call_list, call_node) {

		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC)
			continue;

		/* Don't rewrite the thunk bodies themselves. */
		if (!strcmp(insn->sec->name, ".text.__x86.indirect_thunk"))
			continue;

		reloc = insn->reloc;

		/* "+ 21" skips the "__x86_indirect_thunk_" prefix (21 chars),
		   leaving just the register name. */
		sprintf(name, "__x86_indirect_alt_%s_%s",
			insn->type == INSN_JUMP_DYNAMIC ? "jmp" : "call",
			reloc->sym->name + 21);

		sym = find_symbol_by_name(file->elf, name);
		if (!sym) {
			sym = elf_create_undef_symbol(file->elf, name);
			if (!sym) {
				WARN("elf_create_undef_symbol");
				return -1;
			}
		}

		/* Original and replacement are both 5 bytes here. */
		if (elf_add_alternative(file->elf, insn, sym,
					ALT_NOT(X86_FEATURE_RETPOLINE), 5, 5)) {
			WARN("elf_add_alternative");
			return -1;
		}
	}

	return 0;
}
781
782int arch_decode_hint_reg(struct instruction *insn, u8 sp_reg)
783{
784	struct cfi_reg *cfa = &insn->cfi.cfa;
785
786	switch (sp_reg) {
787	case ORC_REG_UNDEFINED:
788		cfa->base = CFI_UNDEFINED;
789		break;
790	case ORC_REG_SP:
791		cfa->base = CFI_SP;
792		break;
793	case ORC_REG_BP:
794		cfa->base = CFI_BP;
795		break;
796	case ORC_REG_SP_INDIRECT:
797		cfa->base = CFI_SP_INDIRECT;
798		break;
799	case ORC_REG_R10:
800		cfa->base = CFI_R10;
801		break;
802	case ORC_REG_R13:
803		cfa->base = CFI_R13;
804		break;
805	case ORC_REG_DI:
806		cfa->base = CFI_DI;
807		break;
808	case ORC_REG_DX:
809		cfa->base = CFI_DX;
810		break;
811	default:
812		return -1;
813	}
814
815	return 0;
816}
817
818bool arch_is_retpoline(struct symbol *sym)
819{
820	return !strncmp(sym->name, "__x86_indirect_", 15);
821}