tools/objtool/arch/x86/decode.c (v5.4)
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
  4 */
  5
  6#include <stdio.h>
  7#include <stdlib.h>
  8
  9#define unlikely(cond) (cond)
 10#include <asm/insn.h>
 11#include "../../../arch/x86/lib/inat.c"
 12#include "../../../arch/x86/lib/insn.c"
 13
 14#include "../../elf.h"
 15#include "../../arch.h"
 16#include "../../warn.h"
 17
 18static unsigned char op_to_cfi_reg[][2] = {
 19	{CFI_AX, CFI_R8},
 20	{CFI_CX, CFI_R9},
 21	{CFI_DX, CFI_R10},
 22	{CFI_BX, CFI_R11},
 23	{CFI_SP, CFI_R12},
 24	{CFI_BP, CFI_R13},
 25	{CFI_SI, CFI_R14},
 26	{CFI_DI, CFI_R15},
 27};
 28
 29static int is_x86_64(struct elf *elf)
 30{
 31	switch (elf->ehdr.e_machine) {
 32	case EM_X86_64:
 33		return 1;
 34	case EM_386:
 35		return 0;
 36	default:
 37		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
 38		return -1;
 39	}
 40}
 41
 42bool arch_callee_saved_reg(unsigned char reg)
 43{
 44	switch (reg) {
 45	case CFI_BP:
 46	case CFI_BX:
 47	case CFI_R12:
 48	case CFI_R13:
 49	case CFI_R14:
 50	case CFI_R15:
 51		return true;
 52
 53	case CFI_AX:
 54	case CFI_CX:
 55	case CFI_DX:
 56	case CFI_SI:
 57	case CFI_DI:
 58	case CFI_SP:
 59	case CFI_R8:
 60	case CFI_R9:
 61	case CFI_R10:
 62	case CFI_R11:
 63	case CFI_RA:
 64	default:
 65		return false;
 66	}
 67}
 68
 69int arch_decode_instruction(struct elf *elf, struct section *sec,
 70			    unsigned long offset, unsigned int maxlen,
 71			    unsigned int *len, enum insn_type *type,
 72			    unsigned long *immediate, struct stack_op *op)
 73{
 74	struct insn insn;
 75	int x86_64, sign;
 76	unsigned char op1, op2, rex = 0, rex_b = 0, rex_r = 0, rex_w = 0,
 77		      rex_x = 0, modrm = 0, modrm_mod = 0, modrm_rm = 0,
 78		      modrm_reg = 0, sib = 0;
 79
 80	x86_64 = is_x86_64(elf);
 81	if (x86_64 == -1)
 82		return -1;
 83
 84	insn_init(&insn, sec->data->d_buf + offset, maxlen, x86_64);
 85	insn_get_length(&insn);
 86
 87	if (!insn_complete(&insn)) {
 88		WARN_FUNC("can't decode instruction", sec, offset);
 89		return -1;
 90	}
 91
 92	*len = insn.length;
 93	*type = INSN_OTHER;
 94
 95	if (insn.vex_prefix.nbytes)
 96		return 0;
 97
 98	op1 = insn.opcode.bytes[0];
 99	op2 = insn.opcode.bytes[1];
100
101	if (insn.rex_prefix.nbytes) {
102		rex = insn.rex_prefix.bytes[0];
103		rex_w = X86_REX_W(rex) >> 3;
104		rex_r = X86_REX_R(rex) >> 2;
105		rex_x = X86_REX_X(rex) >> 1;
106		rex_b = X86_REX_B(rex);
107	}
108
109	if (insn.modrm.nbytes) {
110		modrm = insn.modrm.bytes[0];
111		modrm_mod = X86_MODRM_MOD(modrm);
112		modrm_reg = X86_MODRM_REG(modrm);
113		modrm_rm = X86_MODRM_RM(modrm);
114	}
115
116	if (insn.sib.nbytes)
117		sib = insn.sib.bytes[0];
118
119	switch (op1) {
120
121	case 0x1:
122	case 0x29:
123		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {
124
125			/* add/sub reg, %rsp */
126			*type = INSN_STACK;
127			op->src.type = OP_SRC_ADD;
128			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
129			op->dest.type = OP_DEST_REG;
130			op->dest.reg = CFI_SP;
131		}
132		break;
133
134	case 0x50 ... 0x57:
135
136		/* push reg */
137		*type = INSN_STACK;
138		op->src.type = OP_SRC_REG;
139		op->src.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
140		op->dest.type = OP_DEST_PUSH;
141
142		break;
143
144	case 0x58 ... 0x5f:
145
146		/* pop reg */
147		*type = INSN_STACK;
148		op->src.type = OP_SRC_POP;
149		op->dest.type = OP_DEST_REG;
150		op->dest.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
151
152		break;
153
154	case 0x68:
155	case 0x6a:
156		/* push immediate */
157		*type = INSN_STACK;
158		op->src.type = OP_SRC_CONST;
159		op->dest.type = OP_DEST_PUSH;
160		break;
161
162	case 0x70 ... 0x7f:
163		*type = INSN_JUMP_CONDITIONAL;
164		break;
165
166	case 0x81:
167	case 0x83:
168		if (rex != 0x48)
169			break;
170
171		if (modrm == 0xe4) {
172			/* and imm, %rsp */
173			*type = INSN_STACK;
174			op->src.type = OP_SRC_AND;
175			op->src.reg = CFI_SP;
176			op->src.offset = insn.immediate.value;
177			op->dest.type = OP_DEST_REG;
178			op->dest.reg = CFI_SP;
179			break;
180		}
181
182		if (modrm == 0xc4)
183			sign = 1;
184		else if (modrm == 0xec)
185			sign = -1;
186		else
187			break;
188
189		/* add/sub imm, %rsp */
190		*type = INSN_STACK;
191		op->src.type = OP_SRC_ADD;
192		op->src.reg = CFI_SP;
193		op->src.offset = insn.immediate.value * sign;
194		op->dest.type = OP_DEST_REG;
195		op->dest.reg = CFI_SP;
196		break;
197
198	case 0x89:
199		if (rex_w && !rex_r && modrm_mod == 3 && modrm_reg == 4) {
200
201			/* mov %rsp, reg */
202			*type = INSN_STACK;
203			op->src.type = OP_SRC_REG;
204			op->src.reg = CFI_SP;
205			op->dest.type = OP_DEST_REG;
206			op->dest.reg = op_to_cfi_reg[modrm_rm][rex_b];
207			break;
208		}
209
210		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {
211
212			/* mov reg, %rsp */
213			*type = INSN_STACK;
214			op->src.type = OP_SRC_REG;
215			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
216			op->dest.type = OP_DEST_REG;
217			op->dest.reg = CFI_SP;
218			break;
219		}
220
221		/* fallthrough */
222	case 0x88:
223		if (!rex_b &&
224		    (modrm_mod == 1 || modrm_mod == 2) && modrm_rm == 5) {
225
226			/* mov reg, disp(%rbp) */
227			*type = INSN_STACK;
228			op->src.type = OP_SRC_REG;
229			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
230			op->dest.type = OP_DEST_REG_INDIRECT;
231			op->dest.reg = CFI_BP;
232			op->dest.offset = insn.displacement.value;
233
234		} else if (rex_w && !rex_b && modrm_rm == 4 && sib == 0x24) {
235
236			/* mov reg, disp(%rsp) */
237			*type = INSN_STACK;
238			op->src.type = OP_SRC_REG;
239			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
240			op->dest.type = OP_DEST_REG_INDIRECT;
241			op->dest.reg = CFI_SP;
242			op->dest.offset = insn.displacement.value;
243		}
244
245		break;
246
247	case 0x8b:
248		if (rex_w && !rex_b && modrm_mod == 1 && modrm_rm == 5) {
249
250			/* mov disp(%rbp), reg */
251			*type = INSN_STACK;
252			op->src.type = OP_SRC_REG_INDIRECT;
253			op->src.reg = CFI_BP;
254			op->src.offset = insn.displacement.value;
255			op->dest.type = OP_DEST_REG;
256			op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
257
258		} else if (rex_w && !rex_b && sib == 0x24 &&
259			   modrm_mod != 3 && modrm_rm == 4) {
260
261			/* mov disp(%rsp), reg */
262			*type = INSN_STACK;
263			op->src.type = OP_SRC_REG_INDIRECT;
264			op->src.reg = CFI_SP;
265			op->src.offset = insn.displacement.value;
266			op->dest.type = OP_DEST_REG;
267			op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
268		}
269
270		break;
271
272	case 0x8d:
273		if (sib == 0x24 && rex_w && !rex_b && !rex_x) {
274
275			*type = INSN_STACK;
276			if (!insn.displacement.value) {
277				/* lea (%rsp), reg */
278				op->src.type = OP_SRC_REG;
279			} else {
280				/* lea disp(%rsp), reg */
281				op->src.type = OP_SRC_ADD;
282				op->src.offset = insn.displacement.value;
283			}
284			op->src.reg = CFI_SP;
285			op->dest.type = OP_DEST_REG;
286			op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
287
288		} else if (rex == 0x48 && modrm == 0x65) {
289
290			/* lea disp(%rbp), %rsp */
291			*type = INSN_STACK;
292			op->src.type = OP_SRC_ADD;
293			op->src.reg = CFI_BP;
294			op->src.offset = insn.displacement.value;
295			op->dest.type = OP_DEST_REG;
296			op->dest.reg = CFI_SP;
297
298		} else if (rex == 0x49 && modrm == 0x62 &&
299			   insn.displacement.value == -8) {
300
301			/*
302			 * lea -0x8(%r10), %rsp
303			 *
304			 * Restoring rsp back to its original value after a
305			 * stack realignment.
306			 */
307			*type = INSN_STACK;
308			op->src.type = OP_SRC_ADD;
309			op->src.reg = CFI_R10;
310			op->src.offset = -8;
311			op->dest.type = OP_DEST_REG;
312			op->dest.reg = CFI_SP;
313
314		} else if (rex == 0x49 && modrm == 0x65 &&
315			   insn.displacement.value == -16) {
316
317			/*
318			 * lea -0x10(%r13), %rsp
319			 *
320			 * Restoring rsp back to its original value after a
321			 * stack realignment.
322			 */
323			*type = INSN_STACK;
324			op->src.type = OP_SRC_ADD;
325			op->src.reg = CFI_R13;
326			op->src.offset = -16;
327			op->dest.type = OP_DEST_REG;
328			op->dest.reg = CFI_SP;
329		}
330
331		break;
332
333	case 0x8f:
334		/* pop to mem */
335		*type = INSN_STACK;
336		op->src.type = OP_SRC_POP;
337		op->dest.type = OP_DEST_MEM;
338		break;
339
340	case 0x90:
341		*type = INSN_NOP;
342		break;
343
344	case 0x9c:
345		/* pushf */
346		*type = INSN_STACK;
347		op->src.type = OP_SRC_CONST;
348		op->dest.type = OP_DEST_PUSHF;
349		break;
350
351	case 0x9d:
352		/* popf */
353		*type = INSN_STACK;
354		op->src.type = OP_SRC_POPF;
355		op->dest.type = OP_DEST_MEM;
356		break;
357
358	case 0x0f:
359
360		if (op2 == 0x01) {
361
362			if (modrm == 0xca)
363				*type = INSN_CLAC;
364			else if (modrm == 0xcb)
365				*type = INSN_STAC;
366
367		} else if (op2 >= 0x80 && op2 <= 0x8f) {
368
369			*type = INSN_JUMP_CONDITIONAL;
370
371		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
372			   op2 == 0x35) {
373
374			/* sysenter, sysret */
375			*type = INSN_CONTEXT_SWITCH;
376
377		} else if (op2 == 0x0b || op2 == 0xb9) {
378
379			/* ud2 */
380			*type = INSN_BUG;
381
382		} else if (op2 == 0x0d || op2 == 0x1f) {
383
384			/* nopl/nopw */
385			*type = INSN_NOP;
386
387		} else if (op2 == 0xa0 || op2 == 0xa8) {
388
389			/* push fs/gs */
390			*type = INSN_STACK;
391			op->src.type = OP_SRC_CONST;
392			op->dest.type = OP_DEST_PUSH;
393
394		} else if (op2 == 0xa1 || op2 == 0xa9) {
395
396			/* pop fs/gs */
397			*type = INSN_STACK;
398			op->src.type = OP_SRC_POP;
399			op->dest.type = OP_DEST_MEM;
400		}
401
402		break;
403
404	case 0xc9:
405		/*
406		 * leave
407		 *
408		 * equivalent to:
409		 * mov bp, sp
410		 * pop bp
411		 */
412		*type = INSN_STACK;
413		op->dest.type = OP_DEST_LEAVE;
414
415		break;
416
417	case 0xe3:
418		/* jecxz/jrcxz */
419		*type = INSN_JUMP_CONDITIONAL;
420		break;
421
422	case 0xe9:
423	case 0xeb:
424		*type = INSN_JUMP_UNCONDITIONAL;
425		break;
426
427	case 0xc2:
428	case 0xc3:
429		*type = INSN_RETURN;
430		break;
431
432	case 0xca: /* retf */
433	case 0xcb: /* retf */
434	case 0xcf: /* iret */
435		*type = INSN_CONTEXT_SWITCH;
436		break;
437
438	case 0xe8:
439		*type = INSN_CALL;
440		break;
441
442	case 0xfc:
443		*type = INSN_CLD;
444		break;
445
446	case 0xfd:
447		*type = INSN_STD;
448		break;
449
450	case 0xff:
451		if (modrm_reg == 2 || modrm_reg == 3)
452
453			*type = INSN_CALL_DYNAMIC;
454
455		else if (modrm_reg == 4)
456
457			*type = INSN_JUMP_DYNAMIC;
458
459		else if (modrm_reg == 5)
460
461			/* jmpf */
462			*type = INSN_CONTEXT_SWITCH;
463
464		else if (modrm_reg == 6) {
465
466			/* push from mem */
467			*type = INSN_STACK;
468			op->src.type = OP_SRC_CONST;
469			op->dest.type = OP_DEST_PUSH;
470		}
471
472		break;
473
474	default:
475		break;
476	}
477
478	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;
479
480	return 0;
481}
482
483void arch_initial_func_cfi_state(struct cfi_state *state)
484{
485	int i;
486
487	for (i = 0; i < CFI_NUM_REGS; i++) {
488		state->regs[i].base = CFI_UNDEFINED;
489		state->regs[i].offset = 0;
490	}
491
492	/* initial CFA (call frame address) */
493	state->cfa.base = CFI_SP;
494	state->cfa.offset = 8;
495
496	/* initial RA (return address) */
497	state->regs[16].base = CFI_CFA;
498	state->regs[16].offset = -8;
499}
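
Both versions of arch_decode_instruction() start by splitting the REX prefix and the ModRM byte into their bit fields (rex_w/rex_r/rex_x/rex_b, modrm_mod/modrm_reg/modrm_rm) and only then dispatch on the primary opcode byte. The short standalone program below is a minimal sketch of that first step for one instruction, push %r12 (bytes 41 54): it assumes the usual x86 REX layout (0100WRXB, consistent with the >> 3 ... >> 0 shifts used in the listing) and swaps the CFI_* constants of op_to_cfi_reg[][] for plain register-name strings so it compiles on its own. It is an illustration, not objtool code; in the v6.8 version below, the same lookup is written directly as (op1 & 0x7) + 8*rex_b.

/*
 * Illustrative only -- not part of objtool.  Mirrors the REX handling of
 * the "case 0x50 ... 0x57: push reg" branch above, with a string table
 * standing in for op_to_cfi_reg[][].
 */
#include <stdio.h>

/* REX prefix is 0100WRXB; these match the shifts used by the decoder. */
#define X86_REX_W(rex) ((rex) & 0x8)	/* 64-bit operand size */
#define X86_REX_R(rex) ((rex) & 0x4)	/* extends ModRM.reg */
#define X86_REX_X(rex) ((rex) & 0x2)	/* extends SIB.index */
#define X86_REX_B(rex) ((rex) & 0x1)	/* extends ModRM.rm / opcode reg */

/* Same layout as op_to_cfi_reg[][]: column 0 without REX.B, column 1 with. */
static const char * const reg_name[][2] = {
	{ "rax", "r8"  }, { "rcx", "r9"  }, { "rdx", "r10" }, { "rbx", "r11" },
	{ "rsp", "r12" }, { "rbp", "r13" }, { "rsi", "r14" }, { "rdi", "r15" },
};

int main(void)
{
	unsigned char rex = 0x41, op1 = 0x54;	/* encoding of "push %r12" */
	unsigned char rex_b = X86_REX_B(rex);	/* 1: selects r8-r15 */

	/* The 0x50...0x57 case keys the table on the low 3 opcode bits. */
	printf("push %%%s\n", reg_name[op1 & 0x7][rex_b]);
	return 0;
}
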
tools/objtool/arch/x86/decode.c (v6.8)
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
  4 */
  5
  6#include <stdio.h>
  7#include <stdlib.h>
  8
  9#define unlikely(cond) (cond)
 10#include <asm/insn.h>
 11#include "../../../arch/x86/lib/inat.c"
 12#include "../../../arch/x86/lib/insn.c"
 13
 14#define CONFIG_64BIT 1
 15#include <asm/nops.h>
 16
 17#include <asm/orc_types.h>
 18#include <objtool/check.h>
 19#include <objtool/elf.h>
 20#include <objtool/arch.h>
 21#include <objtool/warn.h>
 22#include <objtool/endianness.h>
 23#include <objtool/builtin.h>
 24#include <arch/elf.h>
 25
 26int arch_ftrace_match(char *name)
 27{
 28	return !strcmp(name, "__fentry__");
 29}
 30
 31static int is_x86_64(const struct elf *elf)
 32{
 33	switch (elf->ehdr.e_machine) {
 34	case EM_X86_64:
 35		return 1;
 36	case EM_386:
 37		return 0;
 38	default:
 39		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
 40		return -1;
 41	}
 42}
 43
 44bool arch_callee_saved_reg(unsigned char reg)
 45{
 46	switch (reg) {
 47	case CFI_BP:
 48	case CFI_BX:
 49	case CFI_R12:
 50	case CFI_R13:
 51	case CFI_R14:
 52	case CFI_R15:
 53		return true;
 54
 55	case CFI_AX:
 56	case CFI_CX:
 57	case CFI_DX:
 58	case CFI_SI:
 59	case CFI_DI:
 60	case CFI_SP:
 61	case CFI_R8:
 62	case CFI_R9:
 63	case CFI_R10:
 64	case CFI_R11:
 65	case CFI_RA:
 66	default:
 67		return false;
 68	}
 69}
 70
 71unsigned long arch_dest_reloc_offset(int addend)
 72{
 73	return addend + 4;
 74}
 75
 76unsigned long arch_jump_destination(struct instruction *insn)
 77{
 78	return insn->offset + insn->len + insn->immediate;
 79}
 80
 81bool arch_pc_relative_reloc(struct reloc *reloc)
 82{
 83	/*
 84	 * All relocation types where P (the address of the target)
 85	 * is included in the computation.
 86	 */
 87	switch (reloc_type(reloc)) {
 88	case R_X86_64_PC8:
 89	case R_X86_64_PC16:
 90	case R_X86_64_PC32:
 91	case R_X86_64_PC64:
 92
 93	case R_X86_64_PLT32:
 94	case R_X86_64_GOTPC32:
 95	case R_X86_64_GOTPCREL:
 96		return true;
 97
 98	default:
 99		break;
100	}
101
102	return false;
103}
104
105#define ADD_OP(op) \
106	if (!(op = calloc(1, sizeof(*op)))) \
107		return -1; \
108	else for (*ops_list = op, ops_list = &op->next; op; op = NULL)
109
110/*
111 * Helpers to decode ModRM/SIB:
112 *
113 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
114 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
115 * Mod+----------------+-----+-----+---------+
116 * 00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
117 * 01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
118 * 10 |  [r/m + d32]   |[S+D]|   [r/m + d32] |
119 * 11 |                   r/ m               |
120 */
121
122#define mod_is_mem()	(modrm_mod != 3)
123#define mod_is_reg()	(modrm_mod == 3)
124
125#define is_RIP()   ((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
126#define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem())
127
128#define rm_is(reg) (have_SIB() ? \
129		    sib_base == (reg) && sib_index == CFI_SP : \
130		    modrm_rm == (reg))
131
132#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
133#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
134
135static bool has_notrack_prefix(struct insn *insn)
136{
137	int i;
138
139	for (i = 0; i < insn->prefixes.nbytes; i++) {
140		if (insn->prefixes.bytes[i] == 0x3e)
141			return true;
142	}
143
144	return false;
145}
146
147int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
148			    unsigned long offset, unsigned int maxlen,
149			    struct instruction *insn)
150{
151	struct stack_op **ops_list = &insn->stack_ops;
152	const struct elf *elf = file->elf;
153	struct insn ins;
154	int x86_64, ret;
155	unsigned char op1, op2, op3, prefix,
156		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
157		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
158		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
159	struct stack_op *op = NULL;
160	struct symbol *sym;
161	u64 imm;
162
163	x86_64 = is_x86_64(elf);
164	if (x86_64 == -1)
165		return -1;
166
167	ret = insn_decode(&ins, sec->data->d_buf + offset, maxlen,
168			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
169	if (ret < 0) {
170		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
171		return -1;
172	}
173
174	insn->len = ins.length;
175	insn->type = INSN_OTHER;
176
177	if (ins.vex_prefix.nbytes)
178		return 0;
179
180	prefix = ins.prefixes.bytes[0];
181
182	op1 = ins.opcode.bytes[0];
183	op2 = ins.opcode.bytes[1];
184	op3 = ins.opcode.bytes[2];
185
186	if (ins.rex_prefix.nbytes) {
187		rex = ins.rex_prefix.bytes[0];
188		rex_w = X86_REX_W(rex) >> 3;
189		rex_r = X86_REX_R(rex) >> 2;
190		rex_x = X86_REX_X(rex) >> 1;
191		rex_b = X86_REX_B(rex);
192	}
193
194	if (ins.modrm.nbytes) {
195		modrm = ins.modrm.bytes[0];
196		modrm_mod = X86_MODRM_MOD(modrm);
197		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
198		modrm_rm  = X86_MODRM_RM(modrm)  + 8*rex_b;
199	}
200
201	if (ins.sib.nbytes) {
202		sib = ins.sib.bytes[0];
203		/* sib_scale = X86_SIB_SCALE(sib); */
204		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
205		sib_base  = X86_SIB_BASE(sib)  + 8*rex_b;
206	}
207
208	switch (op1) {
209
210	case 0x1:
211	case 0x29:
212		if (rex_w && rm_is_reg(CFI_SP)) {
213
214			/* add/sub reg, %rsp */
215			ADD_OP(op) {
216				op->src.type = OP_SRC_ADD;
217				op->src.reg = modrm_reg;
218				op->dest.type = OP_DEST_REG;
219				op->dest.reg = CFI_SP;
220			}
221		}
222		break;
223
224	case 0x50 ... 0x57:
225
226		/* push reg */
227		ADD_OP(op) {
228			op->src.type = OP_SRC_REG;
229			op->src.reg = (op1 & 0x7) + 8*rex_b;
230			op->dest.type = OP_DEST_PUSH;
231		}
232
233		break;
234
235	case 0x58 ... 0x5f:
236
237		/* pop reg */
238		ADD_OP(op) {
239			op->src.type = OP_SRC_POP;
240			op->dest.type = OP_DEST_REG;
241			op->dest.reg = (op1 & 0x7) + 8*rex_b;
242		}
243
244		break;
245
246	case 0x68:
247	case 0x6a:
248		/* push immediate */
249		ADD_OP(op) {
250			op->src.type = OP_SRC_CONST;
251			op->dest.type = OP_DEST_PUSH;
252		}
253		break;
254
255	case 0x70 ... 0x7f:
256		insn->type = INSN_JUMP_CONDITIONAL;
257		break;
258
259	case 0x80 ... 0x83:
260		/*
261		 * 1000 00sw : mod OP r/m : immediate
262		 *
263		 * s - sign extend immediate
264		 * w - imm8 / imm32
265		 *
266		 * OP: 000 ADD    100 AND
267		 *     001 OR     101 SUB
268		 *     010 ADC    110 XOR
269		 *     011 SBB    111 CMP
270		 */
271
272		/* 64bit only */
273		if (!rex_w)
274			break;
275
276		/* %rsp target only */
277		if (!rm_is_reg(CFI_SP))
278			break;
279
280		imm = ins.immediate.value;
281		if (op1 & 2) { /* sign extend */
282			if (op1 & 1) { /* imm32 */
283				imm <<= 32;
284				imm = (s64)imm >> 32;
285			} else { /* imm8 */
286				imm <<= 56;
287				imm = (s64)imm >> 56;
288			}
289		}
290
291		switch (modrm_reg & 7) {
292		case 5:
293			imm = -imm;
294			fallthrough;
295		case 0:
296			/* add/sub imm, %rsp */
297			ADD_OP(op) {
298				op->src.type = OP_SRC_ADD;
299				op->src.reg = CFI_SP;
300				op->src.offset = imm;
301				op->dest.type = OP_DEST_REG;
302				op->dest.reg = CFI_SP;
303			}
304			break;
305
306		case 4:
307			/* and imm, %rsp */
308			ADD_OP(op) {
309				op->src.type = OP_SRC_AND;
310				op->src.reg = CFI_SP;
311				op->src.offset = ins.immediate.value;
312				op->dest.type = OP_DEST_REG;
313				op->dest.reg = CFI_SP;
314			}
315			break;
316
317		default:
318			/* WARN ? */
319			break;
320		}
321
322		break;
323
324	case 0x89:
325		if (!rex_w)
326			break;
327
328		if (modrm_reg == CFI_SP) {
329
330			if (mod_is_reg()) {
331				/* mov %rsp, reg */
332				ADD_OP(op) {
333					op->src.type = OP_SRC_REG;
334					op->src.reg = CFI_SP;
335					op->dest.type = OP_DEST_REG;
336					op->dest.reg = modrm_rm;
337				}
338				break;
339
340			} else {
341				/* skip RIP relative displacement */
342				if (is_RIP())
343					break;
344
345				/* skip nontrivial SIB */
346				if (have_SIB()) {
347					modrm_rm = sib_base;
348					if (sib_index != CFI_SP)
349						break;
350				}
351
352				/* mov %rsp, disp(%reg) */
353				ADD_OP(op) {
354					op->src.type = OP_SRC_REG;
355					op->src.reg = CFI_SP;
356					op->dest.type = OP_DEST_REG_INDIRECT;
357					op->dest.reg = modrm_rm;
358					op->dest.offset = ins.displacement.value;
359				}
360				break;
361			}
362
363			break;
364		}
365
366		if (rm_is_reg(CFI_SP)) {
367
368			/* mov reg, %rsp */
369			ADD_OP(op) {
370				op->src.type = OP_SRC_REG;
371				op->src.reg = modrm_reg;
372				op->dest.type = OP_DEST_REG;
373				op->dest.reg = CFI_SP;
374			}
375			break;
376		}
377
378		fallthrough;
379	case 0x88:
380		if (!rex_w)
381			break;
382
383		if (rm_is_mem(CFI_BP)) {
384
385			/* mov reg, disp(%rbp) */
386			ADD_OP(op) {
387				op->src.type = OP_SRC_REG;
388				op->src.reg = modrm_reg;
389				op->dest.type = OP_DEST_REG_INDIRECT;
390				op->dest.reg = CFI_BP;
391				op->dest.offset = ins.displacement.value;
392			}
393			break;
394		}
395
396		if (rm_is_mem(CFI_SP)) {
397
398			/* mov reg, disp(%rsp) */
399			ADD_OP(op) {
400				op->src.type = OP_SRC_REG;
401				op->src.reg = modrm_reg;
402				op->dest.type = OP_DEST_REG_INDIRECT;
403				op->dest.reg = CFI_SP;
404				op->dest.offset = ins.displacement.value;
405			}
406			break;
407		}
408
409		break;
410
411	case 0x8b:
412		if (!rex_w)
413			break;
414
415		if (rm_is_mem(CFI_BP)) {
416
417			/* mov disp(%rbp), reg */
418			ADD_OP(op) {
419				op->src.type = OP_SRC_REG_INDIRECT;
420				op->src.reg = CFI_BP;
421				op->src.offset = ins.displacement.value;
422				op->dest.type = OP_DEST_REG;
423				op->dest.reg = modrm_reg;
424			}
425			break;
426		}
427
428		if (rm_is_mem(CFI_SP)) {
429
430			/* mov disp(%rsp), reg */
431			ADD_OP(op) {
432				op->src.type = OP_SRC_REG_INDIRECT;
433				op->src.reg = CFI_SP;
434				op->src.offset = ins.displacement.value;
435				op->dest.type = OP_DEST_REG;
436				op->dest.reg = modrm_reg;
437			}
438			break;
439		}
440
441		break;
442
443	case 0x8d:
444		if (mod_is_reg()) {
445			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
446			break;
447		}
448
449		/* skip non 64bit ops */
450		if (!rex_w)
451			break;
452
453		/* skip RIP relative displacement */
454		if (is_RIP())
455			break;
456
457		/* skip nontrivial SIB */
458		if (have_SIB()) {
459			modrm_rm = sib_base;
460			if (sib_index != CFI_SP)
461				break;
462		}
463
464		/* lea disp(%src), %dst */
465		ADD_OP(op) {
466			op->src.offset = ins.displacement.value;
467			if (!op->src.offset) {
468				/* lea (%src), %dst */
469				op->src.type = OP_SRC_REG;
470			} else {
471				/* lea disp(%src), %dst */
472				op->src.type = OP_SRC_ADD;
473			}
474			op->src.reg = modrm_rm;
475			op->dest.type = OP_DEST_REG;
476			op->dest.reg = modrm_reg;
477		}
478		break;
479
480	case 0x8f:
481		/* pop to mem */
482		ADD_OP(op) {
483			op->src.type = OP_SRC_POP;
484			op->dest.type = OP_DEST_MEM;
485		}
486		break;
487
488	case 0x90:
489		insn->type = INSN_NOP;
490		break;
491
492	case 0x9c:
493		/* pushf */
494		ADD_OP(op) {
495			op->src.type = OP_SRC_CONST;
496			op->dest.type = OP_DEST_PUSHF;
497		}
498		break;
499
500	case 0x9d:
501		/* popf */
502		ADD_OP(op) {
503			op->src.type = OP_SRC_POPF;
504			op->dest.type = OP_DEST_MEM;
505		}
506		break;
507
508	case 0x0f:
509
510		if (op2 == 0x01) {
511
512			if (modrm == 0xca)
513				insn->type = INSN_CLAC;
514			else if (modrm == 0xcb)
515				insn->type = INSN_STAC;
516
517		} else if (op2 >= 0x80 && op2 <= 0x8f) {
518
519			insn->type = INSN_JUMP_CONDITIONAL;
520
521		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
522			   op2 == 0x35) {
523
524			/* sysenter, sysret */
525			insn->type = INSN_CONTEXT_SWITCH;
526
527		} else if (op2 == 0x0b || op2 == 0xb9) {
528
529			/* ud2 */
530			insn->type = INSN_BUG;
531
532		} else if (op2 == 0x0d || op2 == 0x1f) {
533
534			/* nopl/nopw */
535			insn->type = INSN_NOP;
536
537		} else if (op2 == 0x1e) {
538
539			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
540				insn->type = INSN_ENDBR;
541
542
543		} else if (op2 == 0x38 && op3 == 0xf8) {
544			if (ins.prefixes.nbytes == 1 &&
545			    ins.prefixes.bytes[0] == 0xf2) {
546				/* ENQCMD cannot be used in the kernel. */
547				WARN("ENQCMD instruction at %s:%lx", sec->name,
548				     offset);
549			}
550
551		} else if (op2 == 0xa0 || op2 == 0xa8) {
552
553			/* push fs/gs */
554			ADD_OP(op) {
555				op->src.type = OP_SRC_CONST;
556				op->dest.type = OP_DEST_PUSH;
557			}
558
559		} else if (op2 == 0xa1 || op2 == 0xa9) {
560
561			/* pop fs/gs */
562			ADD_OP(op) {
563				op->src.type = OP_SRC_POP;
564				op->dest.type = OP_DEST_MEM;
565			}
566		}
567
568		break;
569
570	case 0xc9:
571		/*
572		 * leave
573		 *
574		 * equivalent to:
575		 * mov bp, sp
576		 * pop bp
577		 */
578		ADD_OP(op) {
579			op->src.type = OP_SRC_REG;
580			op->src.reg = CFI_BP;
581			op->dest.type = OP_DEST_REG;
582			op->dest.reg = CFI_SP;
583		}
584		ADD_OP(op) {
585			op->src.type = OP_SRC_POP;
586			op->dest.type = OP_DEST_REG;
587			op->dest.reg = CFI_BP;
588		}
589		break;
590
591	case 0xcc:
592		/* int3 */
593		insn->type = INSN_TRAP;
594		break;
595
596	case 0xe3:
597		/* jecxz/jrcxz */
598		insn->type = INSN_JUMP_CONDITIONAL;
599		break;
600
601	case 0xe9:
602	case 0xeb:
603		insn->type = INSN_JUMP_UNCONDITIONAL;
604		break;
605
606	case 0xc2:
607	case 0xc3:
608		insn->type = INSN_RETURN;
609		break;
610
611	case 0xc7: /* mov imm, r/m */
612		if (!opts.noinstr)
613			break;
614
615		if (ins.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
616			struct reloc *immr, *disp;
617			struct symbol *func;
618			int idx;
619
620			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
621			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);
622
623			if (!immr || strcmp(immr->sym->name, "pv_ops"))
624				break;
625
626			idx = (reloc_addend(immr) + 8) / sizeof(void *);
627
628			func = disp->sym;
629			if (disp->sym->type == STT_SECTION)
630				func = find_symbol_by_offset(disp->sym->sec, reloc_addend(disp));
631			if (!func) {
632				WARN("no func for pv_ops[]");
633				return -1;
634			}
635
636			objtool_pv_add(file, idx, func);
637		}
638
639		break;
640
641	case 0xcf: /* iret */
642		/*
643		 * Handle sync_core(), which has an IRET to self.
644		 * All other IRET are in STT_NONE entry code.
645		 */
646		sym = find_symbol_containing(sec, offset);
647		if (sym && sym->type == STT_FUNC) {
648			ADD_OP(op) {
649				/* add $40, %rsp */
650				op->src.type = OP_SRC_ADD;
651				op->src.reg = CFI_SP;
652				op->src.offset = 5*8;
653				op->dest.type = OP_DEST_REG;
654				op->dest.reg = CFI_SP;
655			}
656			break;
657		}
658
659		fallthrough;
660
661	case 0xca: /* retf */
662	case 0xcb: /* retf */
663		insn->type = INSN_CONTEXT_SWITCH;
664		break;
665
666	case 0xe0: /* loopne */
667	case 0xe1: /* loope */
668	case 0xe2: /* loop */
669		insn->type = INSN_JUMP_CONDITIONAL;
670		break;
671
672	case 0xe8:
673		insn->type = INSN_CALL;
674		/*
675		 * For the impact on the stack, a CALL behaves like
676		 * a PUSH of an immediate value (the return address).
677		 */
678		ADD_OP(op) {
679			op->src.type = OP_SRC_CONST;
680			op->dest.type = OP_DEST_PUSH;
681		}
682		break;
683
684	case 0xfc:
685		insn->type = INSN_CLD;
686		break;
687
688	case 0xfd:
689		insn->type = INSN_STD;
690		break;
691
692	case 0xff:
693		if (modrm_reg == 2 || modrm_reg == 3) {
694
695			insn->type = INSN_CALL_DYNAMIC;
696			if (has_notrack_prefix(&ins))
697				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);
698
699		} else if (modrm_reg == 4) {
700
701			insn->type = INSN_JUMP_DYNAMIC;
702			if (has_notrack_prefix(&ins))
703				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);
704
705		} else if (modrm_reg == 5) {
706
707			/* jmpf */
708			insn->type = INSN_CONTEXT_SWITCH;
709
710		} else if (modrm_reg == 6) {
711
712			/* push from mem */
713			ADD_OP(op) {
714				op->src.type = OP_SRC_CONST;
715				op->dest.type = OP_DEST_PUSH;
716			}
717		}
718
719		break;
720
721	default:
722		break;
723	}
724
725	insn->immediate = ins.immediate.nbytes ? ins.immediate.value : 0;
726
727	return 0;
728}
729
730void arch_initial_func_cfi_state(struct cfi_init_state *state)
731{
732	int i;
733
734	for (i = 0; i < CFI_NUM_REGS; i++) {
735		state->regs[i].base = CFI_UNDEFINED;
736		state->regs[i].offset = 0;
737	}
738
739	/* initial CFA (call frame address) */
740	state->cfa.base = CFI_SP;
741	state->cfa.offset = 8;
742
743	/* initial RA (return address) */
744	state->regs[CFI_RA].base = CFI_CFA;
745	state->regs[CFI_RA].offset = -8;
746}
747
748const char *arch_nop_insn(int len)
749{
750	static const char nops[5][5] = {
751		{ BYTES_NOP1 },
752		{ BYTES_NOP2 },
753		{ BYTES_NOP3 },
754		{ BYTES_NOP4 },
755		{ BYTES_NOP5 },
756	};
757
758	if (len < 1 || len > 5) {
759		WARN("invalid NOP size: %d\n", len);
760		return NULL;
761	}
762
763	return nops[len-1];
764}
765
766#define BYTE_RET	0xC3
767
768const char *arch_ret_insn(int len)
769{
770	static const char ret[5][5] = {
771		{ BYTE_RET },
772		{ BYTE_RET, 0xcc },
773		{ BYTE_RET, 0xcc, BYTES_NOP1 },
774		{ BYTE_RET, 0xcc, BYTES_NOP2 },
775		{ BYTE_RET, 0xcc, BYTES_NOP3 },
776	};
777
778	if (len < 1 || len > 5) {
779		WARN("invalid RET size: %d\n", len);
780		return NULL;
781	}
782
783	return ret[len-1];
784}
785
786int arch_decode_hint_reg(u8 sp_reg, int *base)
787{
788	switch (sp_reg) {
789	case ORC_REG_UNDEFINED:
790		*base = CFI_UNDEFINED;
791		break;
792	case ORC_REG_SP:
793		*base = CFI_SP;
794		break;
795	case ORC_REG_BP:
796		*base = CFI_BP;
797		break;
798	case ORC_REG_SP_INDIRECT:
799		*base = CFI_SP_INDIRECT;
800		break;
801	case ORC_REG_R10:
802		*base = CFI_R10;
803		break;
804	case ORC_REG_R13:
805		*base = CFI_R13;
806		break;
807	case ORC_REG_DI:
808		*base = CFI_DI;
809		break;
810	case ORC_REG_DX:
811		*base = CFI_DX;
812		break;
813	default:
814		return -1;
815	}
816
817	return 0;
818}
819
820bool arch_is_retpoline(struct symbol *sym)
821{
822	return !strncmp(sym->name, "__x86_indirect_", 15);
823}
824
825bool arch_is_rethunk(struct symbol *sym)
826{
827	return !strcmp(sym->name, "__x86_return_thunk");
828}
829
830bool arch_is_embedded_insn(struct symbol *sym)
831{
832	return !strcmp(sym->name, "retbleed_return_thunk") ||
833	       !strcmp(sym->name, "srso_safe_ret");
834}