/*
 * Source: tools/objtool/arch/x86/decode.c — two upstream versions follow.
 * Version 1 of 2: Linux v5.4.
 */
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
  4 */
  5
  6#include <stdio.h>
  7#include <stdlib.h>
  8
  9#define unlikely(cond) (cond)
 10#include <asm/insn.h>
 11#include "../../../arch/x86/lib/inat.c"
 12#include "../../../arch/x86/lib/insn.c"
 13
 14#include "../../elf.h"
 15#include "../../arch.h"
 16#include "../../warn.h"
 17
/*
 * Map a 3-bit register number from an opcode or ModRM field to a CFI
 * register, indexed as [reg][extension-bit]: column 0 is the legacy
 * register, column 1 the REX.B/REX.R-extended one (r8-r15).
 */
static unsigned char op_to_cfi_reg[][2] = {
	{CFI_AX, CFI_R8},
	{CFI_CX, CFI_R9},
	{CFI_DX, CFI_R10},
	{CFI_BX, CFI_R11},
	{CFI_SP, CFI_R12},
	{CFI_BP, CFI_R13},
	{CFI_SI, CFI_R14},
	{CFI_DI, CFI_R15},
};
 28
 29static int is_x86_64(struct elf *elf)
 
 
 
 
 
 
 
 
 30{
 31	switch (elf->ehdr.e_machine) {
 32	case EM_X86_64:
 33		return 1;
 34	case EM_386:
 35		return 0;
 36	default:
 37		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
 38		return -1;
 39	}
 40}
 41
 42bool arch_callee_saved_reg(unsigned char reg)
 43{
 44	switch (reg) {
 45	case CFI_BP:
 46	case CFI_BX:
 47	case CFI_R12:
 48	case CFI_R13:
 49	case CFI_R14:
 50	case CFI_R15:
 51		return true;
 52
 53	case CFI_AX:
 54	case CFI_CX:
 55	case CFI_DX:
 56	case CFI_SI:
 57	case CFI_DI:
 58	case CFI_SP:
 59	case CFI_R8:
 60	case CFI_R9:
 61	case CFI_R10:
 62	case CFI_R11:
 63	case CFI_RA:
 64	default:
 65		return false;
 66	}
 67}
 68
/*
 * Decode the single instruction at sec + offset (at most maxlen bytes) and
 * classify it for objtool's stack-tracking pass.
 *
 * Outputs:
 *   *len       - decoded instruction length in bytes
 *   *type      - INSN_* classification (INSN_OTHER if not interesting)
 *   *immediate - immediate operand value, or 0 if there is none
 *   *op        - stack effect; only filled in when *type == INSN_STACK
 *
 * Returns 0 on success, -1 on decode failure or unsupported ELF machine.
 */
int arch_decode_instruction(struct elf *elf, struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate, struct stack_op *op)
{
	struct insn insn;
	int x86_64, sign;
	unsigned char op1, op2, rex = 0, rex_b = 0, rex_r = 0, rex_w = 0,
		      rex_x = 0, modrm = 0, modrm_mod = 0, modrm_rm = 0,
		      modrm_reg = 0, sib = 0;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	insn_init(&insn, sec->data->d_buf + offset, maxlen, x86_64);
	insn_get_length(&insn);

	if (!insn_complete(&insn)) {
		WARN_FUNC("can't decode instruction", sec, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	/* VEX-encoded instructions are never classified further. */
	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];

	/* Extract the REX.W/R/X/B bits, each shifted down to 0 or 1. */
	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	/* Split ModRM into its mod / reg / rm fields. */
	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm);
		modrm_rm = X86_MODRM_RM(modrm);
	}

	if (insn.sib.nbytes)
		sib = insn.sib.bytes[0];

	/* Classify by primary opcode byte. */
	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {

			/* add/sub reg, %rsp */
			*type = INSN_STACK;
			op->src.type = OP_SRC_ADD;
			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		*type = INSN_STACK;
		op->src.type = OP_SRC_REG;
		op->src.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
		op->dest.type = OP_DEST_PUSH;

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		*type = INSN_STACK;
		op->src.type = OP_SRC_POP;
		op->dest.type = OP_DEST_REG;
		op->dest.reg = op_to_cfi_reg[op1 & 0x7][rex_b];

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		*type = INSN_STACK;
		op->src.type = OP_SRC_CONST;
		op->dest.type = OP_DEST_PUSH;
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x81:
	case 0x83:
		/* Only a bare REX.W prefix (0x48, no extension bits) is tracked. */
		if (rex != 0x48)
			break;

		if (modrm == 0xe4) {
			/* and imm, %rsp */
			*type = INSN_STACK;
			op->src.type = OP_SRC_AND;
			op->src.reg = CFI_SP;
			op->src.offset = insn.immediate.value;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
			break;
		}

		if (modrm == 0xc4)
			sign = 1;
		else if (modrm == 0xec)
			sign = -1;
		else
			break;

		/* add/sub imm, %rsp */
		*type = INSN_STACK;
		op->src.type = OP_SRC_ADD;
		op->src.reg = CFI_SP;
		op->src.offset = insn.immediate.value * sign;
		op->dest.type = OP_DEST_REG;
		op->dest.reg = CFI_SP;
		break;

	case 0x89:
		if (rex_w && !rex_r && modrm_mod == 3 && modrm_reg == 4) {

			/* mov %rsp, reg */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_SP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = op_to_cfi_reg[modrm_rm][rex_b];
			break;
		}

		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {

			/* mov reg, %rsp */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG;
			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_b &&
		    (modrm_mod == 1 || modrm_mod == 2) && modrm_rm == 5) {

			/* mov reg, disp(%rbp) */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG;
			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
			op->dest.type = OP_DEST_REG_INDIRECT;
			op->dest.reg = CFI_BP;
			op->dest.offset = insn.displacement.value;

		} else if (rex_w && !rex_b && modrm_rm == 4 && sib == 0x24) {

			/* mov reg, disp(%rsp) */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG;
			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
			op->dest.type = OP_DEST_REG_INDIRECT;
			op->dest.reg = CFI_SP;
			op->dest.offset = insn.displacement.value;
		}

		break;

	case 0x8b:
		if (rex_w && !rex_b && modrm_mod == 1 && modrm_rm == 5) {

			/* mov disp(%rbp), reg */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG_INDIRECT;
			op->src.reg = CFI_BP;
			op->src.offset = insn.displacement.value;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];

		} else if (rex_w && !rex_b && sib == 0x24 &&
			   modrm_mod != 3 && modrm_rm == 4) {

			/* mov disp(%rsp), reg */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG_INDIRECT;
			op->src.reg = CFI_SP;
			op->src.offset = insn.displacement.value;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
		}

		break;

	case 0x8d:
		if (sib == 0x24 && rex_w && !rex_b && !rex_x) {

			*type = INSN_STACK;
			if (!insn.displacement.value) {
				/* lea (%rsp), reg */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%rsp), reg */
				op->src.type = OP_SRC_ADD;
				op->src.offset = insn.displacement.value;
			}
			op->src.reg = CFI_SP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];

		} else if (rex == 0x48 && modrm == 0x65) {

			/* lea disp(%rbp), %rsp */
			*type = INSN_STACK;
			op->src.type = OP_SRC_ADD;
			op->src.reg = CFI_BP;
			op->src.offset = insn.displacement.value;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;

		} else if (rex == 0x49 && modrm == 0x62 &&
			   insn.displacement.value == -8) {

			/*
			 * lea -0x8(%r10), %rsp
			 *
			 * Restoring rsp back to its original value after a
			 * stack realignment.
			 */
			*type = INSN_STACK;
			op->src.type = OP_SRC_ADD;
			op->src.reg = CFI_R10;
			op->src.offset = -8;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;

		} else if (rex == 0x49 && modrm == 0x65 &&
			   insn.displacement.value == -16) {

			/*
			 * lea -0x10(%r13), %rsp
			 *
			 * Restoring rsp back to its original value after a
			 * stack realignment.
			 */
			*type = INSN_STACK;
			op->src.type = OP_SRC_ADD;
			op->src.reg = CFI_R13;
			op->src.offset = -16;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}

		break;

	case 0x8f:
		/* pop to mem */
		*type = INSN_STACK;
		op->src.type = OP_SRC_POP;
		op->dest.type = OP_DEST_MEM;
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		*type = INSN_STACK;
		op->src.type = OP_SRC_CONST;
		op->dest.type = OP_DEST_PUSHF;
		break;

	case 0x9d:
		/* popf */
		*type = INSN_STACK;
		op->src.type = OP_SRC_POPF;
		op->dest.type = OP_DEST_MEM;
		break;

	case 0x0f:

		/* Two-byte opcodes: dispatch on the second opcode byte. */
		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			*type = INSN_STACK;
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			*type = INSN_STACK;
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		*type = INSN_STACK;
		op->dest.type = OP_DEST_LEAVE;

		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xca: /* retf */
	case 0xcb: /* retf */
	case 0xcf: /* iret */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe8:
		*type = INSN_CALL;
		break;

	case 0xfc:
		*type = INSN_CLD;
		break;

	case 0xfd:
		*type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3)

			*type = INSN_CALL_DYNAMIC;

		else if (modrm_reg == 4)

			*type = INSN_JUMP_DYNAMIC;

		else if (modrm_reg == 5)

			/* jmpf */
			*type = INSN_CONTEXT_SWITCH;

		else if (modrm_reg == 6) {

			/* push from mem */
			*type = INSN_STACK;
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	default:
		break;
	}

	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}
482
483void arch_initial_func_cfi_state(struct cfi_state *state)
484{
485	int i;
486
487	for (i = 0; i < CFI_NUM_REGS; i++) {
488		state->regs[i].base = CFI_UNDEFINED;
489		state->regs[i].offset = 0;
490	}
491
492	/* initial CFA (call frame address) */
493	state->cfa.base = CFI_SP;
494	state->cfa.offset = 8;
495
496	/* initial RA (return address) */
497	state->regs[16].base = CFI_CFA;
498	state->regs[16].offset = -8;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
499}
/*
 * Version 2 of 2: Linux v5.14.15.
 */
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
  4 */
  5
  6#include <stdio.h>
  7#include <stdlib.h>
  8
  9#define unlikely(cond) (cond)
 10#include <asm/insn.h>
 11#include "../../../arch/x86/lib/inat.c"
 12#include "../../../arch/x86/lib/insn.c"
 13
 14#define CONFIG_64BIT 1
 15#include <asm/nops.h>
 
 
 
 
 
 
 
 
 
 
 
 
 16
 17#include <asm/orc_types.h>
 18#include <objtool/check.h>
 19#include <objtool/elf.h>
 20#include <objtool/arch.h>
 21#include <objtool/warn.h>
 22#include <objtool/endianness.h>
 23#include <arch/elf.h>
 24
 25static int is_x86_64(const struct elf *elf)
 26{
 27	switch (elf->ehdr.e_machine) {
 28	case EM_X86_64:
 29		return 1;
 30	case EM_386:
 31		return 0;
 32	default:
 33		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
 34		return -1;
 35	}
 36}
 37
 38bool arch_callee_saved_reg(unsigned char reg)
 39{
 40	switch (reg) {
 41	case CFI_BP:
 42	case CFI_BX:
 43	case CFI_R12:
 44	case CFI_R13:
 45	case CFI_R14:
 46	case CFI_R15:
 47		return true;
 48
 49	case CFI_AX:
 50	case CFI_CX:
 51	case CFI_DX:
 52	case CFI_SI:
 53	case CFI_DI:
 54	case CFI_SP:
 55	case CFI_R8:
 56	case CFI_R9:
 57	case CFI_R10:
 58	case CFI_R11:
 59	case CFI_RA:
 60	default:
 61		return false;
 62	}
 63}
 64
 65unsigned long arch_dest_reloc_offset(int addend)
 66{
 67	return addend + 4;
 68}
 69
 70unsigned long arch_jump_destination(struct instruction *insn)
 71{
 72	return insn->offset + insn->len + insn->immediate;
 73}
 74
/*
 * ADD_OP(op): allocate a zeroed struct stack_op, queue it on ops_list, and
 * run the attached block to fill it in.  Returns -1 from the enclosing
 * function on allocation failure.  The for-trick makes "ADD_OP(op) { ... }"
 * execute the block exactly once with op non-NULL, then reset op to NULL.
 */
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (list_add_tail(&op->list, ops_list); op; op = NULL)

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 * 00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 * 01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 * 10 |  [r/m + d32]   |[S+D]|   [r/m + d32] |
 * 11 |                   r/ m               |
 */

/* mod field: 0..2 select a memory operand, 3 selects a register operand */
#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

/* rm==BP with mod==0 encodes RIP-relative; rm==SP in memory mode means SIB */
#define is_RIP()   ((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
#define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem())

/*
 * Does r/m resolve to the given register?  With a SIB byte, only the
 * trivial form counts: base == reg and index == SP (i.e. no index).
 */
#define rm_is(reg) (have_SIB() ? \
		    sib_base == (reg) && sib_index == CFI_SP : \
		    modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
104
/*
 * Decode the single instruction at sec + offset (at most maxlen bytes) and
 * classify it for objtool.
 *
 * Outputs:
 *   *len       - decoded instruction length in bytes
 *   *type      - INSN_* classification (INSN_OTHER if not interesting)
 *   *immediate - immediate operand value, or 0 if there is none
 *   ops_list   - receives zero or more heap-allocated stack_op entries
 *                describing the instruction's effect on the stack (ADD_OP)
 *
 * Returns 0 on success, -1 on decode failure, unsupported ELF machine, or
 * allocation failure inside ADD_OP.
 */
int arch_decode_instruction(const struct elf *elf, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate,
			    struct list_head *ops_list)
{
	struct insn insn;
	int x86_64, ret;
	unsigned char op1, op2,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&insn, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	/* VEX-encoded instructions are never classified further. */
	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];

	/* Extract the REX.W/R/X/B bits, each shifted down to 0 or 1. */
	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	/* ModRM fields, with reg/rm widened to 4 bits by the REX bits. */
	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm  = X86_MODRM_RM(modrm)  + 8*rex_b;
	}

	/* SIB fields, likewise widened by REX.X / REX.B. */
	if (insn.sib.nbytes) {
		sib = insn.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base  = X86_SIB_BASE(sib)  + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = insn.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			/* SUB is modeled as ADD of the negated immediate. */
			imm = -imm;
			/* fallthrough */
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = insn.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = insn.displacement.value;
				}
				break;
			}

			/* NOTE(review): unreachable -- both branches above break. */
			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip RIP relative displacement */
		if (is_RIP())
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = insn.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:

		/* Two-byte opcodes: dispatch on the second opcode byte. */
		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */

	case 0xca: /* retf */
	case 0xcb: /* retf */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe8:
		*type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		*type = INSN_CLD;
		break;

	case 0xfd:
		*type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3)

			*type = INSN_CALL_DYNAMIC;

		else if (modrm_reg == 4)

			*type = INSN_JUMP_DYNAMIC;

		else if (modrm_reg == 5)

			/* jmpf */
			*type = INSN_CONTEXT_SWITCH;

		else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}
625
626void arch_initial_func_cfi_state(struct cfi_init_state *state)
627{
628	int i;
629
630	for (i = 0; i < CFI_NUM_REGS; i++) {
631		state->regs[i].base = CFI_UNDEFINED;
632		state->regs[i].offset = 0;
633	}
634
635	/* initial CFA (call frame address) */
636	state->cfa.base = CFI_SP;
637	state->cfa.offset = 8;
638
639	/* initial RA (return address) */
640	state->regs[CFI_RA].base = CFI_CFA;
641	state->regs[CFI_RA].offset = -8;
642}
643
644const char *arch_nop_insn(int len)
645{
646	static const char nops[5][5] = {
647		{ BYTES_NOP1 },
648		{ BYTES_NOP2 },
649		{ BYTES_NOP3 },
650		{ BYTES_NOP4 },
651		{ BYTES_NOP5 },
652	};
653
654	if (len < 1 || len > 5) {
655		WARN("invalid NOP size: %d\n", len);
656		return NULL;
657	}
658
659	return nops[len-1];
660}
661
/* asm/alternative.h ? */

/* High bit negates a feature test: apply the alternative when it is OFF. */
#define ALTINSTR_FLAG_INV	(1 << 15)
#define ALT_NOT(feat)		((feat) | ALTINSTR_FLAG_INV)

/*
 * On-disk layout of one .altinstructions record.
 * NOTE(review): must stay in sync with the kernel's struct alt_instr.
 * The two s32 offsets are filled in via PC32 relocations rather than
 * written directly (see elf_add_alternative()).
 */
struct alt_instr {
	s32 instr_offset;	/* original instruction */
	s32 repl_offset;	/* offset to replacement instruction */
	u16 cpuid;		/* cpuid bit set for replacement */
	u8  instrlen;		/* length of original instruction */
	u8  replacementlen;	/* length of new instruction */
} __packed;
674
/*
 * Append one struct alt_instr record to .altinstructions, creating the
 * section on first use.  instr_offset and repl_offset are emitted as
 * R_X86_64_PC32 relocations (to the original instruction and to @sym);
 * cpuid/instrlen/replacementlen are stored directly in the buffer.
 *
 * Returns 0 on success, -1 on any libelf or allocation failure.
 */
static int elf_add_alternative(struct elf *elf,
			       struct instruction *orig, struct symbol *sym,
			       int cpuid, u8 orig_len, u8 repl_len)
{
	const int size = sizeof(struct alt_instr);
	struct alt_instr *alt;
	struct section *sec;
	Elf_Scn *s;

	sec = find_section_by_name(elf, ".altinstructions");
	if (!sec) {
		sec = elf_create_section(elf, ".altinstructions",
					 SHF_ALLOC, 0, 0);

		if (!sec) {
			WARN_ELF("elf_create_section");
			return -1;
		}
	}

	s = elf_getscn(elf->elf, sec->idx);
	if (!s) {
		WARN_ELF("elf_getscn");
		return -1;
	}

	/* New data buffer holding exactly one record, appended to the section. */
	sec->data = elf_newdata(s);
	if (!sec->data) {
		WARN_ELF("elf_newdata");
		return -1;
	}

	sec->data->d_size = size;
	sec->data->d_align = 1;

	/* Buffer ownership passes to the section data; zeroed before use. */
	alt = sec->data->d_buf = malloc(size);
	if (!sec->data->d_buf) {
		perror("malloc");
		return -1;
	}
	memset(sec->data->d_buf, 0, size);

	/* alt_instr::instr_offset lives at the start of the record... */
	if (elf_add_reloc_to_insn(elf, sec, sec->sh.sh_size,
				  R_X86_64_PC32, orig->sec, orig->offset)) {
		WARN("elf_create_reloc: alt_instr::instr_offset");
		return -1;
	}

	/* ...and alt_instr::repl_offset 4 bytes in. */
	if (elf_add_reloc(elf, sec, sec->sh.sh_size + 4,
			  R_X86_64_PC32, sym, 0)) {
		WARN("elf_create_reloc: alt_instr::repl_offset");
		return -1;
	}

	/* cpuid is stored in target byte order. */
	alt->cpuid = bswap_if_needed(cpuid);
	alt->instrlen = orig_len;
	alt->replacementlen = repl_len;

	sec->sh.sh_size += size;
	sec->changed = true;

	return 0;
}
738
#define X86_FEATURE_RETPOLINE                ( 7*32+12)

/*
 * For every retpoline call/jump (an indirect call/jmp routed through a
 * __x86_indirect_thunk_<reg> symbol), emit an .altinstructions entry
 * pointing at the matching __x86_indirect_alt_{call,jmp}_<reg> symbol,
 * gated on ALT_NOT(X86_FEATURE_RETPOLINE) — i.e. patched in when
 * retpolines are disabled.  Returns 0 on success, -1 on failure.
 */
int arch_rewrite_retpolines(struct objtool_file *file)
{
	struct instruction *insn;
	struct reloc *reloc;
	struct symbol *sym;
	char name[32] = "";

	list_for_each_entry(insn, &file->retpoline_call_list, call_node) {

		if (insn->type != INSN_JUMP_DYNAMIC &&
		    insn->type != INSN_CALL_DYNAMIC)
			continue;

		/* Leave the thunk section itself alone. */
		if (!strcmp(insn->sec->name, ".text.__x86.indirect_thunk"))
			continue;

		reloc = insn->reloc;

		/*
		 * "+ 21" skips a "__x86_indirect_thunk_" prefix, leaving the
		 * register name.  NOTE(review): assumes every symbol on
		 * retpoline_call_list carries that prefix and that the result
		 * fits in name[32] — verify against the list's producer.
		 */
		sprintf(name, "__x86_indirect_alt_%s_%s",
			insn->type == INSN_JUMP_DYNAMIC ? "jmp" : "call",
			reloc->sym->name + 21);

		sym = find_symbol_by_name(file->elf, name);
		if (!sym) {
			sym = elf_create_undef_symbol(file->elf, name);
			if (!sym) {
				WARN("elf_create_undef_symbol");
				return -1;
			}
		}

		if (elf_add_alternative(file->elf, insn, sym,
					ALT_NOT(X86_FEATURE_RETPOLINE), 5, 5)) {
			WARN("elf_add_alternative");
			return -1;
		}
	}

	return 0;
}
781
782int arch_decode_hint_reg(struct instruction *insn, u8 sp_reg)
783{
784	struct cfi_reg *cfa = &insn->cfi.cfa;
785
786	switch (sp_reg) {
787	case ORC_REG_UNDEFINED:
788		cfa->base = CFI_UNDEFINED;
789		break;
790	case ORC_REG_SP:
791		cfa->base = CFI_SP;
792		break;
793	case ORC_REG_BP:
794		cfa->base = CFI_BP;
795		break;
796	case ORC_REG_SP_INDIRECT:
797		cfa->base = CFI_SP_INDIRECT;
798		break;
799	case ORC_REG_R10:
800		cfa->base = CFI_R10;
801		break;
802	case ORC_REG_R13:
803		cfa->base = CFI_R13;
804		break;
805	case ORC_REG_DI:
806		cfa->base = CFI_DI;
807		break;
808	case ORC_REG_DX:
809		cfa->base = CFI_DX;
810		break;
811	default:
812		return -1;
813	}
814
815	return 0;
816}
817
818bool arch_is_retpoline(struct symbol *sym)
819{
820	return !strncmp(sym->name, "__x86_indirect_", 15);
821}