1 // SPDX-License-Identifier: GPL-2.0-or-later
/* Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com> */
9 #define unlikely(cond) (cond)
11 #include "../../../arch/x86/lib/inat.c"
12 #include "../../../arch/x86/lib/insn.c"
14 #define CONFIG_64BIT 1
17 #include <asm/orc_types.h>
18 #include <objtool/check.h>
19 #include <objtool/elf.h>
20 #include <objtool/arch.h>
21 #include <objtool/warn.h>
22 #include <objtool/endianness.h>
23 #include <objtool/builtin.h>
/*
 * arch_ftrace_match() - test whether @name is the x86 ftrace entry symbol.
 *
 * On x86-64 the compiler emits calls to "__fentry__" at function entry;
 * return 1 iff @name matches it exactly, 0 otherwise.
 *
 * NOTE(review): the function braces were missing in this mangled chunk;
 * restored around the single visible return statement.
 */
int arch_ftrace_match(char *name)
{
	return !strcmp(name, "__fentry__");
}
/*
 * Classify the ELF machine type of @elf.
 * NOTE(review): the switch cases and return statements are missing from this
 * chunk — only the signature, switch head, and the fallback warning are
 * visible.  Presumably returns 1 for x86-64, 0 for 32-bit x86, and an error
 * for anything else — confirm against upstream objtool.
 */
31 static int is_x86_64(const struct elf *elf)
33 	switch (elf->ehdr.e_machine) {
39 		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
/*
 * NOTE(review): body not visible in this chunk — only the signature.
 * Presumably reports whether CFI register @reg is callee-saved under the
 * x86-64 SysV calling convention; confirm against upstream objtool.
 */
44 bool arch_callee_saved_reg(unsigned char reg)
/*
 * NOTE(review): body not visible in this chunk — only the signature.
 * Presumably converts a relocation addend into the destination offset for a
 * PC-relative reference; confirm against upstream objtool.
 */
71 unsigned long arch_dest_reloc_offset(int addend)
76 unsigned long arch_jump_destination(struct instruction *insn)
78 return insn->offset + insn->len + insn->immediate;
/*
 * Report whether @reloc is a PC-relative relocation type.
 * NOTE(review): the case list and return statements are incomplete in this
 * chunk — only two GOT-PC-relative cases are visible; the full set (and the
 * default) must be taken from upstream objtool.
 */
81 bool arch_pc_relative_reloc(struct reloc *reloc)
84 	 * All relocation types where P (the address of the target)
85 	 * is included in the computation.
87 	switch (reloc->type) {
94 	case R_X86_64_GOTPC32:
95 	case R_X86_64_GOTPCREL:
/*
 * NOTE(review): continuation lines of an ADD_OP()-style macro — the leading
 * "#define" line and the allocation-failure return are not visible in this
 * chunk.  The else-for idiom runs the caller-supplied statement once and
 * then appends the freshly calloc'd stack_op to @ops_list.
 */
106 	if (!(op = calloc(1, sizeof(*op)))) \
108 	else for (list_add_tail(&op->list, ops_list); op; op = NULL)
/*
 * ModRM/SIB decode helpers.  These macros read the modrm_mod/modrm_rm/
 * sib_base/sib_index locals of arch_decode_instruction() by name, so they
 * are only meaningful inside that function.
 * NOTE(review): the opening of this comment block and some table rows are
 * missing from this chunk; preserved verbatim below.
 */
111  * Helpers to decode ModRM/SIB:
113  * r/m| AX CX DX BX | SP | BP | SI DI |
114  * | R8 R9 R10 R11 | R12 | R13 | R14 R15 |
115  * Mod+----------------+-----+-----+---------+
116  * 00 | [r/m] |[SIB]|[IP+]| [r/m] |
117  * 01 | [r/m + d8] |[S+d]| [r/m + d8] |
118  * 10 | [r/m + d32] |[S+D]| [r/m + d32] |
/* mod == 3 means the r/m field names a register, not memory. */
122 #define mod_is_mem() (modrm_mod != 3)
123 #define mod_is_reg() (modrm_mod == 3)
/* mod==0, r/m==101b encodes RIP-relative; r/m==100b with a memory mod means a SIB byte follows. */
125 #define is_RIP() ((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
126 #define have_SIB() ((modrm_rm & 7) == CFI_SP && mod_is_mem())
/*
 * NOTE(review): the final line of rm_is() (the non-SIB branch comparing
 * modrm_rm against (reg)) is missing from this chunk.
 */
128 #define rm_is(reg) (have_SIB() ? \
129 		    sib_base == (reg) && sib_index == CFI_SP : \
132 #define rm_is_mem(reg) (mod_is_mem() && !is_RIP() && rm_is(reg))
133 #define rm_is_reg(reg) (mod_is_reg() && modrm_rm == (reg))
/*
 * Scan the instruction's legacy prefix bytes for the NOTRACK prefix (0x3e,
 * which doubles as the DS-segment override).
 * NOTE(review): the braces, the declaration of i, and both return
 * statements are missing from this chunk — presumably returns true on a
 * 0x3e match and false otherwise; confirm upstream.
 */
135 static bool has_notrack_prefix(struct insn *insn)
139 	for (i = 0; i < insn->prefixes.nbytes; i++) {
140 		if (insn->prefixes.bytes[i] == 0x3e)
/*
 * Decode one x86 instruction at @sec + @offset and classify it for objtool:
 * fills in *len, *type and *immediate, and appends stack_op records
 * describing the instruction's effect on the stack/frame to @ops_list.
 *
 * NOTE(review): this chunk is heavily truncated — many interior lines
 * (braces, case labels, break/return statements, the opcode switch head)
 * are missing.  The visible lines are preserved verbatim with review
 * comments added; do not treat this fragment as compilable.
 */
147 int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
148 unsigned long offset, unsigned int maxlen,
149 unsigned int *len, enum insn_type *type,
150 unsigned long *immediate,
151 struct list_head *ops_list)
153 const struct elf *elf = file->elf;
156 unsigned char op1, op2, op3, prefix,
157 rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
158 modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
159 sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
160 struct stack_op *op = NULL;
/* Raw decode via the kernel's shared x86 instruction decoder (insn.c). */
164 x86_64 = is_x86_64(elf);
168 ret = insn_decode(&insn, sec->data->d_buf + offset, maxlen,
169 x86_64 ? INSN_MODE_64 : INSN_MODE_32);
171 WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
/* VEX-encoded (AVX) instructions never touch the stack frame. */
178 if (insn.vex_prefix.nbytes)
181 prefix = insn.prefixes.bytes[0];
183 op1 = insn.opcode.bytes[0];
184 op2 = insn.opcode.bytes[1];
185 op3 = insn.opcode.bytes[2];
/* Split the REX prefix into its W/R/X/B bits (each normalized to 0/1). */
187 if (insn.rex_prefix.nbytes) {
188 rex = insn.rex_prefix.bytes[0];
189 rex_w = X86_REX_W(rex) >> 3;
190 rex_r = X86_REX_R(rex) >> 2;
191 rex_x = X86_REX_X(rex) >> 1;
192 rex_b = X86_REX_B(rex);
/* ModRM fields, widened to 4-bit register numbers via REX.R/REX.B. */
195 if (insn.modrm.nbytes) {
196 modrm = insn.modrm.bytes[0];
197 modrm_mod = X86_MODRM_MOD(modrm);
198 modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
199 modrm_rm = X86_MODRM_RM(modrm) + 8*rex_b;
/* SIB fields; scale is intentionally unused (see commented-out line). */
202 if (insn.sib.nbytes) {
203 sib = insn.sib.bytes[0];
204 /* sib_scale = X86_SIB_SCALE(sib); */
205 sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
206 sib_base = X86_SIB_BASE(sib) + 8*rex_b;
/* NOTE(review): the opcode switch head is missing; the fragments below are its cases. */
213 if (rex_w && rm_is_reg(CFI_SP)) {
215 /* add/sub reg, %rsp */
217 op->src.type = OP_SRC_ADD;
218 op->src.reg = modrm_reg;
219 op->dest.type = OP_DEST_REG;
220 op->dest.reg = CFI_SP;
/* push reg (0x50..0x57): register number comes from the low opcode bits + REX.B. */
229 op->src.type = OP_SRC_REG;
230 op->src.reg = (op1 & 0x7) + 8*rex_b;
231 op->dest.type = OP_DEST_PUSH;
/* pop reg (0x58..0x5f). */
240 op->src.type = OP_SRC_POP;
241 op->dest.type = OP_DEST_REG;
242 op->dest.reg = (op1 & 0x7) + 8*rex_b;
/* push imm. */
251 op->src.type = OP_SRC_CONST;
252 op->dest.type = OP_DEST_PUSH;
257 *type = INSN_JUMP_CONDITIONAL;
/* Group-1 ALU ops with immediate: opcode 1000 00sw, operation selected by ModRM.reg. */
262 * 1000 00sw : mod OP r/m : immediate
264 * s - sign extend immediate
267 * OP: 000 ADD 100 AND
277 /* %rsp target only */
278 if (!rm_is_reg(CFI_SP))
281 imm = insn.immediate.value;
/* Sign-extend imm8/imm32 by arithmetic right shift (NOTE(review): shift counts as visible). */
282 if (op1 & 2) { /* sign extend */
283 if (op1 & 1) { /* imm32 */
285 imm = (s64)imm >> 32;
288 imm = (s64)imm >> 56;
292 switch (modrm_reg & 7) {
297 /* add/sub imm, %rsp */
299 op->src.type = OP_SRC_ADD;
300 op->src.reg = CFI_SP;
301 op->src.offset = imm;
302 op->dest.type = OP_DEST_REG;
303 op->dest.reg = CFI_SP;
/* and imm, %rsp (stack alignment). */
310 op->src.type = OP_SRC_AND;
311 op->src.reg = CFI_SP;
312 op->src.offset = insn.immediate.value;
313 op->dest.type = OP_DEST_REG;
314 op->dest.reg = CFI_SP;
/* mov %rsp, reg / mov %rsp, disp(reg). */
329 if (modrm_reg == CFI_SP) {
334 op->src.type = OP_SRC_REG;
335 op->src.reg = CFI_SP;
336 op->dest.type = OP_DEST_REG;
337 op->dest.reg = modrm_rm;
342 /* skip RIP relative displacement */
346 /* skip nontrivial SIB */
349 if (sib_index != CFI_SP)
353 /* mov %rsp, disp(%reg) */
355 op->src.type = OP_SRC_REG;
356 op->src.reg = CFI_SP;
357 op->dest.type = OP_DEST_REG_INDIRECT;
358 op->dest.reg = modrm_rm;
359 op->dest.offset = insn.displacement.value;
/* mov reg, %rsp. */
367 if (rm_is_reg(CFI_SP)) {
371 op->src.type = OP_SRC_REG;
372 op->src.reg = modrm_reg;
373 op->dest.type = OP_DEST_REG;
374 op->dest.reg = CFI_SP;
/* Register spills to the frame (rbp- or rsp-based). */
384 if (rm_is_mem(CFI_BP)) {
386 /* mov reg, disp(%rbp) */
388 op->src.type = OP_SRC_REG;
389 op->src.reg = modrm_reg;
390 op->dest.type = OP_DEST_REG_INDIRECT;
391 op->dest.reg = CFI_BP;
392 op->dest.offset = insn.displacement.value;
397 if (rm_is_mem(CFI_SP)) {
399 /* mov reg, disp(%rsp) */
401 op->src.type = OP_SRC_REG;
402 op->src.reg = modrm_reg;
403 op->dest.type = OP_DEST_REG_INDIRECT;
404 op->dest.reg = CFI_SP;
405 op->dest.offset = insn.displacement.value;
/* Register restores from the frame. */
416 if (rm_is_mem(CFI_BP)) {
418 /* mov disp(%rbp), reg */
420 op->src.type = OP_SRC_REG_INDIRECT;
421 op->src.reg = CFI_BP;
422 op->src.offset = insn.displacement.value;
423 op->dest.type = OP_DEST_REG;
424 op->dest.reg = modrm_reg;
429 if (rm_is_mem(CFI_SP)) {
431 /* mov disp(%rsp), reg */
433 op->src.type = OP_SRC_REG_INDIRECT;
434 op->src.reg = CFI_SP;
435 op->src.offset = insn.displacement.value;
436 op->dest.type = OP_DEST_REG;
437 op->dest.reg = modrm_reg;
/* LEA: only plain reg+disp forms are tracked; exotic encodings are skipped. */
446 WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
450 /* skip non 64bit ops */
454 /* skip RIP relative displacement */
458 /* skip nontrivial SIB */
461 if (sib_index != CFI_SP)
465 /* lea disp(%src), %dst */
467 op->src.offset = insn.displacement.value;
468 if (!op->src.offset) {
469 /* lea (%src), %dst */
470 op->src.type = OP_SRC_REG;
472 /* lea disp(%src), %dst */
473 op->src.type = OP_SRC_ADD;
475 op->src.reg = modrm_rm;
476 op->dest.type = OP_DEST_REG;
477 op->dest.reg = modrm_reg;
/* pop into memory. */
484 op->src.type = OP_SRC_POP;
485 op->dest.type = OP_DEST_MEM;
/* pushf / popf. */
496 op->src.type = OP_SRC_CONST;
497 op->dest.type = OP_DEST_PUSHF;
504 op->src.type = OP_SRC_POPF;
505 op->dest.type = OP_DEST_MEM;
/* Two-byte (0x0f-prefixed) opcodes. */
515 else if (modrm == 0xcb)
518 } else if (op2 >= 0x80 && op2 <= 0x8f) {
520 *type = INSN_JUMP_CONDITIONAL;
522 } else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
525 /* sysenter, sysret */
526 *type = INSN_CONTEXT_SWITCH;
528 } else if (op2 == 0x0b || op2 == 0xb9) {
533 } else if (op2 == 0x0d || op2 == 0x1f) {
538 } else if (op2 == 0x1e) {
/* endbr64/endbr32 (f3 0f 1e fa/fb). */
540 if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
544 } else if (op2 == 0x38 && op3 == 0xf8) {
545 if (insn.prefixes.nbytes == 1 &&
546 insn.prefixes.bytes[0] == 0xf2) {
547 /* ENQCMD cannot be used in the kernel. */
548 WARN("ENQCMD instruction at %s:%lx", sec->name,
552 } else if (op2 == 0xa0 || op2 == 0xa8) {
/* push fs/gs. */
556 op->src.type = OP_SRC_CONST;
557 op->dest.type = OP_DEST_PUSH;
560 } else if (op2 == 0xa1 || op2 == 0xa9) {
/* pop fs/gs. */
564 op->src.type = OP_SRC_POP;
565 op->dest.type = OP_DEST_MEM;
/* leave: %rbp -> %rsp, then pop %rbp. */
580 op->src.type = OP_SRC_REG;
581 op->src.reg = CFI_BP;
582 op->dest.type = OP_DEST_REG;
583 op->dest.reg = CFI_SP;
586 op->src.type = OP_SRC_POP;
587 op->dest.type = OP_DEST_REG;
588 op->dest.reg = CFI_BP;
599 *type = INSN_JUMP_CONDITIONAL;
604 *type = INSN_JUMP_UNCONDITIONAL;
612 case 0xc7: /* mov imm, r/m */
/* Recognize .init.text writes into pv_ops[] and record the target func. */
616 if (insn.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
617 struct reloc *immr, *disp;
621 immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
622 disp = find_reloc_by_dest(elf, (void *)sec, offset+7);
624 if (!immr || strcmp(immr->sym->name, "pv_ops"))
627 idx = (immr->addend + 8) / sizeof(void *);
630 if (disp->sym->type == STT_SECTION)
631 func = find_symbol_by_offset(disp->sym->sec, disp->addend);
633 WARN("no func for pv_ops[]");
637 objtool_pv_add(file, idx, func);
642 case 0xcf: /* iret */
644 * Handle sync_core(), which has an IRET to self.
645 * All other IRET are in STT_NONE entry code.
647 sym = find_symbol_containing(sec, offset);
648 if (sym && sym->type == STT_FUNC) {
/* IRET-to-self pops 5 quadwords (SS, RSP, RFLAGS, CS, RIP). */
651 op->src.type = OP_SRC_ADD;
652 op->src.reg = CFI_SP;
653 op->src.offset = 5*8;
654 op->dest.type = OP_DEST_REG;
655 op->dest.reg = CFI_SP;
662 case 0xca: /* retf */
663 case 0xcb: /* retf */
664 *type = INSN_CONTEXT_SWITCH;
667 case 0xe0: /* loopne */
668 case 0xe1: /* loope */
669 case 0xe2: /* loop */
670 *type = INSN_JUMP_CONDITIONAL;
676 * For the impact on the stack, a CALL behaves like
677 * a PUSH of an immediate value (the return address).
680 op->src.type = OP_SRC_CONST;
681 op->dest.type = OP_DEST_PUSH;
/* 0xff group: indirect call/jump/far-jump/push selected by ModRM.reg. */
694 if (modrm_reg == 2 || modrm_reg == 3) {
696 *type = INSN_CALL_DYNAMIC;
697 if (has_notrack_prefix(&insn))
698 WARN("notrack prefix found at %s:0x%lx", sec->name, offset);
700 } else if (modrm_reg == 4) {
702 *type = INSN_JUMP_DYNAMIC;
703 if (has_notrack_prefix(&insn))
704 WARN("notrack prefix found at %s:0x%lx", sec->name, offset);
706 } else if (modrm_reg == 5) {
709 *type = INSN_CONTEXT_SWITCH;
711 } else if (modrm_reg == 6) {
715 op->src.type = OP_SRC_CONST;
716 op->dest.type = OP_DEST_PUSH;
/* Report the immediate (0 when the instruction has none). */
726 *immediate = insn.immediate.nbytes ? insn.immediate.value : 0;
731 void arch_initial_func_cfi_state(struct cfi_init_state *state)
735 for (i = 0; i < CFI_NUM_REGS; i++) {
736 state->regs[i].base = CFI_UNDEFINED;
737 state->regs[i].offset = 0;
740 /* initial CFA (call frame address) */
741 state->cfa.base = CFI_SP;
742 state->cfa.offset = 8;
744 /* initial RA (return address) */
745 state->regs[CFI_RA].base = CFI_CFA;
746 state->regs[CFI_RA].offset = -8;
/*
 * Return a pointer to a static NOP sequence of exactly @len bytes
 * (1 <= len <= 5); warn on out-of-range sizes.
 * NOTE(review): the NOP table rows and the return statements are missing
 * from this chunk — confirm the table contents against upstream objtool.
 */
749 const char *arch_nop_insn(int len)
751 	static const char nops[5][5] = {
759 	if (len < 1 || len > 5) {
760 		WARN("invalid NOP size: %d\n", len);
/* Opcode byte for the near RET instruction. */
767 #define BYTE_RET	0xC3
/*
 * Return a pointer to a static @len-byte sequence starting with RET,
 * padded with INT3 (0xcc) and NOP bytes (1 <= len <= 5); warn on
 * out-of-range sizes.
 * NOTE(review): the first two table rows and the return statements are
 * missing from this chunk — confirm against upstream objtool.
 */
769 const char *arch_ret_insn(int len)
771 	static const char ret[5][5] = {
774 		{ BYTE_RET, 0xcc, BYTES_NOP1 },
775 		{ BYTE_RET, 0xcc, BYTES_NOP2 },
776 		{ BYTE_RET, 0xcc, BYTES_NOP3 },
779 	if (len < 1 || len > 5) {
780 		WARN("invalid RET size: %d\n", len);
/*
 * Translate an ORC unwind-hint stack-pointer register encoding (@sp_reg)
 * into objtool's CFI base register, stored through @base.
 * NOTE(review): most switch cases, the switch head, and the return
 * statements are missing from this chunk — only the UNDEFINED and
 * SP_INDIRECT mappings are visible.
 */
787 int arch_decode_hint_reg(u8 sp_reg, int *base)
790 	case ORC_REG_UNDEFINED:
791 		*base = CFI_UNDEFINED;
799 	case ORC_REG_SP_INDIRECT:
800 		*base = CFI_SP_INDIRECT;
821 bool arch_is_retpoline(struct symbol *sym)
823 return !strncmp(sym->name, "__x86_indirect_", 15);
826 bool arch_is_rethunk(struct symbol *sym)
828 return !strcmp(sym->name, "__x86_return_thunk");