objtool,x86: Fix uaccess PUSHF/POPF validation
[linux-2.6-microblaze.git] / tools/objtool/check.c
index 85993e6..5e5388a 100644
@@ -6,21 +6,20 @@
 #include <string.h>
 #include <stdlib.h>
 
-#include "builtin.h"
-#include "cfi.h"
-#include "arch.h"
-#include "check.h"
-#include "special.h"
-#include "warn.h"
-#include "arch_elf.h"
+#include <arch/elf.h>
+#include <objtool/builtin.h>
+#include <objtool/cfi.h>
+#include <objtool/arch.h>
+#include <objtool/check.h>
+#include <objtool/special.h>
+#include <objtool/warn.h>
+#include <objtool/endianness.h>
 
 #include <linux/objtool.h>
 #include <linux/hashtable.h>
 #include <linux/kernel.h>
 #include <linux/static_call_types.h>
 
-#define FAKE_JUMP_OFFSET -1
-
 struct alternative {
        struct list_head list;
        struct instruction *insn;
@@ -111,15 +110,20 @@ static struct instruction *prev_insn_same_sym(struct objtool_file *file,
 
 static bool is_sibling_call(struct instruction *insn)
 {
+       /*
+        * Assume only ELF functions can make sibling calls.  This ensures
+        * sibling call detection consistency between vmlinux.o and individual
+        * objects.
+        */
+       if (!insn->func)
+               return false;
+
        /* An indirect jump is either a sibling call or a jump to a table. */
        if (insn->type == INSN_JUMP_DYNAMIC)
                return list_empty(&insn->alts);
 
-       if (!is_static_jump(insn))
-               return false;
-
        /* add_jump_destinations() sets insn->call_dest for sibling calls. */
-       return !!insn->call_dest;
+       return (is_static_jump(insn) && insn->call_dest);
 }
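
For illustration only (not part of the patch), the cases the helper above separates look roughly like this on x86; the symbol names are made up for the sketch:

    jmp other_func         /* static jump, add_jump_destinations() set
                              ->call_dest               => sibling call       */
    jmp *%rax              /* INSN_JUMP_DYNAMIC with an empty ->alts list
                                                        => sibling call       */
    jmp *table(,%rax,8)    /* dynamic jump that gained alternatives from a
                              jump table                => not a sibling call */

Instructions outside any ELF function (!insn->func) are now rejected up front, so the answer is the same whether objtool sees vmlinux.o or the individual object.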
 
 /*
@@ -156,6 +160,7 @@ static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
                "machine_real_restart",
                "rewind_stack_do_exit",
                "kunit_try_catch_throw",
+               "xen_start_kernel",
        };
 
        if (!func)
@@ -502,8 +507,21 @@ static int create_static_call_sections(struct objtool_file *file)
 
                key_sym = find_symbol_by_name(file->elf, tmp);
                if (!key_sym) {
-                       WARN("static_call: can't find static_call_key symbol: %s", tmp);
-                       return -1;
+                       if (!module) {
+                               WARN("static_call: can't find static_call_key symbol: %s", tmp);
+                               return -1;
+                       }
+
+                       /*
+                        * For modules, the key might not be exported, which
+                        * means the module can make static calls but isn't
+                        * allowed to change them.
+                        *
+                        * In that case we temporarily set the key to be the
+                        * trampoline address.  This is fixed up in
+                        * static_call_add_module().
+                        */
+                       key_sym = insn->call_dest;
                }
                free(key_name);
 
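For context, the entries being filled in here follow the site layout from include/linux/static_call_types.h (already included above); a simplified sketch of what each .static_call_sites entry holds:

    struct static_call_site {
            s32 addr;   /* PC-relative location of the call instruction */
            s32 key;    /* PC-relative static_call_key; for an unexported
                           module key, objtool writes the trampoline address
                           here and static_call_add_module() resolves the
                           real key at module load */
    };
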
@@ -648,7 +666,7 @@ static void add_ignores(struct objtool_file *file)
 static const char *uaccess_safe_builtin[] = {
        /* KASAN */
        "kasan_report",
-       "check_memory_region",
+       "kasan_check_range",
        /* KASAN out-of-line */
        "__asan_loadN_noabort",
        "__asan_load1_noabort",
@@ -846,22 +864,16 @@ static int add_jump_destinations(struct objtool_file *file)
                if (!is_static_jump(insn))
                        continue;
 
-               if (insn->offset == FAKE_JUMP_OFFSET)
-                       continue;
-
                reloc = find_reloc_by_dest_range(file->elf, insn->sec,
-                                              insn->offset, insn->len);
+                                                insn->offset, insn->len);
                if (!reloc) {
                        dest_sec = insn->sec;
                        dest_off = arch_jump_destination(insn);
                } else if (reloc->sym->type == STT_SECTION) {
                        dest_sec = reloc->sym->sec;
                        dest_off = arch_dest_reloc_offset(reloc->addend);
-               } else if (reloc->sym->sec->idx) {
-                       dest_sec = reloc->sym->sec;
-                       dest_off = reloc->sym->sym.st_value +
-                                  arch_dest_reloc_offset(reloc->addend);
-               } else if (strstr(reloc->sym->name, "_indirect_thunk_")) {
+               } else if (!strncmp(reloc->sym->name, "__x86_indirect_thunk_", 21) ||
+                          !strncmp(reloc->sym->name, "__x86_retpoline_", 16)) {
                        /*
                         * Retpoline jumps are really dynamic jumps in
                         * disguise, so convert them accordingly.
@@ -873,14 +885,21 @@ static int add_jump_destinations(struct objtool_file *file)
 
                        insn->retpoline_safe = true;
                        continue;
-               } else {
-                       /* external sibling call */
+               } else if (insn->func) {
+                       /* internal or external sibling call (with reloc) */
                        insn->call_dest = reloc->sym;
                        if (insn->call_dest->static_call_tramp) {
                                list_add_tail(&insn->static_call_node,
                                              &file->static_call_list);
                        }
                        continue;
+               } else if (reloc->sym->sec->idx) {
+                       dest_sec = reloc->sym->sec;
+                       dest_off = reloc->sym->sym.st_value +
+                                  arch_dest_reloc_offset(reloc->addend);
+               } else {
+                       /* non-func asm code jumping to another file */
+                       continue;
                }
 
                insn->jump_dest = find_insn(file, dest_sec, dest_off);
@@ -921,15 +940,15 @@ static int add_jump_destinations(struct objtool_file *file)
                         * case where the parent function's only reference to a
                         * subfunction is through a jump table.
                         */
-                       if (!strstr(insn->func->name, ".cold.") &&
-                           strstr(insn->jump_dest->func->name, ".cold.")) {
+                       if (!strstr(insn->func->name, ".cold") &&
+                           strstr(insn->jump_dest->func->name, ".cold")) {
                                insn->func->cfunc = insn->jump_dest->func;
                                insn->jump_dest->func->pfunc = insn->func;
 
                        } else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
                                   insn->jump_dest->offset == insn->jump_dest->func->offset) {
 
-                               /* internal sibling call */
+                               /* internal sibling call (without reloc) */
                                insn->call_dest = insn->jump_dest->func;
                                if (insn->call_dest->static_call_tramp) {
                                        list_add_tail(&insn->static_call_node,
@@ -1058,73 +1077,83 @@ static int add_call_destinations(struct objtool_file *file)
 }
 
 /*
- * The .alternatives section requires some extra special care, over and above
- * what other special sections require:
- *
- * 1. Because alternatives are patched in-place, we need to insert a fake jump
- *    instruction at the end so that validate_branch() skips all the original
- *    replaced instructions when validating the new instruction path.
- *
- * 2. An added wrinkle is that the new instruction length might be zero.  In
- *    that case the old instructions are replaced with noops.  We simulate that
- *    by creating a fake jump as the only new instruction.
- *
- * 3. In some cases, the alternative section includes an instruction which
- *    conditionally jumps to the _end_ of the entry.  We have to modify these
- *    jumps' destinations to point back to .text rather than the end of the
- *    entry in .altinstr_replacement.
+ * The .alternatives section requires some extra special care over and above
+ * other special sections because alternatives are patched in place.
  */
 static int handle_group_alt(struct objtool_file *file,
                            struct special_alt *special_alt,
                            struct instruction *orig_insn,
                            struct instruction **new_insn)
 {
-       static unsigned int alt_group_next_index = 1;
-       struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
-       unsigned int alt_group = alt_group_next_index++;
+       struct instruction *last_orig_insn, *last_new_insn = NULL, *insn, *nop = NULL;
+       struct alt_group *orig_alt_group, *new_alt_group;
        unsigned long dest_off;
 
+
+       orig_alt_group = malloc(sizeof(*orig_alt_group));
+       if (!orig_alt_group) {
+               WARN("malloc failed");
+               return -1;
+       }
+       orig_alt_group->cfi = calloc(special_alt->orig_len,
+                                    sizeof(struct cfi_state *));
+       if (!orig_alt_group->cfi) {
+               WARN("calloc failed");
+               return -1;
+       }
+
        last_orig_insn = NULL;
        insn = orig_insn;
        sec_for_each_insn_from(file, insn) {
                if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
                        break;
 
-               insn->alt_group = alt_group;
+               insn->alt_group = orig_alt_group;
                last_orig_insn = insn;
        }
+       orig_alt_group->orig_group = NULL;
+       orig_alt_group->first_insn = orig_insn;
+       orig_alt_group->last_insn = last_orig_insn;
 
-       if (next_insn_same_sec(file, last_orig_insn)) {
-               fake_jump = malloc(sizeof(*fake_jump));
-               if (!fake_jump) {
+
+       new_alt_group = malloc(sizeof(*new_alt_group));
+       if (!new_alt_group) {
+               WARN("malloc failed");
+               return -1;
+       }
+
+       if (special_alt->new_len < special_alt->orig_len) {
+               /*
+                * Insert a fake nop at the end to make the replacement
+                * alt_group the same size as the original.  This is needed to
+                * allow propagate_alt_cfi() to do its magic.  When the last
+                * instruction affects the stack, the instruction after it (the
+                * nop) will propagate the new state to the shared CFI array.
+                */
+               nop = malloc(sizeof(*nop));
+               if (!nop) {
                        WARN("malloc failed");
                        return -1;
                }
-               memset(fake_jump, 0, sizeof(*fake_jump));
-               INIT_LIST_HEAD(&fake_jump->alts);
-               INIT_LIST_HEAD(&fake_jump->stack_ops);
-               init_cfi_state(&fake_jump->cfi);
+               memset(nop, 0, sizeof(*nop));
+               INIT_LIST_HEAD(&nop->alts);
+               INIT_LIST_HEAD(&nop->stack_ops);
+               init_cfi_state(&nop->cfi);
 
-               fake_jump->sec = special_alt->new_sec;
-               fake_jump->offset = FAKE_JUMP_OFFSET;
-               fake_jump->type = INSN_JUMP_UNCONDITIONAL;
-               fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
-               fake_jump->func = orig_insn->func;
+               nop->sec = special_alt->new_sec;
+               nop->offset = special_alt->new_off + special_alt->new_len;
+               nop->len = special_alt->orig_len - special_alt->new_len;
+               nop->type = INSN_NOP;
+               nop->func = orig_insn->func;
+               nop->alt_group = new_alt_group;
+               nop->ignore = orig_insn->ignore_alts;
        }
 
        if (!special_alt->new_len) {
-               if (!fake_jump) {
-                       WARN("%s: empty alternative at end of section",
-                            special_alt->orig_sec->name);
-                       return -1;
-               }
-
-               *new_insn = fake_jump;
-               return 0;
+               *new_insn = nop;
+               goto end;
        }
 
-       last_new_insn = NULL;
-       alt_group = alt_group_next_index++;
        insn = *new_insn;
        sec_for_each_insn_from(file, insn) {
                struct reloc *alt_reloc;
@@ -1136,7 +1165,7 @@ static int handle_group_alt(struct objtool_file *file,
 
                insn->ignore = orig_insn->ignore_alts;
                insn->func = orig_insn->func;
-               insn->alt_group = alt_group;
+               insn->alt_group = new_alt_group;
 
                /*
                 * Since alternative replacement code is copy/pasted by the
@@ -1163,14 +1192,8 @@ static int handle_group_alt(struct objtool_file *file,
                        continue;
 
                dest_off = arch_jump_destination(insn);
-               if (dest_off == special_alt->new_off + special_alt->new_len) {
-                       if (!fake_jump) {
-                               WARN("%s: alternative jump to end of section",
-                                    special_alt->orig_sec->name);
-                               return -1;
-                       }
-                       insn->jump_dest = fake_jump;
-               }
+               if (dest_off == special_alt->new_off + special_alt->new_len)
+                       insn->jump_dest = next_insn_same_sec(file, last_orig_insn);
 
                if (!insn->jump_dest) {
                        WARN_FUNC("can't find alternative jump destination",
@@ -1185,9 +1208,13 @@ static int handle_group_alt(struct objtool_file *file,
                return -1;
        }
 
-       if (fake_jump)
-               list_add(&fake_jump->list, &last_new_insn->list);
-
+       if (nop)
+               list_add(&nop->list, &last_new_insn->list);
+end:
+       new_alt_group->orig_group = orig_alt_group;
+       new_alt_group->first_insn = *new_insn;
+       new_alt_group->last_insn = nop ? : last_new_insn;
+       new_alt_group->cfi = orig_alt_group->cfi;
        return 0;
 }
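
A worked example of the sizing invariant established above, with hypothetical instruction lengths: if the original group spans 5 bytes and the replacement only 2, a 3-byte fake nop is appended to the replacement:

    orig alt_group:  [3-byte insn][2-byte insn]          orig_len = 5
    new  alt_group:  [2-byte insn][3-byte fake nop]      new_len  = 2

Both groups then cover the same byte offsets, so propagate_alt_cfi() can index one shared cfi[] array (allocated with special_alt->orig_len entries) from either code stream.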
 
@@ -1479,13 +1506,20 @@ static int add_jump_table_alts(struct objtool_file *file)
        return 0;
 }
 
+static void set_func_state(struct cfi_state *state)
+{
+       state->cfa = initial_func_cfi.cfa;
+       memcpy(&state->regs, &initial_func_cfi.regs,
+              CFI_NUM_REGS * sizeof(struct cfi_reg));
+       state->stack_size = initial_func_cfi.cfa.offset;
+}
+
 static int read_unwind_hints(struct objtool_file *file)
 {
        struct section *sec, *relocsec;
        struct reloc *reloc;
        struct unwind_hint *hint;
        struct instruction *insn;
-       struct cfi_reg *cfa;
        int i;
 
        sec = find_section_by_name(file->elf, ".discard.unwind_hints");
@@ -1520,22 +1554,20 @@ static int read_unwind_hints(struct objtool_file *file)
                        return -1;
                }
 
-               cfa = &insn->cfi.cfa;
+               insn->hint = true;
 
-               if (hint->type == UNWIND_HINT_TYPE_RET_OFFSET) {
-                       insn->ret_offset = hint->sp_offset;
+               if (hint->type == UNWIND_HINT_TYPE_FUNC) {
+                       set_func_state(&insn->cfi);
                        continue;
                }
 
-               insn->hint = true;
-
                if (arch_decode_hint_reg(insn, hint->sp_reg)) {
                        WARN_FUNC("unsupported unwind_hint sp base reg %d",
                                  insn->sec, insn->offset, hint->sp_reg);
                        return -1;
                }
 
-               cfa->offset = hint->sp_offset;
+               insn->cfi.cfa.offset = bswap_if_needed(hint->sp_offset);
                insn->cfi.type = hint->type;
                insn->cfi.end = hint->end;
        }
@@ -1791,27 +1823,18 @@ static bool is_fentry_call(struct instruction *insn)
 
 static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state)
 {
-       u8 ret_offset = insn->ret_offset;
        struct cfi_state *cfi = &state->cfi;
        int i;
 
        if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap)
                return true;
 
-       if (cfi->cfa.offset != initial_func_cfi.cfa.offset + ret_offset)
+       if (cfi->cfa.offset != initial_func_cfi.cfa.offset)
                return true;
 
-       if (cfi->stack_size != initial_func_cfi.cfa.offset + ret_offset)
+       if (cfi->stack_size != initial_func_cfi.cfa.offset)
                return true;
 
-       /*
-        * If there is a ret offset hint then don't check registers
-        * because a callee-saved register might have been pushed on
-        * the stack.
-        */
-       if (ret_offset)
-               return false;
-
        for (i = 0; i < CFI_NUM_REGS; i++) {
                if (cfi->regs[i].base != initial_func_cfi.regs[i].base ||
                    cfi->regs[i].offset != initial_func_cfi.regs[i].offset)
@@ -1821,12 +1844,20 @@ static bool has_modified_stack_frame(struct instruction *insn, struct insn_state
        return false;
 }
 
+static bool check_reg_frame_pos(const struct cfi_reg *reg,
+                               int expected_offset)
+{
+       return reg->base == CFI_CFA &&
+              reg->offset == expected_offset;
+}
+
 static bool has_valid_stack_frame(struct insn_state *state)
 {
        struct cfi_state *cfi = &state->cfi;
 
-       if (cfi->cfa.base == CFI_BP && cfi->regs[CFI_BP].base == CFI_CFA &&
-           cfi->regs[CFI_BP].offset == -16)
+       if (cfi->cfa.base == CFI_BP &&
+           check_reg_frame_pos(&cfi->regs[CFI_BP], -cfi->cfa.offset) &&
+           check_reg_frame_pos(&cfi->regs[CFI_RA], -cfi->cfa.offset + 8))
                return true;
 
        if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP)
@@ -1955,8 +1986,7 @@ static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
                case OP_SRC_REG:
                        if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
                            cfa->base == CFI_SP &&
-                           regs[CFI_BP].base == CFI_CFA &&
-                           regs[CFI_BP].offset == -cfa->offset) {
+                           check_reg_frame_pos(&regs[CFI_BP], -cfa->offset)) {
 
                                /* mov %rsp, %rbp */
                                cfa->base = op->dest.reg;
@@ -2016,12 +2046,58 @@ static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
                                        cfa->offset = -cfi->vals[op->src.reg].offset;
                                        cfi->stack_size = cfa->offset;
 
+                               } else if (cfa->base == CFI_SP &&
+                                          cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
+                                          cfi->vals[op->src.reg].offset == cfa->offset) {
+
+                                       /*
+                                        * Stack swizzle:
+                                        *
+                                        * 1: mov %rsp, (%[tos])
+                                        * 2: mov %[tos], %rsp
+                                        *    ...
+                                        * 3: pop %rsp
+                                        *
+                                        * Where:
+                                        *
+                                        * 1 - places a pointer to the previous
+                                        *     stack at the Top-of-Stack of the
+                                        *     new stack.
+                                        *
+                                        * 2 - switches to the new stack.
+                                        *
+                                        * 3 - pops the Top-of-Stack to restore
+                                        *     the original stack.
+                                        *
+                                        * Note: we set base to SP_INDIRECT
+                                        * here and preserve offset. Therefore
+                                        * when the unwinder reaches ToS it
+                                        * will dereference SP and then add the
+                                        * offset to find the next frame, IOW:
+                                        * (%rsp) + offset.
+                                        */
+                                       cfa->base = CFI_SP_INDIRECT;
+
                                } else {
                                        cfa->base = CFI_UNDEFINED;
                                        cfa->offset = 0;
                                }
                        }
 
+                       else if (op->dest.reg == CFI_SP &&
+                                cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
+                                cfi->vals[op->src.reg].offset == cfa->offset) {
+
+                               /*
+                                * The same stack swizzle case 2) as above. But
+                                * because we can't change cfa->base, case 3)
+                                * will become a regular POP. Pretend we're a
+                                * PUSH so things don't go unbalanced.
+                                */
+                               cfi->stack_size += 8;
+                       }
+
                        break;
 
                case OP_SRC_ADD:
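
For reference, the pattern handled above comes from inline-asm stack switching (for example the x86 interrupt-stack call helpers); the sketch below is illustrative only, and the function name, constraints and clobber list are assumptions rather than the kernel's actual macro:

    /* Illustrative stack swizzle as objtool's decoder would see it. */
    static void call_on_new_stack(void (*func)(void), void *new_stack_top)
    {
            asm volatile("movq %%rsp, (%[tos])\n\t"  /* 1: old RSP -> new ToS */
                         "movq %[tos], %%rsp\n\t"    /* 2: switch stacks      */
                         "call *%[func]\n\t"
                         "popq %%rsp\n\t"            /* 3: restore old RSP    */
                         : : [tos] "r" (new_stack_top), [func] "r" (func)
                         : "memory");
    }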
@@ -2041,6 +2117,17 @@ static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
                                break;
                        }
 
+                       if (!cfi->drap && op->src.reg == CFI_SP &&
+                           op->dest.reg == CFI_BP && cfa->base == CFI_SP &&
+                           check_reg_frame_pos(&regs[CFI_BP], -cfa->offset + op->src.offset)) {
+
+                               /* lea disp(%rsp), %rbp */
+                               cfa->base = CFI_BP;
+                               cfa->offset -= op->src.offset;
+                               cfi->bp_scratch = false;
+                               break;
+                       }
+
                        if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
 
                                /* drap: lea disp(%rsp), %drap */
@@ -2107,6 +2194,13 @@ static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
 
                case OP_SRC_POP:
                case OP_SRC_POPF:
+                       if (op->dest.reg == CFI_SP && cfa->base == CFI_SP_INDIRECT) {
+
+                               /* pop %rsp; # restore from a stack swizzle */
+                               cfa->base = CFI_SP;
+                               break;
+                       }
+
                        if (!cfi->drap && op->dest.reg == cfa->base) {
 
                                /* pop %rbp */
@@ -2135,6 +2229,14 @@ static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
                        break;
 
                case OP_SRC_REG_INDIRECT:
+                       if (!cfi->drap && op->dest.reg == cfa->base &&
+                           op->dest.reg == CFI_BP) {
+
+                               /* mov disp(%rsp), %rbp */
+                               cfa->base = CFI_SP;
+                               cfa->offset = cfi->stack_size;
+                       }
+
                        if (cfi->drap && op->src.reg == CFI_BP &&
                            op->src.offset == cfi->drap_offset) {
 
@@ -2156,6 +2258,12 @@ static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
                                /* mov disp(%rbp), %reg */
                                /* mov disp(%rsp), %reg */
                                restore_reg(cfi, op->dest.reg);
+
+                       } else if (op->src.reg == CFI_SP &&
+                                  op->src.offset == regs[op->dest.reg].offset + cfi->stack_size) {
+
+                               /* mov disp(%rsp), %reg */
+                               restore_reg(cfi, op->dest.reg);
                        }
 
                        break;
@@ -2233,6 +2341,18 @@ static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
                        /* mov reg, disp(%rsp) */
                        save_reg(cfi, op->src.reg, CFI_CFA,
                                 op->dest.offset - cfi->cfa.offset);
+
+               } else if (op->dest.reg == CFI_SP) {
+
+                       /* mov reg, disp(%rsp) */
+                       save_reg(cfi, op->src.reg, CFI_CFA,
+                                op->dest.offset - cfi->stack_size);
+
+               } else if (op->src.reg == CFI_SP && op->dest.offset == 0) {
+
+                       /* mov %rsp, (%reg); # setup a stack swizzle. */
+                       cfi->vals[op->dest.reg].base = CFI_SP_INDIRECT;
+                       cfi->vals[op->dest.reg].offset = cfa->offset;
                }
 
                break;
@@ -2280,22 +2400,50 @@ static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
        return 0;
 }
 
+/*
+ * The stack layouts of alternatives instructions can sometimes diverge when
+ * they have stack modifications.  That's fine as long as the potential stack
+ * layouts don't conflict at any given potential instruction boundary.
+ *
+ * Flatten the CFIs of the different alternative code streams (both original
+ * and replacement) into a single shared CFI array which can be used to detect
+ * conflicts and nicely feed a linear array of ORC entries to the unwinder.
+ */
+static int propagate_alt_cfi(struct objtool_file *file, struct instruction *insn)
+{
+       struct cfi_state **alt_cfi;
+       int group_off;
+
+       if (!insn->alt_group)
+               return 0;
+
+       alt_cfi = insn->alt_group->cfi;
+       group_off = insn->offset - insn->alt_group->first_insn->offset;
+
+       if (!alt_cfi[group_off]) {
+               alt_cfi[group_off] = &insn->cfi;
+       } else {
+               if (memcmp(alt_cfi[group_off], &insn->cfi, sizeof(struct cfi_state))) {
+                       WARN_FUNC("stack layout conflict in alternatives",
+                                 insn->sec, insn->offset);
+                       return -1;
+               }
+       }
+
+       return 0;
+}
+
 static int handle_insn_ops(struct instruction *insn, struct insn_state *state)
 {
        struct stack_op *op;
 
        list_for_each_entry(op, &insn->stack_ops, list) {
-               struct cfi_state old_cfi = state->cfi;
-               int res;
 
-               res = update_cfi_state(insn, &state->cfi, op);
-               if (res)
-                       return res;
+               if (update_cfi_state(insn, &state->cfi, op))
+                       return 1;
 
-               if (insn->alt_group && memcmp(&state->cfi, &old_cfi, sizeof(struct cfi_state))) {
-                       WARN_FUNC("alternative modifies stack", insn->sec, insn->offset);
-                       return -1;
-               }
+               if (!insn->alt_group)
+                       continue;
 
                if (op->dest.type == OP_DEST_PUSHF) {
                        if (!state->uaccess_stack) {
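
To show what the PUSHF/POPF bookkeeping just above tracks (now only for instructions inside an alt_group), here is a hypothetical trace using objtool's bit-stack encoding, with the sentinel handling simplified:

    STAC      /* state.uaccess = true                                  */
    PUSHF     /* uaccess_stack <<= 1; low bit records uaccess (= 1)    */
    CLAC      /* state.uaccess = false                                 */
    ...
    POPF      /* state.uaccess = uaccess_stack & 1 (= true again);
                 uaccess_stack >>= 1                                   */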
@@ -2485,28 +2633,20 @@ static int validate_return(struct symbol *func, struct instruction *insn, struct
        return 0;
 }
 
-/*
- * Alternatives should not contain any ORC entries, this in turn means they
- * should not contain any CFI ops, which implies all instructions should have
- * the same same CFI state.
- *
- * It is possible to constuct alternatives that have unreachable holes that go
- * unreported (because they're NOPs), such holes would result in CFI_UNDEFINED
- * states which then results in ORC entries, which we just said we didn't want.
- *
- * Avoid them by copying the CFI entry of the first instruction into the whole
- * alternative.
- */
-static void fill_alternative_cfi(struct objtool_file *file, struct instruction *insn)
+static struct instruction *next_insn_to_validate(struct objtool_file *file,
+                                                struct instruction *insn)
 {
-       struct instruction *first_insn = insn;
-       int alt_group = insn->alt_group;
+       struct alt_group *alt_group = insn->alt_group;
 
-       sec_for_each_insn_continue(file, insn) {
-               if (insn->alt_group != alt_group)
-                       break;
-               insn->cfi = first_insn->cfi;
-       }
+       /*
+        * Simulate the fact that alternatives are patched in-place.  When the
+        * end of a replacement alt_group is reached, redirect objtool flow to
+        * the end of the original alt_group.
+        */
+       if (alt_group && insn == alt_group->last_insn && alt_group->orig_group)
+               return next_insn_same_sec(file, alt_group->orig_group->last_insn);
+
+       return next_insn_same_sec(file, insn);
 }
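
A sketch of the redirection performed above (groups and offsets hypothetical):

    original alt_group:     A -> B        (.text)
    replacement alt_group:  X -> Y        (.altinstr_replacement)

After validating Y, the replacement group's last_insn, next_insn_to_validate() returns the instruction following B, so validation resumes in .text exactly as if the replacement had been patched over A..B.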
 
 /*
@@ -2527,7 +2667,7 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
        sec = insn->sec;
 
        while (1) {
-               next_insn = next_insn_same_sec(file, insn);
+               next_insn = next_insn_to_validate(file, insn);
 
                if (file->c_file && func && insn->func && func != insn->func->pfunc) {
                        WARN("%s() falls through to next function %s()",
@@ -2560,6 +2700,9 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
 
                insn->visited |= visited;
 
+               if (propagate_alt_cfi(file, insn))
+                       return 1;
+
                if (!insn->ignore_alts && !list_empty(&insn->alts)) {
                        bool skip_orig = false;
 
@@ -2575,9 +2718,6 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
                                }
                        }
 
-                       if (insn->alt_group)
-                               fill_alternative_cfi(file, insn);
-
                        if (skip_orig)
                                return 0;
                }
@@ -2615,7 +2755,7 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
 
                case INSN_JUMP_CONDITIONAL:
                case INSN_JUMP_UNCONDITIONAL:
-                       if (func && is_sibling_call(insn)) {
+                       if (is_sibling_call(insn)) {
                                ret = validate_sibling_call(insn, &state);
                                if (ret)
                                        return ret;
@@ -2637,7 +2777,7 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
 
                case INSN_JUMP_DYNAMIC:
                case INSN_JUMP_DYNAMIC_CONDITIONAL:
-                       if (func && is_sibling_call(insn)) {
+                       if (is_sibling_call(insn)) {
                                ret = validate_sibling_call(insn, &state);
                                if (ret)
                                        return ret;
@@ -2680,15 +2820,19 @@ static int validate_branch(struct objtool_file *file, struct symbol *func,
                        break;
 
                case INSN_STD:
-                       if (state.df)
+                       if (state.df) {
                                WARN_FUNC("recursive STD", sec, insn->offset);
+                               return 1;
+                       }
 
                        state.df = true;
                        break;
 
                case INSN_CLD:
-                       if (!state.df && func)
+                       if (!state.df && func) {
                                WARN_FUNC("redundant CLD", sec, insn->offset);
+                               return 1;
+                       }
 
                        state.df = false;
                        break;
@@ -2811,9 +2955,6 @@ static bool ignore_unreachable_insn(struct objtool_file *file, struct instructio
            !strcmp(insn->sec->name, ".altinstr_aux"))
                return true;
 
-       if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->offset == FAKE_JUMP_OFFSET)
-               return true;
-
        if (!insn->func)
                return false;
 
@@ -2899,10 +3040,7 @@ static int validate_section(struct objtool_file *file, struct section *sec)
                        continue;
 
                init_insn_state(&state, sec);
-               state.cfi.cfa = initial_func_cfi.cfa;
-               memcpy(&state.cfi.regs, &initial_func_cfi.regs,
-                      CFI_NUM_REGS * sizeof(struct cfi_reg));
-               state.cfi.stack_size = initial_func_cfi.cfa.offset;
+               set_func_state(&state.cfi);
 
                warnings += validate_symbol(file, sec, func, &state);
        }
@@ -3023,14 +3161,10 @@ int check(struct objtool_file *file)
        }
 
 out:
-       if (ret < 0) {
-               /*
-                *  Fatal error.  The binary is corrupt or otherwise broken in
-                *  some way, or objtool itself is broken.  Fail the kernel
-                *  build.
-                */
-               return ret;
-       }
-
+       /*
+        *  For now, don't fail the kernel build on fatal warnings.  These
+        *  errors are still fairly common due to the growing matrix of
+        *  supported toolchains and their recent pace of change.
+        */
        return 0;
 }