tools/objtool/check.c
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4  */
5
6 #include <string.h>
7 #include <stdlib.h>
8
9 #include "builtin.h"
10 #include "cfi.h"
11 #include "arch.h"
12 #include "check.h"
13 #include "special.h"
14 #include "warn.h"
15 #include "arch_elf.h"
16
17 #include <linux/objtool.h>
18 #include <linux/hashtable.h>
19 #include <linux/kernel.h>
20 #include <linux/static_call_types.h>
21
22 #define FAKE_JUMP_OFFSET -1
23
24 struct alternative {
25         struct list_head list;
26         struct instruction *insn;
27         bool skip_orig;
28 };
29
30 struct cfi_init_state initial_func_cfi;
31
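/*
 * Look up a decoded instruction by (section, offset) in the file's
 * instruction hash table.
 */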
32 struct instruction *find_insn(struct objtool_file *file,
33                               struct section *sec, unsigned long offset)
34 {
35         struct instruction *insn;
36
37         hash_for_each_possible(file->insn_hash, insn, hash, sec_offset_hash(sec, offset)) {
38                 if (insn->sec == sec && insn->offset == offset)
39                         return insn;
40         }
41
42         return NULL;
43 }
44
45 static struct instruction *next_insn_same_sec(struct objtool_file *file,
46                                               struct instruction *insn)
47 {
48         struct instruction *next = list_next_entry(insn, list);
49
50         if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
51                 return NULL;
52
53         return next;
54 }
55
56 static struct instruction *next_insn_same_func(struct objtool_file *file,
57                                                struct instruction *insn)
58 {
59         struct instruction *next = list_next_entry(insn, list);
60         struct symbol *func = insn->func;
61
62         if (!func)
63                 return NULL;
64
65         if (&next->list != &file->insn_list && next->func == func)
66                 return next;
67
68         /* Check if we're already in the subfunction: */
69         if (func == func->cfunc)
70                 return NULL;
71
72         /* Move to the subfunction: */
73         return find_insn(file, func->cfunc->sec, func->cfunc->offset);
74 }
75
76 static struct instruction *prev_insn_same_sym(struct objtool_file *file,
77                                                struct instruction *insn)
78 {
79         struct instruction *prev = list_prev_entry(insn, list);
80
81         if (&prev->list != &file->insn_list && prev->func == insn->func)
82                 return prev;
83
84         return NULL;
85 }
86
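/*
 * func_for_each_insn() follows a function into its .cold subfunction via
 * next_insn_same_func(), while sym_for_each_insn() stays strictly within
 * the symbol's own address range.
 */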
87 #define func_for_each_insn(file, func, insn)                            \
88         for (insn = find_insn(file, func->sec, func->offset);           \
89              insn;                                                      \
90              insn = next_insn_same_func(file, insn))
91
92 #define sym_for_each_insn(file, sym, insn)                              \
93         for (insn = find_insn(file, sym->sec, sym->offset);             \
94              insn && &insn->list != &file->insn_list &&                 \
95                 insn->sec == sym->sec &&                                \
96                 insn->offset < sym->offset + sym->len;                  \
97              insn = list_next_entry(insn, list))
98
99 #define sym_for_each_insn_continue_reverse(file, sym, insn)             \
100         for (insn = list_prev_entry(insn, list);                        \
101              &insn->list != &file->insn_list &&                         \
102                 insn->sec == sym->sec && insn->offset >= sym->offset;   \
103              insn = list_prev_entry(insn, list))
104
105 #define sec_for_each_insn_from(file, insn)                              \
106         for (; insn; insn = next_insn_same_sec(file, insn))
107
108 #define sec_for_each_insn_continue(file, insn)                          \
109         for (insn = next_insn_same_sec(file, insn); insn;               \
110              insn = next_insn_same_sec(file, insn))
111
112 static bool is_sibling_call(struct instruction *insn)
113 {
114         /* An indirect jump is either a sibling call or a jump to a table. */
115         if (insn->type == INSN_JUMP_DYNAMIC)
116                 return list_empty(&insn->alts);
117
118         if (!is_static_jump(insn))
119                 return false;
120
121         /* add_jump_destinations() sets insn->call_dest for sibling calls. */
122         return !!insn->call_dest;
123 }
124
125 /*
126  * This checks to see if the given function is a "noreturn" function.
127  *
128  * For global functions which are outside the scope of this object file, we
129  * have to keep a manual list of them.
130  *
131  * For local functions, we have to detect them manually by simply looking for
132  * the lack of a return instruction.
133  */
134 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
135                                 int recursion)
136 {
137         int i;
138         struct instruction *insn;
139         bool empty = true;
140
141         /*
142          * Unfortunately these have to be hard coded because the noreturn
143          * attribute isn't provided in ELF data.
144          */
145         static const char * const global_noreturns[] = {
146                 "__stack_chk_fail",
147                 "panic",
148                 "do_exit",
149                 "do_task_dead",
150                 "__module_put_and_exit",
151                 "complete_and_exit",
152                 "__reiserfs_panic",
153                 "lbug_with_loc",
154                 "fortify_panic",
155                 "usercopy_abort",
156                 "machine_real_restart",
157                 "rewind_stack_do_exit",
158                 "kunit_try_catch_throw",
159         };
160
161         if (!func)
162                 return false;
163
164         if (func->bind == STB_WEAK)
165                 return false;
166
167         if (func->bind == STB_GLOBAL)
168                 for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
169                         if (!strcmp(func->name, global_noreturns[i]))
170                                 return true;
171
172         if (!func->len)
173                 return false;
174
175         insn = find_insn(file, func->sec, func->offset);
176         if (!insn->func)
177                 return false;
178
179         func_for_each_insn(file, func, insn) {
180                 empty = false;
181
182                 if (insn->type == INSN_RETURN)
183                         return false;
184         }
185
186         if (empty)
187                 return false;
188
189         /*
190          * A function can have a sibling call instead of a return.  In that
191          * case, the function's dead-end status depends on whether the target
192          * of the sibling call returns.
193          */
194         func_for_each_insn(file, func, insn) {
195                 if (is_sibling_call(insn)) {
196                         struct instruction *dest = insn->jump_dest;
197
198                         if (!dest)
199                                 /* sibling call to another file */
200                                 return false;
201
202                         /* local sibling call */
203                         if (recursion == 5) {
204                                 /*
205                                  * Infinite recursion: two functions have
206                                  * sibling calls to each other.  This is a very
207                                  * rare case.  It means they aren't dead ends.
208                                  */
209                                 return false;
210                         }
211
212                         return __dead_end_function(file, dest->func, recursion+1);
213                 }
214         }
215
216         return true;
217 }
218
219 static bool dead_end_function(struct objtool_file *file, struct symbol *func)
220 {
221         return __dead_end_function(file, func, 0);
222 }
223
224 static void init_cfi_state(struct cfi_state *cfi)
225 {
226         int i;
227
228         for (i = 0; i < CFI_NUM_REGS; i++) {
229                 cfi->regs[i].base = CFI_UNDEFINED;
230                 cfi->vals[i].base = CFI_UNDEFINED;
231         }
232         cfi->cfa.base = CFI_UNDEFINED;
233         cfi->drap_reg = CFI_UNDEFINED;
234         cfi->drap_offset = -1;
235 }
236
237 static void init_insn_state(struct insn_state *state, struct section *sec)
238 {
239         memset(state, 0, sizeof(*state));
240         init_cfi_state(&state->cfi);
241
242         /*
243          * We need the full vmlinux for noinstr validation, otherwise we can
244          * not correctly determine insn->call_dest->sec (external symbols do
245          * not have a section).
246          */
247         if (vmlinux && sec)
248                 state->noinstr = sec->noinstr;
249 }
250
251 /*
252  * Call the arch-specific instruction decoder for all the instructions and add
253  * them to the global instruction list.
254  */
255 static int decode_instructions(struct objtool_file *file)
256 {
257         struct section *sec;
258         struct symbol *func;
259         unsigned long offset;
260         struct instruction *insn;
261         unsigned long nr_insns = 0;
262         int ret;
263
264         for_each_sec(file, sec) {
265
266                 if (!(sec->sh.sh_flags & SHF_EXECINSTR))
267                         continue;
268
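                /*
                 * Mark ordinary executable sections as text; the alternative
                 * replacement/aux and .discard.* sections are excluded.
                 */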
269                 if (strcmp(sec->name, ".altinstr_replacement") &&
270                     strcmp(sec->name, ".altinstr_aux") &&
271                     strncmp(sec->name, ".discard.", 9))
272                         sec->text = true;
273
274                 if (!strcmp(sec->name, ".noinstr.text") ||
275                     !strcmp(sec->name, ".entry.text"))
276                         sec->noinstr = true;
277
278                 for (offset = 0; offset < sec->len; offset += insn->len) {
279                         insn = malloc(sizeof(*insn));
280                         if (!insn) {
281                                 WARN("malloc failed");
282                                 return -1;
283                         }
284                         memset(insn, 0, sizeof(*insn));
285                         INIT_LIST_HEAD(&insn->alts);
286                         INIT_LIST_HEAD(&insn->stack_ops);
287                         init_cfi_state(&insn->cfi);
288
289                         insn->sec = sec;
290                         insn->offset = offset;
291
292                         ret = arch_decode_instruction(file->elf, sec, offset,
293                                                       sec->len - offset,
294                                                       &insn->len, &insn->type,
295                                                       &insn->immediate,
296                                                       &insn->stack_ops);
297                         if (ret)
298                                 goto err;
299
300                         hash_add(file->insn_hash, &insn->hash, sec_offset_hash(sec, insn->offset));
301                         list_add_tail(&insn->list, &file->insn_list);
302                         nr_insns++;
303                 }
304
305                 list_for_each_entry(func, &sec->symbol_list, list) {
306                         if (func->type != STT_FUNC || func->alias != func)
307                                 continue;
308
309                         if (!find_insn(file, sec, func->offset)) {
310                                 WARN("%s(): can't find starting instruction",
311                                      func->name);
312                                 return -1;
313                         }
314
315                         sym_for_each_insn(file, func, insn)
316                                 insn->func = func;
317                 }
318         }
319
320         if (stats)
321                 printf("nr_insns: %lu\n", nr_insns);
322
323         return 0;
324
325 err:
326         free(insn);
327         return ret;
328 }
329
330 static struct instruction *find_last_insn(struct objtool_file *file,
331                                           struct section *sec)
332 {
333         struct instruction *insn = NULL;
334         unsigned int offset;
335         unsigned int end = (sec->len > 10) ? sec->len - 10 : 0;
336
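        /*
         * Scan backwards from the end of the section, at most 10 bytes, for
         * the start of the last decoded instruction.
         */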
337         for (offset = sec->len - 1; offset >= end && !insn; offset--)
338                 insn = find_insn(file, sec, offset);
339
340         return insn;
341 }
342
343 /*
344  * Mark "ud2" instructions and manually annotated dead ends.
345  */
346 static int add_dead_ends(struct objtool_file *file)
347 {
348         struct section *sec;
349         struct reloc *reloc;
350         struct instruction *insn;
351
352         /*
353          * By default, "ud2" is a dead end unless otherwise annotated, because
354          * GCC 7 inserts it for certain divide-by-zero cases.
355          */
356         for_each_insn(file, insn)
357                 if (insn->type == INSN_BUG)
358                         insn->dead_end = true;
359
360         /*
361          * Check for manually annotated dead ends.
362          */
363         sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
364         if (!sec)
365                 goto reachable;
366
367         list_for_each_entry(reloc, &sec->reloc_list, list) {
368                 if (reloc->sym->type != STT_SECTION) {
369                         WARN("unexpected relocation symbol type in %s", sec->name);
370                         return -1;
371                 }
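                /*
                 * The annotation normally points just past the dead-end
                 * instruction, so step back one insn; if it points at the
                 * very end of the section, fall back to the section's last
                 * instruction.
                 */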
372                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
373                 if (insn)
374                         insn = list_prev_entry(insn, list);
375                 else if (reloc->addend == reloc->sym->sec->len) {
376                         insn = find_last_insn(file, reloc->sym->sec);
377                         if (!insn) {
378                                 WARN("can't find unreachable insn at %s+0x%x",
379                                      reloc->sym->sec->name, reloc->addend);
380                                 return -1;
381                         }
382                 } else {
383                         WARN("can't find unreachable insn at %s+0x%x",
384                              reloc->sym->sec->name, reloc->addend);
385                         return -1;
386                 }
387
388                 insn->dead_end = true;
389         }
390
391 reachable:
392         /*
393          * These manually annotated reachable checks are needed for GCC 4.4,
394          * where the Linux unreachable() macro isn't supported.  In that case
395          * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
396          * not a dead end.
397          */
398         sec = find_section_by_name(file->elf, ".rela.discard.reachable");
399         if (!sec)
400                 return 0;
401
402         list_for_each_entry(reloc, &sec->reloc_list, list) {
403                 if (reloc->sym->type != STT_SECTION) {
404                         WARN("unexpected relocation symbol type in %s", sec->name);
405                         return -1;
406                 }
407                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
408                 if (insn)
409                         insn = list_prev_entry(insn, list);
410                 else if (reloc->addend == reloc->sym->sec->len) {
411                         insn = find_last_insn(file, reloc->sym->sec);
412                         if (!insn) {
413                                 WARN("can't find reachable insn at %s+0x%x",
414                                      reloc->sym->sec->name, reloc->addend);
415                                 return -1;
416                         }
417                 } else {
418                         WARN("can't find reachable insn at %s+0x%x",
419                              reloc->sym->sec->name, reloc->addend);
420                         return -1;
421                 }
422
423                 insn->dead_end = false;
424         }
425
426         return 0;
427 }
428
429 static int create_static_call_sections(struct objtool_file *file)
430 {
431         struct section *sec, *reloc_sec;
432         struct reloc *reloc;
433         struct static_call_site *site;
434         struct instruction *insn;
435         struct symbol *key_sym;
436         char *key_name, *tmp;
437         int idx;
438
439         sec = find_section_by_name(file->elf, ".static_call_sites");
440         if (sec) {
441                 INIT_LIST_HEAD(&file->static_call_list);
442                 WARN("file already has .static_call_sites section, skipping");
443                 return 0;
444         }
445
446         if (list_empty(&file->static_call_list))
447                 return 0;
448
449         idx = 0;
450         list_for_each_entry(insn, &file->static_call_list, static_call_node)
451                 idx++;
452
453         sec = elf_create_section(file->elf, ".static_call_sites", SHF_WRITE,
454                                  sizeof(struct static_call_site), idx);
455         if (!sec)
456                 return -1;
457
458         reloc_sec = elf_create_reloc_section(file->elf, sec, SHT_RELA);
459         if (!reloc_sec)
460                 return -1;
461
462         idx = 0;
463         list_for_each_entry(insn, &file->static_call_list, static_call_node) {
464
465                 site = (struct static_call_site *)sec->data->d_buf + idx;
466                 memset(site, 0, sizeof(struct static_call_site));
467
468                 /* populate reloc for 'addr' */
469                 reloc = malloc(sizeof(*reloc));
470
471                 if (!reloc) {
472                         perror("malloc");
473                         return -1;
474                 }
475                 memset(reloc, 0, sizeof(*reloc));
476
477                 insn_to_reloc_sym_addend(insn->sec, insn->offset, reloc);
478                 if (!reloc->sym) {
479                         WARN_FUNC("static call tramp: missing containing symbol",
480                                   insn->sec, insn->offset);
481                         return -1;
482                 }
483
484                 reloc->type = R_X86_64_PC32;
485                 reloc->offset = idx * sizeof(struct static_call_site);
486                 reloc->sec = reloc_sec;
487                 elf_add_reloc(file->elf, reloc);
488
489                 /* find key symbol */
490                 key_name = strdup(insn->call_dest->name);
491                 if (!key_name) {
492                         perror("strdup");
493                         return -1;
494                 }
495                 if (strncmp(key_name, STATIC_CALL_TRAMP_PREFIX_STR,
496                             STATIC_CALL_TRAMP_PREFIX_LEN)) {
497                         WARN("static_call: trampoline name malformed: %s", key_name);
498                         return -1;
499                 }
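                /*
                 * Derive the static_call_key symbol name by overwriting the
                 * trampoline prefix with the key prefix in place; the prefix
                 * lengths are such that the result still fits in the
                 * strdup()'d buffer.
                 */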
500                 tmp = key_name + STATIC_CALL_TRAMP_PREFIX_LEN - STATIC_CALL_KEY_PREFIX_LEN;
501                 memcpy(tmp, STATIC_CALL_KEY_PREFIX_STR, STATIC_CALL_KEY_PREFIX_LEN);
502
503                 key_sym = find_symbol_by_name(file->elf, tmp);
504                 if (!key_sym) {
505                         WARN("static_call: can't find static_call_key symbol: %s", tmp);
506                         return -1;
507                 }
508                 free(key_name);
509
510                 /* populate reloc for 'key' */
511                 reloc = malloc(sizeof(*reloc));
512                 if (!reloc) {
513                         perror("malloc");
514                         return -1;
515                 }
516                 memset(reloc, 0, sizeof(*reloc));
517                 reloc->sym = key_sym;
518                 reloc->addend = is_sibling_call(insn) ? STATIC_CALL_SITE_TAIL : 0;
519                 reloc->type = R_X86_64_PC32;
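                /* The 'key' field is the second 32-bit word of struct static_call_site. */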
520                 reloc->offset = idx * sizeof(struct static_call_site) + 4;
521                 reloc->sec = reloc_sec;
522                 elf_add_reloc(file->elf, reloc);
523
524                 idx++;
525         }
526
527         if (elf_rebuild_reloc_section(file->elf, reloc_sec))
528                 return -1;
529
530         return 0;
531 }
532
533 /*
534  * Warnings shouldn't be reported for ignored functions.
535  */
536 static void add_ignores(struct objtool_file *file)
537 {
538         struct instruction *insn;
539         struct section *sec;
540         struct symbol *func;
541         struct reloc *reloc;
542
543         sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
544         if (!sec)
545                 return;
546
547         list_for_each_entry(reloc, &sec->reloc_list, list) {
548                 switch (reloc->sym->type) {
549                 case STT_FUNC:
550                         func = reloc->sym;
551                         break;
552
553                 case STT_SECTION:
554                         func = find_func_by_offset(reloc->sym->sec, reloc->addend);
555                         if (!func)
556                                 continue;
557                         break;
558
559                 default:
560                         WARN("unexpected relocation symbol type in %s: %d", sec->name, reloc->sym->type);
561                         continue;
562                 }
563
564                 func_for_each_insn(file, func, insn)
565                         insn->ignore = true;
566         }
567 }
568
569 /*
570  * This is a whitelist of functions that are allowed to be called with AC set.
571  * The list is meant to be minimal and only contains compiler instrumentation
572  * ABI and a few functions used to implement *_{to,from}_user() functions.
573  *
574  * These functions must not directly change AC, but may PUSHF/POPF.
575  */
576 static const char *uaccess_safe_builtin[] = {
577         /* KASAN */
578         "kasan_report",
579         "check_memory_region",
580         /* KASAN out-of-line */
581         "__asan_loadN_noabort",
582         "__asan_load1_noabort",
583         "__asan_load2_noabort",
584         "__asan_load4_noabort",
585         "__asan_load8_noabort",
586         "__asan_load16_noabort",
587         "__asan_storeN_noabort",
588         "__asan_store1_noabort",
589         "__asan_store2_noabort",
590         "__asan_store4_noabort",
591         "__asan_store8_noabort",
592         "__asan_store16_noabort",
593         "__kasan_check_read",
594         "__kasan_check_write",
595         /* KASAN in-line */
596         "__asan_report_load_n_noabort",
597         "__asan_report_load1_noabort",
598         "__asan_report_load2_noabort",
599         "__asan_report_load4_noabort",
600         "__asan_report_load8_noabort",
601         "__asan_report_load16_noabort",
602         "__asan_report_store_n_noabort",
603         "__asan_report_store1_noabort",
604         "__asan_report_store2_noabort",
605         "__asan_report_store4_noabort",
606         "__asan_report_store8_noabort",
607         "__asan_report_store16_noabort",
608         /* KCSAN */
609         "__kcsan_check_access",
610         "kcsan_found_watchpoint",
611         "kcsan_setup_watchpoint",
612         "kcsan_check_scoped_accesses",
613         "kcsan_disable_current",
614         "kcsan_enable_current_nowarn",
615         /* KCSAN/TSAN */
616         "__tsan_func_entry",
617         "__tsan_func_exit",
618         "__tsan_read_range",
619         "__tsan_write_range",
620         "__tsan_read1",
621         "__tsan_read2",
622         "__tsan_read4",
623         "__tsan_read8",
624         "__tsan_read16",
625         "__tsan_write1",
626         "__tsan_write2",
627         "__tsan_write4",
628         "__tsan_write8",
629         "__tsan_write16",
630         "__tsan_read_write1",
631         "__tsan_read_write2",
632         "__tsan_read_write4",
633         "__tsan_read_write8",
634         "__tsan_read_write16",
635         "__tsan_atomic8_load",
636         "__tsan_atomic16_load",
637         "__tsan_atomic32_load",
638         "__tsan_atomic64_load",
639         "__tsan_atomic8_store",
640         "__tsan_atomic16_store",
641         "__tsan_atomic32_store",
642         "__tsan_atomic64_store",
643         "__tsan_atomic8_exchange",
644         "__tsan_atomic16_exchange",
645         "__tsan_atomic32_exchange",
646         "__tsan_atomic64_exchange",
647         "__tsan_atomic8_fetch_add",
648         "__tsan_atomic16_fetch_add",
649         "__tsan_atomic32_fetch_add",
650         "__tsan_atomic64_fetch_add",
651         "__tsan_atomic8_fetch_sub",
652         "__tsan_atomic16_fetch_sub",
653         "__tsan_atomic32_fetch_sub",
654         "__tsan_atomic64_fetch_sub",
655         "__tsan_atomic8_fetch_and",
656         "__tsan_atomic16_fetch_and",
657         "__tsan_atomic32_fetch_and",
658         "__tsan_atomic64_fetch_and",
659         "__tsan_atomic8_fetch_or",
660         "__tsan_atomic16_fetch_or",
661         "__tsan_atomic32_fetch_or",
662         "__tsan_atomic64_fetch_or",
663         "__tsan_atomic8_fetch_xor",
664         "__tsan_atomic16_fetch_xor",
665         "__tsan_atomic32_fetch_xor",
666         "__tsan_atomic64_fetch_xor",
667         "__tsan_atomic8_fetch_nand",
668         "__tsan_atomic16_fetch_nand",
669         "__tsan_atomic32_fetch_nand",
670         "__tsan_atomic64_fetch_nand",
671         "__tsan_atomic8_compare_exchange_strong",
672         "__tsan_atomic16_compare_exchange_strong",
673         "__tsan_atomic32_compare_exchange_strong",
674         "__tsan_atomic64_compare_exchange_strong",
675         "__tsan_atomic8_compare_exchange_weak",
676         "__tsan_atomic16_compare_exchange_weak",
677         "__tsan_atomic32_compare_exchange_weak",
678         "__tsan_atomic64_compare_exchange_weak",
679         "__tsan_atomic8_compare_exchange_val",
680         "__tsan_atomic16_compare_exchange_val",
681         "__tsan_atomic32_compare_exchange_val",
682         "__tsan_atomic64_compare_exchange_val",
683         "__tsan_atomic_thread_fence",
684         "__tsan_atomic_signal_fence",
685         /* KCOV */
686         "write_comp_data",
687         "check_kcov_mode",
688         "__sanitizer_cov_trace_pc",
689         "__sanitizer_cov_trace_const_cmp1",
690         "__sanitizer_cov_trace_const_cmp2",
691         "__sanitizer_cov_trace_const_cmp4",
692         "__sanitizer_cov_trace_const_cmp8",
693         "__sanitizer_cov_trace_cmp1",
694         "__sanitizer_cov_trace_cmp2",
695         "__sanitizer_cov_trace_cmp4",
696         "__sanitizer_cov_trace_cmp8",
697         "__sanitizer_cov_trace_switch",
698         /* UBSAN */
699         "ubsan_type_mismatch_common",
700         "__ubsan_handle_type_mismatch",
701         "__ubsan_handle_type_mismatch_v1",
702         "__ubsan_handle_shift_out_of_bounds",
703         /* misc */
704         "csum_partial_copy_generic",
705         "copy_mc_fragile",
706         "copy_mc_fragile_handle_tail",
707         "copy_mc_enhanced_fast_string",
708         "ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
709         NULL
710 };
711
712 static void add_uaccess_safe(struct objtool_file *file)
713 {
714         struct symbol *func;
715         const char **name;
716
717         if (!uaccess)
718                 return;
719
720         for (name = uaccess_safe_builtin; *name; name++) {
721                 func = find_symbol_by_name(file->elf, *name);
722                 if (!func)
723                         continue;
724
725                 func->uaccess_safe = true;
726         }
727 }
728
729 /*
730  * FIXME: For now, just ignore any alternatives which add retpolines.  This is
731  * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
732  * But it at least allows objtool to understand the control flow *around* the
733  * retpoline.
734  */
735 static int add_ignore_alternatives(struct objtool_file *file)
736 {
737         struct section *sec;
738         struct reloc *reloc;
739         struct instruction *insn;
740
741         sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
742         if (!sec)
743                 return 0;
744
745         list_for_each_entry(reloc, &sec->reloc_list, list) {
746                 if (reloc->sym->type != STT_SECTION) {
747                         WARN("unexpected relocation symbol type in %s", sec->name);
748                         return -1;
749                 }
750
751                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
752                 if (!insn) {
753                         WARN("bad .discard.ignore_alts entry");
754                         return -1;
755                 }
756
757                 insn->ignore_alts = true;
758         }
759
760         return 0;
761 }
762
763 /*
764  * Find the destination instructions for all jumps.
765  */
766 static int add_jump_destinations(struct objtool_file *file)
767 {
768         struct instruction *insn;
769         struct reloc *reloc;
770         struct section *dest_sec;
771         unsigned long dest_off;
772
773         for_each_insn(file, insn) {
774                 if (!is_static_jump(insn))
775                         continue;
776
777                 if (insn->offset == FAKE_JUMP_OFFSET)
778                         continue;
779
780                 reloc = find_reloc_by_dest_range(file->elf, insn->sec,
781                                                insn->offset, insn->len);
782                 if (!reloc) {
783                         dest_sec = insn->sec;
784                         dest_off = arch_jump_destination(insn);
785                 } else if (reloc->sym->type == STT_SECTION) {
786                         dest_sec = reloc->sym->sec;
787                         dest_off = arch_dest_reloc_offset(reloc->addend);
788                 } else if (reloc->sym->sec->idx) {
789                         dest_sec = reloc->sym->sec;
790                         dest_off = reloc->sym->sym.st_value +
791                                    arch_dest_reloc_offset(reloc->addend);
792                 } else if (strstr(reloc->sym->name, "_indirect_thunk_")) {
793                         /*
794                          * Retpoline jumps are really dynamic jumps in
795                          * disguise, so convert them accordingly.
796                          */
797                         if (insn->type == INSN_JUMP_UNCONDITIONAL)
798                                 insn->type = INSN_JUMP_DYNAMIC;
799                         else
800                                 insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
801
802                         insn->retpoline_safe = true;
803                         continue;
804                 } else {
805                         /* external sibling call */
806                         insn->call_dest = reloc->sym;
807                         if (insn->call_dest->static_call_tramp) {
808                                 list_add_tail(&insn->static_call_node,
809                                               &file->static_call_list);
810                         }
811                         continue;
812                 }
813
814                 insn->jump_dest = find_insn(file, dest_sec, dest_off);
815                 if (!insn->jump_dest) {
816
817                         /*
818                          * This is a special case where an alt instruction
819                          * jumps past the end of the section.  These are
820                          * handled later in handle_group_alt().
821                          */
822                         if (!strcmp(insn->sec->name, ".altinstr_replacement"))
823                                 continue;
824
825                         WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
826                                   insn->sec, insn->offset, dest_sec->name,
827                                   dest_off);
828                         return -1;
829                 }
830
831                 /*
832                  * Cross-function jump.
833                  */
834                 if (insn->func && insn->jump_dest->func &&
835                     insn->func != insn->jump_dest->func) {
836
837                         /*
838                          * For GCC 8+, create parent/child links for any cold
839                          * subfunctions.  This is _mostly_ redundant with a
840                          * similar initialization in read_symbols().
841                          *
842                          * If a function has aliases, we want the *first* such
843                          * function in the symbol table to be the subfunction's
844                          * parent.  In that case we overwrite the
845                          * initialization done in read_symbols().
846                          *
847                          * However this code can't completely replace the
848                          * read_symbols() code because this doesn't detect the
849                          * case where the parent function's only reference to a
850                          * subfunction is through a jump table.
851                          */
852                         if (!strstr(insn->func->name, ".cold.") &&
853                             strstr(insn->jump_dest->func->name, ".cold.")) {
854                                 insn->func->cfunc = insn->jump_dest->func;
855                                 insn->jump_dest->func->pfunc = insn->func;
856
857                         } else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
858                                    insn->jump_dest->offset == insn->jump_dest->func->offset) {
859
860                                 /* internal sibling call */
861                                 insn->call_dest = insn->jump_dest->func;
862                                 if (insn->call_dest->static_call_tramp) {
863                                         list_add_tail(&insn->static_call_node,
864                                                       &file->static_call_list);
865                                 }
866                         }
867                 }
868         }
869
870         return 0;
871 }
872
873 static void remove_insn_ops(struct instruction *insn)
874 {
875         struct stack_op *op, *tmp;
876
877         list_for_each_entry_safe(op, tmp, &insn->stack_ops, list) {
878                 list_del(&op->list);
879                 free(op);
880         }
881 }
882
883 static struct symbol *find_call_destination(struct section *sec, unsigned long offset)
884 {
885         struct symbol *call_dest;
886
887         call_dest = find_func_by_offset(sec, offset);
888         if (!call_dest)
889                 call_dest = find_symbol_by_offset(sec, offset);
890
891         return call_dest;
892 }
893
894 /*
895  * Find the destination instructions for all calls.
896  */
897 static int add_call_destinations(struct objtool_file *file)
898 {
899         struct instruction *insn;
900         unsigned long dest_off;
901         struct reloc *reloc;
902
903         for_each_insn(file, insn) {
904                 if (insn->type != INSN_CALL)
905                         continue;
906
907                 reloc = find_reloc_by_dest_range(file->elf, insn->sec,
908                                                insn->offset, insn->len);
909                 if (!reloc) {
910                         dest_off = arch_jump_destination(insn);
911                         insn->call_dest = find_call_destination(insn->sec, dest_off);
912
913                         if (insn->ignore)
914                                 continue;
915
916                         if (!insn->call_dest) {
917                                 WARN_FUNC("unannotated intra-function call", insn->sec, insn->offset);
918                                 return -1;
919                         }
920
921                         if (insn->func && insn->call_dest->type != STT_FUNC) {
922                                 WARN_FUNC("unsupported call to non-function",
923                                           insn->sec, insn->offset);
924                                 return -1;
925                         }
926
927                 } else if (reloc->sym->type == STT_SECTION) {
928                         dest_off = arch_dest_reloc_offset(reloc->addend);
929                         insn->call_dest = find_call_destination(reloc->sym->sec,
930                                                                 dest_off);
931                         if (!insn->call_dest) {
932                                 WARN_FUNC("can't find call dest symbol at %s+0x%lx",
933                                           insn->sec, insn->offset,
934                                           reloc->sym->sec->name,
935                                           dest_off);
936                                 return -1;
937                         }
938                 } else
939                         insn->call_dest = reloc->sym;
940
941                 /*
942                  * Many compilers cannot disable KCOV with a function attribute
943                  * so they need a little help: NOP out any KCOV calls from noinstr
944                  * text.
945                  */
946                 if (insn->sec->noinstr &&
947                     !strncmp(insn->call_dest->name, "__sanitizer_cov_", 16)) {
948                         if (reloc) {
949                                 reloc->type = R_NONE;
950                                 elf_write_reloc(file->elf, reloc);
951                         }
952
953                         elf_write_insn(file->elf, insn->sec,
954                                        insn->offset, insn->len,
955                                        arch_nop_insn(insn->len));
956                         insn->type = INSN_NOP;
957                 }
958
959                 /*
960                  * Whatever stack impact regular CALLs have should be undone
961                  * by the RETURN of the called function.
962                  *
963                  * Annotated intra-function calls retain the stack_ops but
964                  * are converted to JUMP, see read_intra_function_calls().
965                  */
966                 remove_insn_ops(insn);
967         }
968
969         return 0;
970 }
971
972 /*
973  * The .alternatives section requires some extra special care, over and above
974  * what other special sections require:
975  *
976  * 1. Because alternatives are patched in-place, we need to insert a fake jump
977  *    instruction at the end so that validate_branch() skips all the original
978  *    replaced instructions when validating the new instruction path.
979  *
980  * 2. An added wrinkle is that the new instruction length might be zero.  In
981  *    that case the old instructions are replaced with noops.  We simulate that
982  *    by creating a fake jump as the only new instruction.
983  *
984  * 3. In some cases, the alternative section includes an instruction which
985  *    conditionally jumps to the _end_ of the entry.  We have to modify these
986  *    jumps' destinations to point back to .text rather than the end of the
987  *    entry in .altinstr_replacement.
988  */
989 static int handle_group_alt(struct objtool_file *file,
990                             struct special_alt *special_alt,
991                             struct instruction *orig_insn,
992                             struct instruction **new_insn)
993 {
994         static unsigned int alt_group_next_index = 1;
995         struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
996         unsigned int alt_group = alt_group_next_index++;
997         unsigned long dest_off;
998
999         last_orig_insn = NULL;
1000         insn = orig_insn;
1001         sec_for_each_insn_from(file, insn) {
1002                 if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
1003                         break;
1004
1005                 insn->alt_group = alt_group;
1006                 last_orig_insn = insn;
1007         }
1008
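        /*
         * Only create the fake jump if an instruction follows the original
         * group; its jump destination is the first instruction after the
         * replaced range.
         */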
1009         if (next_insn_same_sec(file, last_orig_insn)) {
1010                 fake_jump = malloc(sizeof(*fake_jump));
1011                 if (!fake_jump) {
1012                         WARN("malloc failed");
1013                         return -1;
1014                 }
1015                 memset(fake_jump, 0, sizeof(*fake_jump));
1016                 INIT_LIST_HEAD(&fake_jump->alts);
1017                 INIT_LIST_HEAD(&fake_jump->stack_ops);
1018                 init_cfi_state(&fake_jump->cfi);
1019
1020                 fake_jump->sec = special_alt->new_sec;
1021                 fake_jump->offset = FAKE_JUMP_OFFSET;
1022                 fake_jump->type = INSN_JUMP_UNCONDITIONAL;
1023                 fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
1024                 fake_jump->func = orig_insn->func;
1025         }
1026
1027         if (!special_alt->new_len) {
1028                 if (!fake_jump) {
1029                         WARN("%s: empty alternative at end of section",
1030                              special_alt->orig_sec->name);
1031                         return -1;
1032                 }
1033
1034                 *new_insn = fake_jump;
1035                 return 0;
1036         }
1037
1038         last_new_insn = NULL;
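        /* The replacement instructions get their own, distinct alt_group. */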
1039         alt_group = alt_group_next_index++;
1040         insn = *new_insn;
1041         sec_for_each_insn_from(file, insn) {
1042                 struct reloc *alt_reloc;
1043
1044                 if (insn->offset >= special_alt->new_off + special_alt->new_len)
1045                         break;
1046
1047                 last_new_insn = insn;
1048
1049                 insn->ignore = orig_insn->ignore_alts;
1050                 insn->func = orig_insn->func;
1051                 insn->alt_group = alt_group;
1052
1053                 /*
1054                  * Since alternative replacement code is copy/pasted by the
1055                  * kernel after applying relocations, generally such code can't
1056                  * have relative-address relocation references to outside the
1057                  * .altinstr_replacement section, unless the arch's
1058                  * alternatives code can adjust the relative offsets
1059                  * accordingly.
1060                  */
1061                 alt_reloc = find_reloc_by_dest_range(file->elf, insn->sec,
1062                                                    insn->offset, insn->len);
1063                 if (alt_reloc &&
1064                     !arch_support_alt_relocation(special_alt, insn, alt_reloc)) {
1065
1066                         WARN_FUNC("unsupported relocation in alternatives section",
1067                                   insn->sec, insn->offset);
1068                         return -1;
1069                 }
1070
1071                 if (!is_static_jump(insn))
1072                         continue;
1073
1074                 if (!insn->immediate)
1075                         continue;
1076
1077                 dest_off = arch_jump_destination(insn);
1078                 if (dest_off == special_alt->new_off + special_alt->new_len) {
1079                         if (!fake_jump) {
1080                                 WARN("%s: alternative jump to end of section",
1081                                      special_alt->orig_sec->name);
1082                                 return -1;
1083                         }
1084                         insn->jump_dest = fake_jump;
1085                 }
1086
1087                 if (!insn->jump_dest) {
1088                         WARN_FUNC("can't find alternative jump destination",
1089                                   insn->sec, insn->offset);
1090                         return -1;
1091                 }
1092         }
1093
1094         if (!last_new_insn) {
1095                 WARN_FUNC("can't find last new alternative instruction",
1096                           special_alt->new_sec, special_alt->new_off);
1097                 return -1;
1098         }
1099
1100         if (fake_jump)
1101                 list_add(&fake_jump->list, &last_new_insn->list);
1102
1103         return 0;
1104 }
1105
1106 /*
1107  * A jump table entry can either convert a nop to a jump or a jump to a nop.
1108  * If the original instruction is a jump, make the alt entry an effective nop
1109  * by just skipping the original instruction.
1110  */
1111 static int handle_jump_alt(struct objtool_file *file,
1112                            struct special_alt *special_alt,
1113                            struct instruction *orig_insn,
1114                            struct instruction **new_insn)
1115 {
1116         if (orig_insn->type == INSN_NOP)
1117                 return 0;
1118
1119         if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
1120                 WARN_FUNC("unsupported instruction at jump label",
1121                           orig_insn->sec, orig_insn->offset);
1122                 return -1;
1123         }
1124
1125         *new_insn = list_next_entry(orig_insn, list);
1126         return 0;
1127 }
1128
1129 /*
1130  * Read all the special sections which have alternate instructions which can be
1131  * patched in or redirected to at runtime.  Each instruction having alternate
1132  * instruction(s) has them added to its insn->alts list, which will be
1133  * traversed in validate_branch().
1134  */
1135 static int add_special_section_alts(struct objtool_file *file)
1136 {
1137         struct list_head special_alts;
1138         struct instruction *orig_insn, *new_insn;
1139         struct special_alt *special_alt, *tmp;
1140         struct alternative *alt;
1141         int ret;
1142
1143         ret = special_get_alts(file->elf, &special_alts);
1144         if (ret)
1145                 return ret;
1146
1147         list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
1148
1149                 orig_insn = find_insn(file, special_alt->orig_sec,
1150                                       special_alt->orig_off);
1151                 if (!orig_insn) {
1152                         WARN_FUNC("special: can't find orig instruction",
1153                                   special_alt->orig_sec, special_alt->orig_off);
1154                         ret = -1;
1155                         goto out;
1156                 }
1157
1158                 new_insn = NULL;
1159                 if (!special_alt->group || special_alt->new_len) {
1160                         new_insn = find_insn(file, special_alt->new_sec,
1161                                              special_alt->new_off);
1162                         if (!new_insn) {
1163                                 WARN_FUNC("special: can't find new instruction",
1164                                           special_alt->new_sec,
1165                                           special_alt->new_off);
1166                                 ret = -1;
1167                                 goto out;
1168                         }
1169                 }
1170
1171                 if (special_alt->group) {
1172                         if (!special_alt->orig_len) {
1173                                 WARN_FUNC("empty alternative entry",
1174                                           orig_insn->sec, orig_insn->offset);
1175                                 continue;
1176                         }
1177
1178                         ret = handle_group_alt(file, special_alt, orig_insn,
1179                                                &new_insn);
1180                         if (ret)
1181                                 goto out;
1182                 } else if (special_alt->jump_or_nop) {
1183                         ret = handle_jump_alt(file, special_alt, orig_insn,
1184                                               &new_insn);
1185                         if (ret)
1186                                 goto out;
1187                 }
1188
1189                 alt = malloc(sizeof(*alt));
1190                 if (!alt) {
1191                         WARN("malloc failed");
1192                         ret = -1;
1193                         goto out;
1194                 }
1195
1196                 alt->insn = new_insn;
1197                 alt->skip_orig = special_alt->skip_orig;
1198                 orig_insn->ignore_alts |= special_alt->skip_alt;
1199                 list_add_tail(&alt->list, &orig_insn->alts);
1200
1201                 list_del(&special_alt->list);
1202                 free(special_alt);
1203         }
1204
1205 out:
1206         return ret;
1207 }
1208
1209 static int add_jump_table(struct objtool_file *file, struct instruction *insn,
1210                             struct reloc *table)
1211 {
1212         struct reloc *reloc = table;
1213         struct instruction *dest_insn;
1214         struct alternative *alt;
1215         struct symbol *pfunc = insn->func->pfunc;
1216         unsigned int prev_offset = 0;
1217
1218         /*
1219          * Each @reloc is a switch table relocation which points to the target
1220          * instruction.
1221          */
1222         list_for_each_entry_from(reloc, &table->sec->reloc_list, list) {
1223
1224                 /* Check for the end of the table: */
1225                 if (reloc != table && reloc->jump_table_start)
1226                         break;
1227
1228                 /* Make sure the table entries are consecutive: */
1229                 if (prev_offset && reloc->offset != prev_offset + 8)
1230                         break;
1231
1232                 /* Detect function pointers from contiguous objects: */
1233                 if (reloc->sym->sec == pfunc->sec &&
1234                     reloc->addend == pfunc->offset)
1235                         break;
1236
1237                 dest_insn = find_insn(file, reloc->sym->sec, reloc->addend);
1238                 if (!dest_insn)
1239                         break;
1240
1241                 /* Make sure the destination is in the same function: */
1242                 if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
1243                         break;
1244
1245                 alt = malloc(sizeof(*alt));
1246                 if (!alt) {
1247                         WARN("malloc failed");
1248                         return -1;
1249                 }
1250
1251                 alt->insn = dest_insn;
1252                 list_add_tail(&alt->list, &insn->alts);
1253                 prev_offset = reloc->offset;
1254         }
1255
1256         if (!prev_offset) {
1257                 WARN_FUNC("can't find switch jump table",
1258                           insn->sec, insn->offset);
1259                 return -1;
1260         }
1261
1262         return 0;
1263 }
1264
1265 /*
1266  * find_jump_table() - Given a dynamic jump, find the switch jump table
1267  * associated with it.
1268  */
1269 static struct reloc *find_jump_table(struct objtool_file *file,
1270                                       struct symbol *func,
1271                                       struct instruction *insn)
1272 {
1273         struct reloc *table_reloc;
1274         struct instruction *dest_insn, *orig_insn = insn;
1275
1276         /*
1277          * Backward search using the @first_jump_src links; these help skip
1278          * much of the 'in between' code, which could otherwise confuse the
1279          * search.
1280          */
1281         for (;
1282              insn && insn->func && insn->func->pfunc == func;
1283              insn = insn->first_jump_src ?: prev_insn_same_sym(file, insn)) {
1284
1285                 if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1286                         break;
1287
1288                 /* allow small jumps within the range */
1289                 if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1290                     insn->jump_dest &&
1291                     (insn->jump_dest->offset <= insn->offset ||
1292                      insn->jump_dest->offset > orig_insn->offset))
1293                     break;
1294
1295                 table_reloc = arch_find_switch_table(file, insn);
1296                 if (!table_reloc)
1297                         continue;
1298                 dest_insn = find_insn(file, table_reloc->sym->sec, table_reloc->addend);
1299                 if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func)
1300                         continue;
1301
1302                 return table_reloc;
1303         }
1304
1305         return NULL;
1306 }
1307
1308 /*
1309  * First pass: Mark the head of each jump table so that in the next pass,
1310  * we know when a given jump table ends and the next one starts.
1311  */
1312 static void mark_func_jump_tables(struct objtool_file *file,
1313                                     struct symbol *func)
1314 {
1315         struct instruction *insn, *last = NULL;
1316         struct reloc *reloc;
1317
1318         func_for_each_insn(file, func, insn) {
1319                 if (!last)
1320                         last = insn;
1321
1322                 /*
1323                  * Store back-pointers for unconditional forward jumps such
1324                  * that find_jump_table() can back-track using those and
1325                  * avoid some potentially confusing code.
1326                  */
1327                 if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1328                     insn->offset > last->offset &&
1329                     insn->jump_dest->offset > insn->offset &&
1330                     !insn->jump_dest->first_jump_src) {
1331
1332                         insn->jump_dest->first_jump_src = insn;
1333                         last = insn->jump_dest;
1334                 }
1335
1336                 if (insn->type != INSN_JUMP_DYNAMIC)
1337                         continue;
1338
1339                 reloc = find_jump_table(file, func, insn);
1340                 if (reloc) {
1341                         reloc->jump_table_start = true;
1342                         insn->jump_table = reloc;
1343                 }
1344         }
1345 }
1346
1347 static int add_func_jump_tables(struct objtool_file *file,
1348                                   struct symbol *func)
1349 {
1350         struct instruction *insn;
1351         int ret;
1352
1353         func_for_each_insn(file, func, insn) {
1354                 if (!insn->jump_table)
1355                         continue;
1356
1357                 ret = add_jump_table(file, insn, insn->jump_table);
1358                 if (ret)
1359                         return ret;
1360         }
1361
1362         return 0;
1363 }
1364
1365 /*
1366  * For some switch statements, gcc generates a jump table in the .rodata
1367  * section which contains a list of addresses within the function to jump to.
1368  * This finds these jump tables and adds them to the insn->alts lists.
1369  */
1370 static int add_jump_table_alts(struct objtool_file *file)
1371 {
1372         struct section *sec;
1373         struct symbol *func;
1374         int ret;
1375
1376         if (!file->rodata)
1377                 return 0;
1378
1379         for_each_sec(file, sec) {
1380                 list_for_each_entry(func, &sec->symbol_list, list) {
1381                         if (func->type != STT_FUNC)
1382                                 continue;
1383
1384                         mark_func_jump_tables(file, func);
1385                         ret = add_func_jump_tables(file, func);
1386                         if (ret)
1387                                 return ret;
1388                 }
1389         }
1390
1391         return 0;
1392 }
1393
1394 static int read_unwind_hints(struct objtool_file *file)
1395 {
1396         struct section *sec, *relocsec;
1397         struct reloc *reloc;
1398         struct unwind_hint *hint;
1399         struct instruction *insn;
1400         struct cfi_reg *cfa;
1401         int i;
1402
1403         sec = find_section_by_name(file->elf, ".discard.unwind_hints");
1404         if (!sec)
1405                 return 0;
1406
1407         relocsec = sec->reloc;
1408         if (!relocsec) {
1409                 WARN("missing .rela.discard.unwind_hints section");
1410                 return -1;
1411         }
1412
1413         if (sec->len % sizeof(struct unwind_hint)) {
1414                 WARN("struct unwind_hint size mismatch");
1415                 return -1;
1416         }
1417
1418         file->hints = true;
1419
1420         for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
1421                 hint = (struct unwind_hint *)sec->data->d_buf + i;
1422
1423                 reloc = find_reloc_by_dest(file->elf, sec, i * sizeof(*hint));
1424                 if (!reloc) {
1425                         WARN("can't find reloc for unwind_hints[%d]", i);
1426                         return -1;
1427                 }
1428
1429                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
1430                 if (!insn) {
1431                         WARN("can't find insn for unwind_hints[%d]", i);
1432                         return -1;
1433                 }
1434
1435                 cfa = &insn->cfi.cfa;
1436
1437                 if (hint->type == UNWIND_HINT_TYPE_RET_OFFSET) {
1438                         insn->ret_offset = hint->sp_offset;
1439                         continue;
1440                 }
1441
1442                 insn->hint = true;
1443
1444                 if (arch_decode_hint_reg(insn, hint->sp_reg)) {
1445                         WARN_FUNC("unsupported unwind_hint sp base reg %d",
1446                                   insn->sec, insn->offset, hint->sp_reg);
1447                         return -1;
1448                 }
1449
1450                 cfa->offset = hint->sp_offset;
1451                 insn->cfi.type = hint->type;
1452                 insn->cfi.end = hint->end;
1453         }
1454
1455         return 0;
1456 }
1457
1458 static int read_retpoline_hints(struct objtool_file *file)
1459 {
1460         struct section *sec;
1461         struct instruction *insn;
1462         struct reloc *reloc;
1463
1464         sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1465         if (!sec)
1466                 return 0;
1467
1468         list_for_each_entry(reloc, &sec->reloc_list, list) {
1469                 if (reloc->sym->type != STT_SECTION) {
1470                         WARN("unexpected relocation symbol type in %s", sec->name);
1471                         return -1;
1472                 }
1473
1474                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
1475                 if (!insn) {
1476                         WARN("bad .discard.retpoline_safe entry");
1477                         return -1;
1478                 }
1479
1480                 if (insn->type != INSN_JUMP_DYNAMIC &&
1481                     insn->type != INSN_CALL_DYNAMIC) {
1482                         WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1483                                   insn->sec, insn->offset);
1484                         return -1;
1485                 }
1486
1487                 insn->retpoline_safe = true;
1488         }
1489
1490         return 0;
1491 }
1492
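/*
 * Read the instrumentation_begin()/instrumentation_end() annotations from
 * .discard.instr_begin and .discard.instr_end and maintain a per-instruction
 * nesting counter (insn->instr), which the noinstr validation uses to decide
 * whether instrumentation is currently allowed.
 */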
1493 static int read_instr_hints(struct objtool_file *file)
1494 {
1495         struct section *sec;
1496         struct instruction *insn;
1497         struct reloc *reloc;
1498
1499         sec = find_section_by_name(file->elf, ".rela.discard.instr_end");
1500         if (!sec)
1501                 return 0;
1502
1503         list_for_each_entry(reloc, &sec->reloc_list, list) {
1504                 if (reloc->sym->type != STT_SECTION) {
1505                         WARN("unexpected relocation symbol type in %s", sec->name);
1506                         return -1;
1507                 }
1508
1509                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
1510                 if (!insn) {
1511                         WARN("bad .discard.instr_end entry");
1512                         return -1;
1513                 }
1514
1515                 insn->instr--;
1516         }
1517
1518         sec = find_section_by_name(file->elf, ".rela.discard.instr_begin");
1519         if (!sec)
1520                 return 0;
1521
1522         list_for_each_entry(reloc, &sec->reloc_list, list) {
1523                 if (reloc->sym->type != STT_SECTION) {
1524                         WARN("unexpected relocation symbol type in %s", sec->name);
1525                         return -1;
1526                 }
1527
1528                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
1529                 if (!insn) {
1530                         WARN("bad .discard.instr_begin entry");
1531                         return -1;
1532                 }
1533
1534                 insn->instr++;
1535         }
1536
1537         return 0;
1538 }
1539
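/*
 * Read the .discard.intra_function_calls annotations and turn each annotated
 * direct CALL into an unconditional jump, keeping its stack_op so the pushed
 * return address is still accounted for.
 */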
1540 static int read_intra_function_calls(struct objtool_file *file)
1541 {
1542         struct instruction *insn;
1543         struct section *sec;
1544         struct reloc *reloc;
1545
1546         sec = find_section_by_name(file->elf, ".rela.discard.intra_function_calls");
1547         if (!sec)
1548                 return 0;
1549
1550         list_for_each_entry(reloc, &sec->reloc_list, list) {
1551                 unsigned long dest_off;
1552
1553                 if (reloc->sym->type != STT_SECTION) {
1554                         WARN("unexpected relocation symbol type in %s",
1555                              sec->name);
1556                         return -1;
1557                 }
1558
1559                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
1560                 if (!insn) {
1561                         WARN("bad .discard.intra_function_calls entry");

1562                         return -1;
1563                 }
1564
1565                 if (insn->type != INSN_CALL) {
1566                         WARN_FUNC("intra_function_call not a direct call",
1567                                   insn->sec, insn->offset);
1568                         return -1;
1569                 }
1570
1571                 /*
1572                  * Treat intra-function CALLs as JMPs, but with a stack_op.
1573                  * See add_call_destinations(), which strips stack_ops from
1574                  * normal CALLs.
1575                  */
1576                 insn->type = INSN_JUMP_UNCONDITIONAL;
1577
1578                 dest_off = insn->offset + insn->len + insn->immediate;
1579                 insn->jump_dest = find_insn(file, insn->sec, dest_off);
1580                 if (!insn->jump_dest) {
1581                         WARN_FUNC("can't find call dest at %s+0x%lx",
1582                                   insn->sec, insn->offset,
1583                                   insn->sec->name, dest_off);
1584                         return -1;
1585                 }
1586         }
1587
1588         return 0;
1589 }
1590
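/*
 * Mark all global symbols whose name starts with STATIC_CALL_TRAMP_PREFIX_STR
 * as static call trampolines; validate_branch() later collects direct calls
 * to them into file->static_call_list.
 */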
1591 static int read_static_call_tramps(struct objtool_file *file)
1592 {
1593         struct section *sec;
1594         struct symbol *func;
1595
1596         for_each_sec(file, sec) {
1597                 list_for_each_entry(func, &sec->symbol_list, list) {
1598                         if (func->bind == STB_GLOBAL &&
1599                             !strncmp(func->name, STATIC_CALL_TRAMP_PREFIX_STR,
1600                                      strlen(STATIC_CALL_TRAMP_PREFIX_STR)))
1601                                 func->static_call_tramp = true;
1602                 }
1603         }
1604
1605         return 0;
1606 }
1607
1608 static void mark_rodata(struct objtool_file *file)
1609 {
1610         struct section *sec;
1611         bool found = false;
1612
1613         /*
1614          * Search for the following rodata sections, each of which can
1615          * potentially contain jump tables:
1616          *
1617          * - .rodata: can contain GCC switch tables
1618          * - .rodata.<func>: same, if -fdata-sections is being used
1619          * - .rodata..c_jump_table: contains C annotated jump tables
1620          *
1621          * .rodata.str1.* sections are ignored; they don't contain jump tables.
1622          */
1623         for_each_sec(file, sec) {
1624                 if (!strncmp(sec->name, ".rodata", 7) &&
1625                     !strstr(sec->name, ".str1.")) {
1626                         sec->rodata = true;
1627                         found = true;
1628                 }
1629         }
1630
1631         file->rodata = found;
1632 }
1633
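/*
 * Run all decode/annotation passes in order; the ordering matters (e.g.
 * read_intra_function_calls() rewrites annotated CALLs into jumps before
 * add_call_destinations() resolves call targets).
 */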
1634 static int decode_sections(struct objtool_file *file)
1635 {
1636         int ret;
1637
1638         mark_rodata(file);
1639
1640         ret = decode_instructions(file);
1641         if (ret)
1642                 return ret;
1643
1644         ret = add_dead_ends(file);
1645         if (ret)
1646                 return ret;
1647
1648         add_ignores(file);
1649         add_uaccess_safe(file);
1650
1651         ret = add_ignore_alternatives(file);
1652         if (ret)
1653                 return ret;
1654
1655         ret = read_static_call_tramps(file);
1656         if (ret)
1657                 return ret;
1658
1659         ret = add_jump_destinations(file);
1660         if (ret)
1661                 return ret;
1662
1663         ret = add_special_section_alts(file);
1664         if (ret)
1665                 return ret;
1666
1667         ret = read_intra_function_calls(file);
1668         if (ret)
1669                 return ret;
1670
1671         ret = add_call_destinations(file);
1672         if (ret)
1673                 return ret;
1674
1675         ret = add_jump_table_alts(file);
1676         if (ret)
1677                 return ret;
1678
1679         ret = read_unwind_hints(file);
1680         if (ret)
1681                 return ret;
1682
1683         ret = read_retpoline_hints(file);
1684         if (ret)
1685                 return ret;
1686
1687         ret = read_instr_hints(file);
1688         if (ret)
1689                 return ret;
1690
1691         return 0;
1692 }
1693
1694 static bool is_fentry_call(struct instruction *insn)
1695 {
1696         if (insn->type == INSN_CALL && insn->call_dest &&
1697             insn->call_dest->type == STT_NOTYPE &&
1698             !strcmp(insn->call_dest->name, "__fentry__"))
1699                 return true;
1700
1701         return false;
1702 }
1703
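/*
 * Check whether the stack frame at this point still matches the initial
 * function CFI (adjusted by any RET_OFFSET hint).  Used to decide whether a
 * return or sibling call here would leave a modified stack frame behind.
 */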
1704 static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state)
1705 {
1706         u8 ret_offset = insn->ret_offset;
1707         struct cfi_state *cfi = &state->cfi;
1708         int i;
1709
1710         if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap)
1711                 return true;
1712
1713         if (cfi->cfa.offset != initial_func_cfi.cfa.offset + ret_offset)
1714                 return true;
1715
1716         if (cfi->stack_size != initial_func_cfi.cfa.offset + ret_offset)
1717                 return true;
1718
1719         /*
1720          * If there is a ret offset hint then don't check registers
1721          * because a callee-saved register might have been pushed on
1722          * the stack.
1723          */
1724         if (ret_offset)
1725                 return false;
1726
1727         for (i = 0; i < CFI_NUM_REGS; i++) {
1728                 if (cfi->regs[i].base != initial_func_cfi.regs[i].base ||
1729                     cfi->regs[i].offset != initial_func_cfi.regs[i].offset)
1730                         return true;
1731         }
1732
1733         return false;
1734 }
1735
1736 static bool has_valid_stack_frame(struct insn_state *state)
1737 {
1738         struct cfi_state *cfi = &state->cfi;
1739
1740         if (cfi->cfa.base == CFI_BP && cfi->regs[CFI_BP].base == CFI_CFA &&
1741             cfi->regs[CFI_BP].offset == -16)
1742                 return true;
1743
1744         if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP)
1745                 return true;
1746
1747         return false;
1748 }
1749
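/*
 * For UNWIND_HINT_TYPE_REGS{,_PARTIAL} states, only track how pushes, pops
 * and immediate additions to %rsp move an SP-based CFA; all other stack ops
 * are ignored here.
 */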
1750 static int update_cfi_state_regs(struct instruction *insn,
1751                                   struct cfi_state *cfi,
1752                                   struct stack_op *op)
1753 {
1754         struct cfi_reg *cfa = &cfi->cfa;
1755
1756         if (cfa->base != CFI_SP && cfa->base != CFI_SP_INDIRECT)
1757                 return 0;
1758
1759         /* push */
1760         if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1761                 cfa->offset += 8;
1762
1763         /* pop */
1764         if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1765                 cfa->offset -= 8;
1766
1767         /* add immediate to sp */
1768         if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1769             op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1770                 cfa->offset -= op->src.offset;
1771
1772         return 0;
1773 }
1774
1775 static void save_reg(struct cfi_state *cfi, unsigned char reg, int base, int offset)
1776 {
1777         if (arch_callee_saved_reg(reg) &&
1778             cfi->regs[reg].base == CFI_UNDEFINED) {
1779                 cfi->regs[reg].base = base;
1780                 cfi->regs[reg].offset = offset;
1781         }
1782 }
1783
1784 static void restore_reg(struct cfi_state *cfi, unsigned char reg)
1785 {
1786         cfi->regs[reg].base = initial_func_cfi.regs[reg].base;
1787         cfi->regs[reg].offset = initial_func_cfi.regs[reg].offset;
1788 }
1789
1790 /*
1791  * A note about DRAP stack alignment:
1792  *
1793  * GCC has the concept of a DRAP register, which is used to help keep track of
1794  * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
1795  * register.  The typical DRAP pattern is:
1796  *
1797  *   4c 8d 54 24 08             lea    0x8(%rsp),%r10
1798  *   48 83 e4 c0                and    $0xffffffffffffffc0,%rsp
1799  *   41 ff 72 f8                pushq  -0x8(%r10)
1800  *   55                         push   %rbp
1801  *   48 89 e5                   mov    %rsp,%rbp
1802  *                              (more pushes)
1803  *   41 52                      push   %r10
1804  *                              ...
1805  *   41 5a                      pop    %r10
1806  *                              (more pops)
1807  *   5d                         pop    %rbp
1808  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
1809  *   c3                         retq
1810  *
1811  * There are some variations in the epilogues, like:
1812  *
1813  *   5b                         pop    %rbx
1814  *   41 5a                      pop    %r10
1815  *   41 5c                      pop    %r12
1816  *   41 5d                      pop    %r13
1817  *   41 5e                      pop    %r14
1818  *   c9                         leaveq
1819  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
1820  *   c3                         retq
1821  *
1822  * and:
1823  *
1824  *   4c 8b 55 e8                mov    -0x18(%rbp),%r10
1825  *   48 8b 5d e0                mov    -0x20(%rbp),%rbx
1826  *   4c 8b 65 f0                mov    -0x10(%rbp),%r12
1827  *   4c 8b 6d f8                mov    -0x8(%rbp),%r13
1828  *   c9                         leaveq
1829  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
1830  *   c3                         retq
1831  *
1832  * Sometimes r13 is used as the DRAP register, in which case it's saved and
1833  * restored beforehand:
1834  *
1835  *   41 55                      push   %r13
1836  *   4c 8d 6c 24 10             lea    0x10(%rsp),%r13
1837  *   48 83 e4 f0                and    $0xfffffffffffffff0,%rsp
1838  *                              ...
1839  *   49 8d 65 f0                lea    -0x10(%r13),%rsp
1840  *   41 5d                      pop    %r13
1841  *   c3                         retq
1842  */
1843 static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
1844                              struct stack_op *op)
1845 {
1846         struct cfi_reg *cfa = &cfi->cfa;
1847         struct cfi_reg *regs = cfi->regs;
1848
1849         /* stack operations don't make sense with an undefined CFA */
1850         if (cfa->base == CFI_UNDEFINED) {
1851                 if (insn->func) {
1852                         WARN_FUNC("undefined stack state", insn->sec, insn->offset);
1853                         return -1;
1854                 }
1855                 return 0;
1856         }
1857
1858         if (cfi->type == UNWIND_HINT_TYPE_REGS ||
1859             cfi->type == UNWIND_HINT_TYPE_REGS_PARTIAL)
1860                 return update_cfi_state_regs(insn, cfi, op);
1861
1862         switch (op->dest.type) {
1863
1864         case OP_DEST_REG:
1865                 switch (op->src.type) {
1866
1867                 case OP_SRC_REG:
1868                         if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
1869                             cfa->base == CFI_SP &&
1870                             regs[CFI_BP].base == CFI_CFA &&
1871                             regs[CFI_BP].offset == -cfa->offset) {
1872
1873                                 /* mov %rsp, %rbp */
1874                                 cfa->base = op->dest.reg;
1875                                 cfi->bp_scratch = false;
1876                         }
1877
1878                         else if (op->src.reg == CFI_SP &&
1879                                  op->dest.reg == CFI_BP && cfi->drap) {
1880
1881                                 /* drap: mov %rsp, %rbp */
1882                                 regs[CFI_BP].base = CFI_BP;
1883                                 regs[CFI_BP].offset = -cfi->stack_size;
1884                                 cfi->bp_scratch = false;
1885                         }
1886
1887                         else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1888
1889                                 /*
1890                                  * mov %rsp, %reg
1891                                  *
1892                                  * This is needed for the rare case where GCC
1893                                  * does:
1894                                  *
1895                                  *   mov    %rsp, %rax
1896                                  *   ...
1897                                  *   mov    %rax, %rsp
1898                                  */
1899                                 cfi->vals[op->dest.reg].base = CFI_CFA;
1900                                 cfi->vals[op->dest.reg].offset = -cfi->stack_size;
1901                         }
1902
1903                         else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
1904                                  cfa->base == CFI_BP) {
1905
1906                                 /*
1907                                  * mov %rbp, %rsp
1908                                  *
1909                                  * Restore the original stack pointer (Clang).
1910                                  */
1911                                 cfi->stack_size = -cfi->regs[CFI_BP].offset;
1912                         }
1913
1914                         else if (op->dest.reg == cfa->base) {
1915
1916                                 /* mov %reg, %rsp */
1917                                 if (cfa->base == CFI_SP &&
1918                                     cfi->vals[op->src.reg].base == CFI_CFA) {
1919
1920                                         /*
1921                                          * This is needed for the rare case
1922                                          * where GCC does something dumb like:
1923                                          *
1924                                          *   lea    0x8(%rsp), %rcx
1925                                          *   ...
1926                                          *   mov    %rcx, %rsp
1927                                          */
1928                                         cfa->offset = -cfi->vals[op->src.reg].offset;
1929                                         cfi->stack_size = cfa->offset;
1930
1931                                 } else {
1932                                         cfa->base = CFI_UNDEFINED;
1933                                         cfa->offset = 0;
1934                                 }
1935                         }
1936
1937                         break;
1938
1939                 case OP_SRC_ADD:
1940                         if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
1941
1942                                 /* add imm, %rsp */
1943                                 cfi->stack_size -= op->src.offset;
1944                                 if (cfa->base == CFI_SP)
1945                                         cfa->offset -= op->src.offset;
1946                                 break;
1947                         }
1948
1949                         if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
1950
1951                                 /* lea disp(%rbp), %rsp */
1952                                 cfi->stack_size = -(op->src.offset + regs[CFI_BP].offset);
1953                                 break;
1954                         }
1955
1956                         if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1957
1958                                 /* drap: lea disp(%rsp), %drap */
1959                                 cfi->drap_reg = op->dest.reg;
1960
1961                                 /*
1962                                  * lea disp(%rsp), %reg
1963                                  *
1964                                  * This is needed for the rare case where GCC
1965                                  * does something dumb like:
1966                                  *
1967                                  *   lea    0x8(%rsp), %rcx
1968                                  *   ...
1969                                  *   mov    %rcx, %rsp
1970                                  */
1971                                 cfi->vals[op->dest.reg].base = CFI_CFA;
1972                                 cfi->vals[op->dest.reg].offset =
1973                                         -cfi->stack_size + op->src.offset;
1974
1975                                 break;
1976                         }
1977
1978                         if (cfi->drap && op->dest.reg == CFI_SP &&
1979                             op->src.reg == cfi->drap_reg) {
1980
1981                                 /* drap: lea disp(%drap), %rsp */
1982                                 cfa->base = CFI_SP;
1983                                 cfa->offset = cfi->stack_size = -op->src.offset;
1984                                 cfi->drap_reg = CFI_UNDEFINED;
1985                                 cfi->drap = false;
1986                                 break;
1987                         }
1988
1989                         if (op->dest.reg == cfi->cfa.base) {
1990                                 WARN_FUNC("unsupported stack register modification",
1991                                           insn->sec, insn->offset);
1992                                 return -1;
1993                         }
1994
1995                         break;
1996
1997                 case OP_SRC_AND:
1998                         if (op->dest.reg != CFI_SP ||
1999                             (cfi->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
2000                             (cfi->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
2001                                 WARN_FUNC("unsupported stack pointer realignment",
2002                                           insn->sec, insn->offset);
2003                                 return -1;
2004                         }
2005
2006                         if (cfi->drap_reg != CFI_UNDEFINED) {
2007                                 /* drap: and imm, %rsp */
2008                                 cfa->base = cfi->drap_reg;
2009                                 cfa->offset = cfi->stack_size = 0;
2010                                 cfi->drap = true;
2011                         }
2012
2013                         /*
2014                          * Older versions of GCC (4.8ish) realign the stack
2015                          * without DRAP, with a frame pointer.
2016                          */
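                        /*
                         * Illustrative sketch only:
                         *
                         *   push   %rbp
                         *   mov    %rsp, %rbp
                         *   and    $0xfffffffffffffff0, %rsp
                         *
                         * The CFA is already %rbp-based here, so the AND on
                         * %rsp needs no extra bookkeeping.
                         */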
2017
2018                         break;
2019
2020                 case OP_SRC_POP:
2021                 case OP_SRC_POPF:
2022                         if (!cfi->drap && op->dest.reg == cfa->base) {
2023
2024                                 /* pop %rbp */
2025                                 cfa->base = CFI_SP;
2026                         }
2027
2028                         if (cfi->drap && cfa->base == CFI_BP_INDIRECT &&
2029                             op->dest.reg == cfi->drap_reg &&
2030                             cfi->drap_offset == -cfi->stack_size) {
2031
2032                                 /* drap: pop %drap */
2033                                 cfa->base = cfi->drap_reg;
2034                                 cfa->offset = 0;
2035                                 cfi->drap_offset = -1;
2036
2037                         } else if (regs[op->dest.reg].offset == -cfi->stack_size) {
2038
2039                                 /* pop %reg */
2040                                 restore_reg(cfi, op->dest.reg);
2041                         }
2042
2043                         cfi->stack_size -= 8;
2044                         if (cfa->base == CFI_SP)
2045                                 cfa->offset -= 8;
2046
2047                         break;
2048
2049                 case OP_SRC_REG_INDIRECT:
2050                         if (cfi->drap && op->src.reg == CFI_BP &&
2051                             op->src.offset == cfi->drap_offset) {
2052
2053                                 /* drap: mov disp(%rbp), %drap */
2054                                 cfa->base = cfi->drap_reg;
2055                                 cfa->offset = 0;
2056                                 cfi->drap_offset = -1;
2057                         }
2058
2059                         if (cfi->drap && op->src.reg == CFI_BP &&
2060                             op->src.offset == regs[op->dest.reg].offset) {
2061
2062                                 /* drap: mov disp(%rbp), %reg */
2063                                 restore_reg(cfi, op->dest.reg);
2064
2065                         } else if (op->src.reg == cfa->base &&
2066                             op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
2067
2068                                 /* mov disp(%rbp), %reg */
2069                                 /* mov disp(%rsp), %reg */
2070                                 restore_reg(cfi, op->dest.reg);
2071                         }
2072
2073                         break;
2074
2075                 default:
2076                         WARN_FUNC("unknown stack-related instruction",
2077                                   insn->sec, insn->offset);
2078                         return -1;
2079                 }
2080
2081                 break;
2082
2083         case OP_DEST_PUSH:
2084         case OP_DEST_PUSHF:
2085                 cfi->stack_size += 8;
2086                 if (cfa->base == CFI_SP)
2087                         cfa->offset += 8;
2088
2089                 if (op->src.type != OP_SRC_REG)
2090                         break;
2091
2092                 if (cfi->drap) {
2093                         if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {
2094
2095                                 /* drap: push %drap */
2096                                 cfa->base = CFI_BP_INDIRECT;
2097                                 cfa->offset = -cfi->stack_size;
2098
2099                                 /* save drap so we know when to restore it */
2100                                 cfi->drap_offset = -cfi->stack_size;
2101
2102                         } else if (op->src.reg == CFI_BP && cfa->base == cfi->drap_reg) {
2103
2104                                 /* drap: push %rbp */
2105                                 cfi->stack_size = 0;
2106
2107                         } else {
2108
2109                                 /* drap: push %reg */
2110                                 save_reg(cfi, op->src.reg, CFI_BP, -cfi->stack_size);
2111                         }
2112
2113                 } else {
2114
2115                         /* push %reg */
2116                         save_reg(cfi, op->src.reg, CFI_CFA, -cfi->stack_size);
2117                 }
2118
2119                 /* detect when asm code uses rbp as a scratch register */
2120                 if (!no_fp && insn->func && op->src.reg == CFI_BP &&
2121                     cfa->base != CFI_BP)
2122                         cfi->bp_scratch = true;
2123                 break;
2124
2125         case OP_DEST_REG_INDIRECT:
2126
2127                 if (cfi->drap) {
2128                         if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {
2129
2130                                 /* drap: mov %drap, disp(%rbp) */
2131                                 cfa->base = CFI_BP_INDIRECT;
2132                                 cfa->offset = op->dest.offset;
2133
2134                                 /* save drap offset so we know when to restore it */
2135                                 cfi->drap_offset = op->dest.offset;
2136                         } else {
2137
2138                                 /* drap: mov reg, disp(%rbp) */
2139                                 save_reg(cfi, op->src.reg, CFI_BP, op->dest.offset);
2140                         }
2141
2142                 } else if (op->dest.reg == cfa->base) {
2143
2144                         /* mov reg, disp(%rbp) */
2145                         /* mov reg, disp(%rsp) */
2146                         save_reg(cfi, op->src.reg, CFI_CFA,
2147                                  op->dest.offset - cfi->cfa.offset);
2148                 }
2149
2150                 break;
2151
2152         case OP_DEST_LEAVE:
2153                 if ((!cfi->drap && cfa->base != CFI_BP) ||
2154                     (cfi->drap && cfa->base != cfi->drap_reg)) {
2155                         WARN_FUNC("leave instruction with modified stack frame",
2156                                   insn->sec, insn->offset);
2157                         return -1;
2158                 }
2159
2160                 /* leave (mov %rbp, %rsp; pop %rbp) */
2161
2162                 cfi->stack_size = -cfi->regs[CFI_BP].offset - 8;
2163                 restore_reg(cfi, CFI_BP);
2164
2165                 if (!cfi->drap) {
2166                         cfa->base = CFI_SP;
2167                         cfa->offset -= 8;
2168                 }
2169
2170                 break;
2171
2172         case OP_DEST_MEM:
2173                 if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
2174                         WARN_FUNC("unknown stack-related memory operation",
2175                                   insn->sec, insn->offset);
2176                         return -1;
2177                 }
2178
2179                 /* pop mem */
2180                 cfi->stack_size -= 8;
2181                 if (cfa->base == CFI_SP)
2182                         cfa->offset -= 8;
2183
2184                 break;
2185
2186         default:
2187                 WARN_FUNC("unknown stack-related instruction",
2188                           insn->sec, insn->offset);
2189                 return -1;
2190         }
2191
2192         return 0;
2193 }
2194
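/*
 * Apply each of the instruction's stack_ops to the CFI state.  PUSHF/POPF
 * additionally maintain a small shift-register stack (state->uaccess_stack)
 * so that the uaccess (AC flag) state survives an EFLAGS save/restore.
 */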
2195 static int handle_insn_ops(struct instruction *insn, struct insn_state *state)
2196 {
2197         struct stack_op *op;
2198
2199         list_for_each_entry(op, &insn->stack_ops, list) {
2200                 struct cfi_state old_cfi = state->cfi;
2201                 int res;
2202
2203                 res = update_cfi_state(insn, &state->cfi, op);
2204                 if (res)
2205                         return res;
2206
2207                 if (insn->alt_group && memcmp(&state->cfi, &old_cfi, sizeof(struct cfi_state))) {
2208                         WARN_FUNC("alternative modifies stack", insn->sec, insn->offset);
2209                         return -1;
2210                 }
2211
2212                 if (op->dest.type == OP_DEST_PUSHF) {
2213                         if (!state->uaccess_stack) {
2214                                 state->uaccess_stack = 1;
2215                         } else if (state->uaccess_stack >> 31) {
2216                                 WARN_FUNC("PUSHF stack exhausted",
2217                                           insn->sec, insn->offset);
2218                                 return 1;
2219                         }
2220                         state->uaccess_stack <<= 1;
2221                         state->uaccess_stack  |= state->uaccess;
2222                 }
2223
2224                 if (op->src.type == OP_SRC_POPF) {
2225                         if (state->uaccess_stack) {
2226                                 state->uaccess = state->uaccess_stack & 1;
2227                                 state->uaccess_stack >>= 1;
2228                                 if (state->uaccess_stack == 1)
2229                                         state->uaccess_stack = 0;
2230                         }
2231                 }
2232         }
2233
2234         return 0;
2235 }
2236
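/*
 * Compare the CFI state computed along the current path against the state
 * recorded when this instruction was first visited; warn about the first
 * difference found.
 */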
2237 static bool insn_cfi_match(struct instruction *insn, struct cfi_state *cfi2)
2238 {
2239         struct cfi_state *cfi1 = &insn->cfi;
2240         int i;
2241
2242         if (memcmp(&cfi1->cfa, &cfi2->cfa, sizeof(cfi1->cfa))) {
2243
2244                 WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
2245                           insn->sec, insn->offset,
2246                           cfi1->cfa.base, cfi1->cfa.offset,
2247                           cfi2->cfa.base, cfi2->cfa.offset);
2248
2249         } else if (memcmp(&cfi1->regs, &cfi2->regs, sizeof(cfi1->regs))) {
2250                 for (i = 0; i < CFI_NUM_REGS; i++) {
2251                         if (!memcmp(&cfi1->regs[i], &cfi2->regs[i],
2252                                     sizeof(struct cfi_reg)))
2253                                 continue;
2254
2255                         WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
2256                                   insn->sec, insn->offset,
2257                                   i, cfi1->regs[i].base, cfi1->regs[i].offset,
2258                                   i, cfi2->regs[i].base, cfi2->regs[i].offset);
2259                         break;
2260                 }
2261
2262         } else if (cfi1->type != cfi2->type) {
2263
2264                 WARN_FUNC("stack state mismatch: type1=%d type2=%d",
2265                           insn->sec, insn->offset, cfi1->type, cfi2->type);
2266
2267         } else if (cfi1->drap != cfi2->drap ||
2268                    (cfi1->drap && cfi1->drap_reg != cfi2->drap_reg) ||
2269                    (cfi1->drap && cfi1->drap_offset != cfi2->drap_offset)) {
2270
2271                 WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
2272                           insn->sec, insn->offset,
2273                           cfi1->drap, cfi1->drap_reg, cfi1->drap_offset,
2274                           cfi2->drap, cfi2->drap_reg, cfi2->drap_offset);
2275
2276         } else
2277                 return true;
2278
2279         return false;
2280 }
2281
2282 static inline bool func_uaccess_safe(struct symbol *func)
2283 {
2284         if (func)
2285                 return func->uaccess_safe;
2286
2287         return false;
2288 }
2289
2290 static inline const char *call_dest_name(struct instruction *insn)
2291 {
2292         if (insn->call_dest)
2293                 return insn->call_dest->name;
2294
2295         return "{dynamic}";
2296 }
2297
2298 static inline bool noinstr_call_dest(struct symbol *func)
2299 {
2300         /*
2301          * We can't deal with indirect function calls at present;
2302          * assume they're instrumented.
2303          */
2304         if (!func)
2305                 return false;
2306
2307         /*
2308          * If the symbol is from a noinstr section, we're good.
2309          */
2310         if (func->sec->noinstr)
2311                 return true;
2312
2313         /*
2314          * The __ubsan_handle_*() calls are like WARN(): they only happen when
2315          * something 'BAD' happened. At the risk of taking the machine down,
2316          * let them proceed to get the message out.
2317          */
2318         if (!strncmp(func->name, "__ubsan_handle_", 15))
2319                 return true;
2320
2321         return false;
2322 }
2323
2324 static int validate_call(struct instruction *insn, struct insn_state *state)
2325 {
2326         if (state->noinstr && state->instr <= 0 &&
2327             !noinstr_call_dest(insn->call_dest)) {
2328                 WARN_FUNC("call to %s() leaves .noinstr.text section",
2329                                 insn->sec, insn->offset, call_dest_name(insn));
2330                 return 1;
2331         }
2332
2333         if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
2334                 WARN_FUNC("call to %s() with UACCESS enabled",
2335                                 insn->sec, insn->offset, call_dest_name(insn));
2336                 return 1;
2337         }
2338
2339         if (state->df) {
2340                 WARN_FUNC("call to %s() with DF set",
2341                                 insn->sec, insn->offset, call_dest_name(insn));
2342                 return 1;
2343         }
2344
2345         return 0;
2346 }
2347
2348 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
2349 {
2350         if (has_modified_stack_frame(insn, state)) {
2351                 WARN_FUNC("sibling call from callable instruction with modified stack frame",
2352                                 insn->sec, insn->offset);
2353                 return 1;
2354         }
2355
2356         return validate_call(insn, state);
2357 }
2358
2359 static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
2360 {
2361         if (state->noinstr && state->instr > 0) {
2362                 WARN_FUNC("return with instrumentation enabled",
2363                           insn->sec, insn->offset);
2364                 return 1;
2365         }
2366
2367         if (state->uaccess && !func_uaccess_safe(func)) {
2368                 WARN_FUNC("return with UACCESS enabled",
2369                           insn->sec, insn->offset);
2370                 return 1;
2371         }
2372
2373         if (!state->uaccess && func_uaccess_safe(func)) {
2374                 WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function",
2375                           insn->sec, insn->offset);
2376                 return 1;
2377         }
2378
2379         if (state->df) {
2380                 WARN_FUNC("return with DF set",
2381                           insn->sec, insn->offset);
2382                 return 1;
2383         }
2384
2385         if (func && has_modified_stack_frame(insn, state)) {
2386                 WARN_FUNC("return with modified stack frame",
2387                           insn->sec, insn->offset);
2388                 return 1;
2389         }
2390
2391         if (state->cfi.bp_scratch) {
2392                 WARN_FUNC("BP used as a scratch register",
2393                           insn->sec, insn->offset);
2394                 return 1;
2395         }
2396
2397         return 0;
2398 }
2399
2400 /*
2401  * Alternatives should not contain any ORC entries; this in turn means they
2402  * should not contain any CFI ops, which implies all instructions should have
2403  * the same CFI state.
2404  *
2405  * It is possible to construct alternatives that have unreachable holes which go
2406  * unreported (because they're NOPs); such holes would result in CFI_UNDEFINED
2407  * states, which then result in ORC entries, which we just said we didn't want.
2408  *
2409  * Avoid them by copying the CFI entry of the first instruction into the whole
2410  * alternative.
2411  */
2412 static void fill_alternative_cfi(struct objtool_file *file, struct instruction *insn)
2413 {
2414         struct instruction *first_insn = insn;
2415         int alt_group = insn->alt_group;
2416
2417         sec_for_each_insn_continue(file, insn) {
2418                 if (insn->alt_group != alt_group)
2419                         break;
2420                 insn->cfi = first_insn->cfi;
2421         }
2422 }
2423
2424 /*
2425  * Follow the branch starting at the given instruction, and recursively follow
2426  * any other branches (jumps).  Meanwhile, track the frame pointer state at
2427  * each instruction and validate all the rules described in
2428  * tools/objtool/Documentation/stack-validation.txt.
2429  */
2430 static int validate_branch(struct objtool_file *file, struct symbol *func,
2431                            struct instruction *insn, struct insn_state state)
2432 {
2433         struct alternative *alt;
2434         struct instruction *next_insn;
2435         struct section *sec;
2436         u8 visited;
2437         int ret;
2438
2439         sec = insn->sec;
2440
2441         while (1) {
2442                 next_insn = next_insn_same_sec(file, insn);
2443
2444                 if (file->c_file && func && insn->func && func != insn->func->pfunc) {
2445                         WARN("%s() falls through to next function %s()",
2446                              func->name, insn->func->name);
2447                         return 1;
2448                 }
2449
2450                 if (func && insn->ignore) {
2451                         WARN_FUNC("BUG: why am I validating an ignored function?",
2452                                   sec, insn->offset);
2453                         return 1;
2454                 }
2455
2456                 visited = 1 << state.uaccess;
2457                 if (insn->visited) {
2458                         if (!insn->hint && !insn_cfi_match(insn, &state.cfi))
2459                                 return 1;
2460
2461                         if (insn->visited & visited)
2462                                 return 0;
2463                 }
2464
2465                 if (state.noinstr)
2466                         state.instr += insn->instr;
2467
2468                 if (insn->hint)
2469                         state.cfi = insn->cfi;
2470                 else
2471                         insn->cfi = state.cfi;
2472
2473                 insn->visited |= visited;
2474
2475                 if (!insn->ignore_alts && !list_empty(&insn->alts)) {
2476                         bool skip_orig = false;
2477
2478                         list_for_each_entry(alt, &insn->alts, list) {
2479                                 if (alt->skip_orig)
2480                                         skip_orig = true;
2481
2482                                 ret = validate_branch(file, func, alt->insn, state);
2483                                 if (ret) {
2484                                         if (backtrace)
2485                                                 BT_FUNC("(alt)", insn);
2486                                         return ret;
2487                                 }
2488                         }
2489
2490                         if (insn->alt_group)
2491                                 fill_alternative_cfi(file, insn);
2492
2493                         if (skip_orig)
2494                                 return 0;
2495                 }
2496
2497                 if (handle_insn_ops(insn, &state))
2498                         return 1;
2499
2500                 switch (insn->type) {
2501
2502                 case INSN_RETURN:
2503                         return validate_return(func, insn, &state);
2504
2505                 case INSN_CALL:
2506                 case INSN_CALL_DYNAMIC:
2507                         ret = validate_call(insn, &state);
2508                         if (ret)
2509                                 return ret;
2510
2511                         if (!no_fp && func && !is_fentry_call(insn) &&
2512                             !has_valid_stack_frame(&state)) {
2513                                 WARN_FUNC("call without frame pointer save/setup",
2514                                           sec, insn->offset);
2515                                 return 1;
2516                         }
2517
2518                         if (dead_end_function(file, insn->call_dest))
2519                                 return 0;
2520
2521                         if (insn->type == INSN_CALL && insn->call_dest->static_call_tramp) {
2522                                 list_add_tail(&insn->static_call_node,
2523                                               &file->static_call_list);
2524                         }
2525
2526                         break;
2527
2528                 case INSN_JUMP_CONDITIONAL:
2529                 case INSN_JUMP_UNCONDITIONAL:
2530                         if (func && is_sibling_call(insn)) {
2531                                 ret = validate_sibling_call(insn, &state);
2532                                 if (ret)
2533                                         return ret;
2534
2535                         } else if (insn->jump_dest) {
2536                                 ret = validate_branch(file, func,
2537                                                       insn->jump_dest, state);
2538                                 if (ret) {
2539                                         if (backtrace)
2540                                                 BT_FUNC("(branch)", insn);
2541                                         return ret;
2542                                 }
2543                         }
2544
2545                         if (insn->type == INSN_JUMP_UNCONDITIONAL)
2546                                 return 0;
2547
2548                         break;
2549
2550                 case INSN_JUMP_DYNAMIC:
2551                 case INSN_JUMP_DYNAMIC_CONDITIONAL:
2552                         if (func && is_sibling_call(insn)) {
2553                                 ret = validate_sibling_call(insn, &state);
2554                                 if (ret)
2555                                         return ret;
2556                         }
2557
2558                         if (insn->type == INSN_JUMP_DYNAMIC)
2559                                 return 0;
2560
2561                         break;
2562
2563                 case INSN_CONTEXT_SWITCH:
2564                         if (func && (!next_insn || !next_insn->hint)) {
2565                                 WARN_FUNC("unsupported instruction in callable function",
2566                                           sec, insn->offset);
2567                                 return 1;
2568                         }
2569                         return 0;
2570
2571                 case INSN_STAC:
2572                         if (state.uaccess) {
2573                                 WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
2574                                 return 1;
2575                         }
2576
2577                         state.uaccess = true;
2578                         break;
2579
2580                 case INSN_CLAC:
2581                         if (!state.uaccess && func) {
2582                                 WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
2583                                 return 1;
2584                         }
2585
2586                         if (func_uaccess_safe(func) && !state.uaccess_stack) {
2587                                 WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
2588                                 return 1;
2589                         }
2590
2591                         state.uaccess = false;
2592                         break;
2593
2594                 case INSN_STD:
2595                         if (state.df)
2596                                 WARN_FUNC("recursive STD", sec, insn->offset);
2597
2598                         state.df = true;
2599                         break;
2600
2601                 case INSN_CLD:
2602                         if (!state.df && func)
2603                                 WARN_FUNC("redundant CLD", sec, insn->offset);
2604
2605                         state.df = false;
2606                         break;
2607
2608                 default:
2609                         break;
2610                 }
2611
2612                 if (insn->dead_end)
2613                         return 0;
2614
2615                 if (!next_insn) {
2616                         if (state.cfi.cfa.base == CFI_UNDEFINED)
2617                                 return 0;
2618                         WARN("%s: unexpected end of section", sec->name);
2619                         return 1;
2620                 }
2621
2622                 insn = next_insn;
2623         }
2624
2625         return 0;
2626 }
2627
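/*
 * Validate code which is only reachable via unwind hints (typically asm entry
 * code): start branch validation at every hinted instruction that hasn't
 * already been visited by normal function validation.
 */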
2628 static int validate_unwind_hints(struct objtool_file *file, struct section *sec)
2629 {
2630         struct instruction *insn;
2631         struct insn_state state;
2632         int ret, warnings = 0;
2633
2634         if (!file->hints)
2635                 return 0;
2636
2637         init_insn_state(&state, sec);
2638
2639         if (sec) {
2640                 insn = find_insn(file, sec, 0);
2641                 if (!insn)
2642                         return 0;
2643         } else {
2644                 insn = list_first_entry(&file->insn_list, typeof(*insn), list);
2645         }
2646
2647         while (&insn->list != &file->insn_list && (!sec || insn->sec == sec)) {
2648                 if (insn->hint && !insn->visited) {
2649                         ret = validate_branch(file, insn->func, insn, state);
2650                         if (ret && backtrace)
2651                                 BT_FUNC("<=== (hint)", insn);
2652                         warnings += ret;
2653                 }
2654
2655                 insn = list_next_entry(insn, list);
2656         }
2657
2658         return warnings;
2659 }
2660
2661 static int validate_retpoline(struct objtool_file *file)
2662 {
2663         struct instruction *insn;
2664         int warnings = 0;
2665
2666         for_each_insn(file, insn) {
2667                 if (insn->type != INSN_JUMP_DYNAMIC &&
2668                     insn->type != INSN_CALL_DYNAMIC)
2669                         continue;
2670
2671                 if (insn->retpoline_safe)
2672                         continue;
2673
2674                 /*
2675                  * .init.text code is run before userspace and thus doesn't
2676                  * strictly need retpolines, except for modules, which are
2677                  * loaded late; they very much do need retpolines in their
2678                  * .init.text.
2679                  */
2680                 if (!strcmp(insn->sec->name, ".init.text") && !module)
2681                         continue;
2682
2683                 WARN_FUNC("indirect %s found in RETPOLINE build",
2684                           insn->sec, insn->offset,
2685                           insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2686
2687                 warnings++;
2688         }
2689
2690         return warnings;
2691 }
2692
2693 static bool is_kasan_insn(struct instruction *insn)
2694 {
2695         return (insn->type == INSN_CALL &&
2696                 !strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2697 }
2698
2699 static bool is_ubsan_insn(struct instruction *insn)
2700 {
2701         return (insn->type == INSN_CALL &&
2702                 !strcmp(insn->call_dest->name,
2703                         "__ubsan_handle_builtin_unreachable"));
2704 }
2705
2706 static bool ignore_unreachable_insn(struct objtool_file *file, struct instruction *insn)
2707 {
2708         int i;
2709         struct instruction *prev_insn;
2710
2711         if (insn->ignore || insn->type == INSN_NOP)
2712                 return true;
2713
2714         /*
2715          * Ignore any unused exceptions.  This can happen when a whitelisted
2716          * function has an exception table entry.
2717          *
2718          * Also ignore alternative replacement instructions.  This can happen
2719          * when a whitelisted function uses one of the ALTERNATIVE macros.
2720          */
2721         if (!strcmp(insn->sec->name, ".fixup") ||
2722             !strcmp(insn->sec->name, ".altinstr_replacement") ||
2723             !strcmp(insn->sec->name, ".altinstr_aux"))
2724                 return true;
2725
2726         if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->offset == FAKE_JUMP_OFFSET)
2727                 return true;
2728
2729         if (!insn->func)
2730                 return false;
2731
2732         /*
2733          * CONFIG_UBSAN_TRAP inserts a UD2 when it sees
2734          * __builtin_unreachable().  The BUG() macro has an unreachable() after
2735          * the UD2, which causes GCC's undefined trap logic to emit another UD2
2736          * (or occasionally a JMP to UD2).
2737          *
2738          * It may also insert a UD2 after calling a __noreturn function.
2739          */
2740         prev_insn = list_prev_entry(insn, list);
2741         if ((prev_insn->dead_end || dead_end_function(file, prev_insn->call_dest)) &&
2742             (insn->type == INSN_BUG ||
2743              (insn->type == INSN_JUMP_UNCONDITIONAL &&
2744               insn->jump_dest && insn->jump_dest->type == INSN_BUG)))
2745                 return true;
2746
2747         /*
2748          * Check if this (or a subsequent) instruction is related to
2749          * CONFIG_UBSAN or CONFIG_KASAN.
2750          *
2751          * End the search at 5 instructions to avoid going into the weeds.
2752          */
2753         for (i = 0; i < 5; i++) {
2754
2755                 if (is_kasan_insn(insn) || is_ubsan_insn(insn))
2756                         return true;
2757
2758                 if (insn->type == INSN_JUMP_UNCONDITIONAL) {
2759                         if (insn->jump_dest &&
2760                             insn->jump_dest->func == insn->func) {
2761                                 insn = insn->jump_dest;
2762                                 continue;
2763                         }
2764
2765                         break;
2766                 }
2767
2768                 if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
2769                         break;
2770
2771                 insn = list_next_entry(insn, list);
2772         }
2773
2774         return false;
2775 }
2776
2777 static int validate_symbol(struct objtool_file *file, struct section *sec,
2778                            struct symbol *sym, struct insn_state *state)
2779 {
2780         struct instruction *insn;
2781         int ret;
2782
2783         if (!sym->len) {
2784                 WARN("%s() is missing an ELF size annotation", sym->name);
2785                 return 1;
2786         }
2787
2788         if (sym->pfunc != sym || sym->alias != sym)
2789                 return 0;
2790
2791         insn = find_insn(file, sec, sym->offset);
2792         if (!insn || insn->ignore || insn->visited)
2793                 return 0;
2794
2795         state->uaccess = sym->uaccess_safe;
2796
2797         ret = validate_branch(file, insn->func, insn, *state);
2798         if (ret && backtrace)
2799                 BT_FUNC("<=== (sym)", insn);
2800         return ret;
2801 }
2802
2803 static int validate_section(struct objtool_file *file, struct section *sec)
2804 {
2805         struct insn_state state;
2806         struct symbol *func;
2807         int warnings = 0;
2808
2809         list_for_each_entry(func, &sec->symbol_list, list) {
2810                 if (func->type != STT_FUNC)
2811                         continue;
2812
2813                 init_insn_state(&state, sec);
2814                 state.cfi.cfa = initial_func_cfi.cfa;
2815                 memcpy(&state.cfi.regs, &initial_func_cfi.regs,
2816                        CFI_NUM_REGS * sizeof(struct cfi_reg));
2817                 state.cfi.stack_size = initial_func_cfi.cfa.offset;
2818
2819                 warnings += validate_symbol(file, sec, func, &state);
2820         }
2821
2822         return warnings;
2823 }
2824
2825 static int validate_vmlinux_functions(struct objtool_file *file)
2826 {
2827         struct section *sec;
2828         int warnings = 0;
2829
2830         sec = find_section_by_name(file->elf, ".noinstr.text");
2831         if (sec) {
2832                 warnings += validate_section(file, sec);
2833                 warnings += validate_unwind_hints(file, sec);
2834         }
2835
2836         sec = find_section_by_name(file->elf, ".entry.text");
2837         if (sec) {
2838                 warnings += validate_section(file, sec);
2839                 warnings += validate_unwind_hints(file, sec);
2840         }
2841
2842         return warnings;
2843 }
2844
2845 static int validate_functions(struct objtool_file *file)
2846 {
2847         struct section *sec;
2848         int warnings = 0;
2849
2850         for_each_sec(file, sec) {
2851                 if (!(sec->sh.sh_flags & SHF_EXECINSTR))
2852                         continue;
2853
2854                 warnings += validate_section(file, sec);
2855         }
2856
2857         return warnings;
2858 }
2859
2860 static int validate_reachable_instructions(struct objtool_file *file)
2861 {
2862         struct instruction *insn;
2863
2864         if (file->ignore_unreachables)
2865                 return 0;
2866
2867         for_each_insn(file, insn) {
2868                 if (insn->visited || ignore_unreachable_insn(file, insn))
2869                         continue;
2870
2871                 WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
2872                 return 1;
2873         }
2874
2875         return 0;
2876 }
2877
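/*
 * Main entry point: decode the object, run the requested validation passes
 * and accumulate their warning counts.  Warnings are reported but, for now,
 * don't fail the build (see the comment at the end).
 */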
2878 int check(struct objtool_file *file)
2879 {
2880         int ret, warnings = 0;
2881
2882         arch_initial_func_cfi_state(&initial_func_cfi);
2883
2884         ret = decode_sections(file);
2885         if (ret < 0)
2886                 goto out;
2887         warnings += ret;
2888
2889         if (list_empty(&file->insn_list))
2890                 goto out;
2891
2892         if (vmlinux && !validate_dup) {
2893                 ret = validate_vmlinux_functions(file);
2894                 if (ret < 0)
2895                         goto out;
2896
2897                 warnings += ret;
2898                 goto out;
2899         }
2900
2901         if (retpoline) {
2902                 ret = validate_retpoline(file);
2903                 if (ret < 0)
2904                         return ret;
2905                 warnings += ret;
2906         }
2907
2908         ret = validate_functions(file);
2909         if (ret < 0)
2910                 goto out;
2911         warnings += ret;
2912
2913         ret = validate_unwind_hints(file, NULL);
2914         if (ret < 0)
2915                 goto out;
2916         warnings += ret;
2917
2918         if (!warnings) {
2919                 ret = validate_reachable_instructions(file);
2920                 if (ret < 0)
2921                         goto out;
2922                 warnings += ret;
2923         }
2924
2925         ret = create_static_call_sections(file);
2926         if (ret < 0)
2927                 goto out;
2928         warnings += ret;
2929
2930 out:
2931         /*
2932          *  For now, don't fail the kernel build on fatal warnings.  These
2933          *  errors are still fairly common due to the growing matrix of
2934          *  supported toolchains and their recent pace of change.
2935          */
2936         return 0;
2937 }