objtool: Keep track of retpoline call sites
[linux-2.6-microblaze.git] / tools / objtool / check.c
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4  */
5
6 #include <string.h>
7 #include <stdlib.h>
8
9 #include <arch/elf.h>
10 #include <objtool/builtin.h>
11 #include <objtool/cfi.h>
12 #include <objtool/arch.h>
13 #include <objtool/check.h>
14 #include <objtool/special.h>
15 #include <objtool/warn.h>
16 #include <objtool/endianness.h>
17
18 #include <linux/objtool.h>
19 #include <linux/hashtable.h>
20 #include <linux/kernel.h>
21 #include <linux/static_call_types.h>
22
/*
 * One alternative code sequence attached to an instruction (linked into
 * instruction->alts).  @insn is the first instruction of the alternative
 * sequence.  @skip_orig presumably means the original instruction stream
 * should not be validated for this site — confirm against the alt-handling
 * code (not visible in this chunk).
 */
struct alternative {
	struct list_head list;
	struct instruction *insn;
	bool skip_orig;
};
28
29 struct cfi_init_state initial_func_cfi;
30
31 struct instruction *find_insn(struct objtool_file *file,
32                               struct section *sec, unsigned long offset)
33 {
34         struct instruction *insn;
35
36         hash_for_each_possible(file->insn_hash, insn, hash, sec_offset_hash(sec, offset)) {
37                 if (insn->sec == sec && insn->offset == offset)
38                         return insn;
39         }
40
41         return NULL;
42 }
43
44 static struct instruction *next_insn_same_sec(struct objtool_file *file,
45                                               struct instruction *insn)
46 {
47         struct instruction *next = list_next_entry(insn, list);
48
49         if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
50                 return NULL;
51
52         return next;
53 }
54
55 static struct instruction *next_insn_same_func(struct objtool_file *file,
56                                                struct instruction *insn)
57 {
58         struct instruction *next = list_next_entry(insn, list);
59         struct symbol *func = insn->func;
60
61         if (!func)
62                 return NULL;
63
64         if (&next->list != &file->insn_list && next->func == func)
65                 return next;
66
67         /* Check if we're already in the subfunction: */
68         if (func == func->cfunc)
69                 return NULL;
70
71         /* Move to the subfunction: */
72         return find_insn(file, func->cfunc->sec, func->cfunc->offset);
73 }
74
75 static struct instruction *prev_insn_same_sym(struct objtool_file *file,
76                                                struct instruction *insn)
77 {
78         struct instruction *prev = list_prev_entry(insn, list);
79
80         if (&prev->list != &file->insn_list && prev->func == insn->func)
81                 return prev;
82
83         return NULL;
84 }
85
/*
 * Iterate over every instruction of @func, including its subfunction
 * (via next_insn_same_func()).
 */
#define func_for_each_insn(file, func, insn)                            \
	for (insn = find_insn(file, func->sec, func->offset);           \
	     insn;                                                      \
	     insn = next_insn_same_func(file, insn))

/*
 * Iterate over the instructions inside @sym's byte range
 * [sym->offset, sym->offset + sym->len) in plain list order
 * (no subfunction following).
 */
#define sym_for_each_insn(file, sym, insn)                              \
	for (insn = find_insn(file, sym->sec, sym->offset);             \
	     insn && &insn->list != &file->insn_list &&                 \
		insn->sec == sym->sec &&                                \
		insn->offset < sym->offset + sym->len;                  \
	     insn = list_next_entry(insn, list))

/*
 * Continue iterating *backwards* from (but not including) @insn while
 * still inside @sym's section and at or above its start offset.
 */
#define sym_for_each_insn_continue_reverse(file, sym, insn)             \
	for (insn = list_prev_entry(insn, list);                        \
	     &insn->list != &file->insn_list &&                         \
		insn->sec == sym->sec && insn->offset >= sym->offset;   \
	     insn = list_prev_entry(insn, list))

/* Iterate from @insn (inclusive) to the end of its section. */
#define sec_for_each_insn_from(file, insn)                              \
	for (; insn; insn = next_insn_same_sec(file, insn))

/* Iterate from the instruction after @insn to the end of its section. */
#define sec_for_each_insn_continue(file, insn)                          \
	for (insn = next_insn_same_sec(file, insn); insn;               \
	     insn = next_insn_same_sec(file, insn))
110
111 static bool is_sibling_call(struct instruction *insn)
112 {
113         /*
114          * Assume only ELF functions can make sibling calls.  This ensures
115          * sibling call detection consistency between vmlinux.o and individual
116          * objects.
117          */
118         if (!insn->func)
119                 return false;
120
121         /* An indirect jump is either a sibling call or a jump to a table. */
122         if (insn->type == INSN_JUMP_DYNAMIC)
123                 return list_empty(&insn->alts);
124
125         /* add_jump_destinations() sets insn->call_dest for sibling calls. */
126         return (is_static_jump(insn) && insn->call_dest);
127 }
128
129 /*
130  * This checks to see if the given function is a "noreturn" function.
131  *
132  * For global functions which are outside the scope of this object file, we
133  * have to keep a manual list of them.
134  *
135  * For local functions, we have to detect them manually by simply looking for
136  * the lack of a return instruction.
137  */
138 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
139                                 int recursion)
140 {
141         int i;
142         struct instruction *insn;
143         bool empty = true;
144
145         /*
146          * Unfortunately these have to be hard coded because the noreturn
147          * attribute isn't provided in ELF data.
148          */
149         static const char * const global_noreturns[] = {
150                 "__stack_chk_fail",
151                 "panic",
152                 "do_exit",
153                 "do_task_dead",
154                 "__module_put_and_exit",
155                 "complete_and_exit",
156                 "__reiserfs_panic",
157                 "lbug_with_loc",
158                 "fortify_panic",
159                 "usercopy_abort",
160                 "machine_real_restart",
161                 "rewind_stack_do_exit",
162                 "kunit_try_catch_throw",
163                 "xen_start_kernel",
164         };
165
166         if (!func)
167                 return false;
168
169         if (func->bind == STB_WEAK)
170                 return false;
171
172         if (func->bind == STB_GLOBAL)
173                 for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
174                         if (!strcmp(func->name, global_noreturns[i]))
175                                 return true;
176
177         if (!func->len)
178                 return false;
179
180         insn = find_insn(file, func->sec, func->offset);
181         if (!insn->func)
182                 return false;
183
184         func_for_each_insn(file, func, insn) {
185                 empty = false;
186
187                 if (insn->type == INSN_RETURN)
188                         return false;
189         }
190
191         if (empty)
192                 return false;
193
194         /*
195          * A function can have a sibling call instead of a return.  In that
196          * case, the function's dead-end status depends on whether the target
197          * of the sibling call returns.
198          */
199         func_for_each_insn(file, func, insn) {
200                 if (is_sibling_call(insn)) {
201                         struct instruction *dest = insn->jump_dest;
202
203                         if (!dest)
204                                 /* sibling call to another file */
205                                 return false;
206
207                         /* local sibling call */
208                         if (recursion == 5) {
209                                 /*
210                                  * Infinite recursion: two functions have
211                                  * sibling calls to each other.  This is a very
212                                  * rare case.  It means they aren't dead ends.
213                                  */
214                                 return false;
215                         }
216
217                         return __dead_end_function(file, dest->func, recursion+1);
218                 }
219         }
220
221         return true;
222 }
223
/*
 * Public entry point for noreturn detection: starts the sibling-call
 * recursion of __dead_end_function() at depth 0 (capped at 5 there).
 */
static bool dead_end_function(struct objtool_file *file, struct symbol *func)
{
	return __dead_end_function(file, func, 0);
}
228
229 static void init_cfi_state(struct cfi_state *cfi)
230 {
231         int i;
232
233         for (i = 0; i < CFI_NUM_REGS; i++) {
234                 cfi->regs[i].base = CFI_UNDEFINED;
235                 cfi->vals[i].base = CFI_UNDEFINED;
236         }
237         cfi->cfa.base = CFI_UNDEFINED;
238         cfi->drap_reg = CFI_UNDEFINED;
239         cfi->drap_offset = -1;
240 }
241
242 static void init_insn_state(struct insn_state *state, struct section *sec)
243 {
244         memset(state, 0, sizeof(*state));
245         init_cfi_state(&state->cfi);
246
247         /*
248          * We need the full vmlinux for noinstr validation, otherwise we can
249          * not correctly determine insn->call_dest->sec (external symbols do
250          * not have a section).
251          */
252         if (vmlinux && noinstr && sec)
253                 state->noinstr = sec->noinstr;
254 }
255
/*
 * Call the arch-specific instruction decoder for all the instructions and add
 * them to the global instruction list.
 *
 * Returns 0 on success, -1 (or the decoder's error code) on failure.
 */
static int decode_instructions(struct objtool_file *file)
{
	struct section *sec;
	struct symbol *func;
	unsigned long offset;
	struct instruction *insn;
	unsigned long nr_insns = 0;
	int ret;

	for_each_sec(file, sec) {

		/* Only executable sections contain instructions. */
		if (!(sec->sh.sh_flags & SHF_EXECINSTR))
			continue;

		/*
		 * Alternative-replacement and .discard.* sections are
		 * executable but not treated as "real" text.
		 */
		if (strcmp(sec->name, ".altinstr_replacement") &&
		    strcmp(sec->name, ".altinstr_aux") &&
		    strncmp(sec->name, ".discard.", 9))
			sec->text = true;

		/* Sections whose code must stay instrumentation-free. */
		if (!strcmp(sec->name, ".noinstr.text") ||
		    !strcmp(sec->name, ".entry.text"))
			sec->noinstr = true;

		/* Decode one instruction at a time; insn->len advances the cursor. */
		for (offset = 0; offset < sec->len; offset += insn->len) {
			insn = malloc(sizeof(*insn));
			if (!insn) {
				WARN("malloc failed");
				return -1;
			}
			memset(insn, 0, sizeof(*insn));
			INIT_LIST_HEAD(&insn->alts);
			INIT_LIST_HEAD(&insn->stack_ops);
			init_cfi_state(&insn->cfi);

			insn->sec = sec;
			insn->offset = offset;

			ret = arch_decode_instruction(file->elf, sec, offset,
						      sec->len - offset,
						      &insn->len, &insn->type,
						      &insn->immediate,
						      &insn->stack_ops);
			if (ret)
				goto err;

			hash_add(file->insn_hash, &insn->hash, sec_offset_hash(sec, insn->offset));
			list_add_tail(&insn->list, &file->insn_list);
			nr_insns++;
		}

		/* Associate each decoded instruction with its containing function. */
		list_for_each_entry(func, &sec->symbol_list, list) {
			/* Skip non-functions and aliases; only canonical symbols count. */
			if (func->type != STT_FUNC || func->alias != func)
				continue;

			if (!find_insn(file, sec, func->offset)) {
				WARN("%s(): can't find starting instruction",
				     func->name);
				return -1;
			}

			sym_for_each_insn(file, func, insn)
				insn->func = func;
		}
	}

	if (stats)
		printf("nr_insns: %lu\n", nr_insns);

	return 0;

err:
	/* insn was not yet hashed or listed, so freeing it here is safe. */
	free(insn);
	return ret;
}
334
335 static struct instruction *find_last_insn(struct objtool_file *file,
336                                           struct section *sec)
337 {
338         struct instruction *insn = NULL;
339         unsigned int offset;
340         unsigned int end = (sec->len > 10) ? sec->len - 10 : 0;
341
342         for (offset = sec->len - 1; offset >= end && !insn; offset--)
343                 insn = find_insn(file, sec, offset);
344
345         return insn;
346 }
347
348 /*
349  * Mark "ud2" instructions and manually annotated dead ends.
350  */
351 static int add_dead_ends(struct objtool_file *file)
352 {
353         struct section *sec;
354         struct reloc *reloc;
355         struct instruction *insn;
356
357         /*
358          * By default, "ud2" is a dead end unless otherwise annotated, because
359          * GCC 7 inserts it for certain divide-by-zero cases.
360          */
361         for_each_insn(file, insn)
362                 if (insn->type == INSN_BUG)
363                         insn->dead_end = true;
364
365         /*
366          * Check for manually annotated dead ends.
367          */
368         sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
369         if (!sec)
370                 goto reachable;
371
372         list_for_each_entry(reloc, &sec->reloc_list, list) {
373                 if (reloc->sym->type != STT_SECTION) {
374                         WARN("unexpected relocation symbol type in %s", sec->name);
375                         return -1;
376                 }
377                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
378                 if (insn)
379                         insn = list_prev_entry(insn, list);
380                 else if (reloc->addend == reloc->sym->sec->len) {
381                         insn = find_last_insn(file, reloc->sym->sec);
382                         if (!insn) {
383                                 WARN("can't find unreachable insn at %s+0x%x",
384                                      reloc->sym->sec->name, reloc->addend);
385                                 return -1;
386                         }
387                 } else {
388                         WARN("can't find unreachable insn at %s+0x%x",
389                              reloc->sym->sec->name, reloc->addend);
390                         return -1;
391                 }
392
393                 insn->dead_end = true;
394         }
395
396 reachable:
397         /*
398          * These manually annotated reachable checks are needed for GCC 4.4,
399          * where the Linux unreachable() macro isn't supported.  In that case
400          * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
401          * not a dead end.
402          */
403         sec = find_section_by_name(file->elf, ".rela.discard.reachable");
404         if (!sec)
405                 return 0;
406
407         list_for_each_entry(reloc, &sec->reloc_list, list) {
408                 if (reloc->sym->type != STT_SECTION) {
409                         WARN("unexpected relocation symbol type in %s", sec->name);
410                         return -1;
411                 }
412                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
413                 if (insn)
414                         insn = list_prev_entry(insn, list);
415                 else if (reloc->addend == reloc->sym->sec->len) {
416                         insn = find_last_insn(file, reloc->sym->sec);
417                         if (!insn) {
418                                 WARN("can't find reachable insn at %s+0x%x",
419                                      reloc->sym->sec->name, reloc->addend);
420                                 return -1;
421                         }
422                 } else {
423                         WARN("can't find reachable insn at %s+0x%x",
424                              reloc->sym->sec->name, reloc->addend);
425                         return -1;
426                 }
427
428                 insn->dead_end = false;
429         }
430
431         return 0;
432 }
433
434 static int create_static_call_sections(struct objtool_file *file)
435 {
436         struct section *sec;
437         struct static_call_site *site;
438         struct instruction *insn;
439         struct symbol *key_sym;
440         char *key_name, *tmp;
441         int idx;
442
443         sec = find_section_by_name(file->elf, ".static_call_sites");
444         if (sec) {
445                 INIT_LIST_HEAD(&file->static_call_list);
446                 WARN("file already has .static_call_sites section, skipping");
447                 return 0;
448         }
449
450         if (list_empty(&file->static_call_list))
451                 return 0;
452
453         idx = 0;
454         list_for_each_entry(insn, &file->static_call_list, call_node)
455                 idx++;
456
457         sec = elf_create_section(file->elf, ".static_call_sites", SHF_WRITE,
458                                  sizeof(struct static_call_site), idx);
459         if (!sec)
460                 return -1;
461
462         idx = 0;
463         list_for_each_entry(insn, &file->static_call_list, call_node) {
464
465                 site = (struct static_call_site *)sec->data->d_buf + idx;
466                 memset(site, 0, sizeof(struct static_call_site));
467
468                 /* populate reloc for 'addr' */
469                 if (elf_add_reloc_to_insn(file->elf, sec,
470                                           idx * sizeof(struct static_call_site),
471                                           R_X86_64_PC32,
472                                           insn->sec, insn->offset))
473                         return -1;
474
475                 /* find key symbol */
476                 key_name = strdup(insn->call_dest->name);
477                 if (!key_name) {
478                         perror("strdup");
479                         return -1;
480                 }
481                 if (strncmp(key_name, STATIC_CALL_TRAMP_PREFIX_STR,
482                             STATIC_CALL_TRAMP_PREFIX_LEN)) {
483                         WARN("static_call: trampoline name malformed: %s", key_name);
484                         return -1;
485                 }
486                 tmp = key_name + STATIC_CALL_TRAMP_PREFIX_LEN - STATIC_CALL_KEY_PREFIX_LEN;
487                 memcpy(tmp, STATIC_CALL_KEY_PREFIX_STR, STATIC_CALL_KEY_PREFIX_LEN);
488
489                 key_sym = find_symbol_by_name(file->elf, tmp);
490                 if (!key_sym) {
491                         if (!module) {
492                                 WARN("static_call: can't find static_call_key symbol: %s", tmp);
493                                 return -1;
494                         }
495
496                         /*
497                          * For modules(), the key might not be exported, which
498                          * means the module can make static calls but isn't
499                          * allowed to change them.
500                          *
501                          * In that case we temporarily set the key to be the
502                          * trampoline address.  This is fixed up in
503                          * static_call_add_module().
504                          */
505                         key_sym = insn->call_dest;
506                 }
507                 free(key_name);
508
509                 /* populate reloc for 'key' */
510                 if (elf_add_reloc(file->elf, sec,
511                                   idx * sizeof(struct static_call_site) + 4,
512                                   R_X86_64_PC32, key_sym,
513                                   is_sibling_call(insn) * STATIC_CALL_SITE_TAIL))
514                         return -1;
515
516                 idx++;
517         }
518
519         return 0;
520 }
521
522 static int create_mcount_loc_sections(struct objtool_file *file)
523 {
524         struct section *sec;
525         unsigned long *loc;
526         struct instruction *insn;
527         int idx;
528
529         sec = find_section_by_name(file->elf, "__mcount_loc");
530         if (sec) {
531                 INIT_LIST_HEAD(&file->mcount_loc_list);
532                 WARN("file already has __mcount_loc section, skipping");
533                 return 0;
534         }
535
536         if (list_empty(&file->mcount_loc_list))
537                 return 0;
538
539         idx = 0;
540         list_for_each_entry(insn, &file->mcount_loc_list, mcount_loc_node)
541                 idx++;
542
543         sec = elf_create_section(file->elf, "__mcount_loc", 0, sizeof(unsigned long), idx);
544         if (!sec)
545                 return -1;
546
547         idx = 0;
548         list_for_each_entry(insn, &file->mcount_loc_list, mcount_loc_node) {
549
550                 loc = (unsigned long *)sec->data->d_buf + idx;
551                 memset(loc, 0, sizeof(unsigned long));
552
553                 if (elf_add_reloc_to_insn(file->elf, sec,
554                                           idx * sizeof(unsigned long),
555                                           R_X86_64_64,
556                                           insn->sec, insn->offset))
557                         return -1;
558
559                 idx++;
560         }
561
562         return 0;
563 }
564
565 /*
566  * Warnings shouldn't be reported for ignored functions.
567  */
568 static void add_ignores(struct objtool_file *file)
569 {
570         struct instruction *insn;
571         struct section *sec;
572         struct symbol *func;
573         struct reloc *reloc;
574
575         sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
576         if (!sec)
577                 return;
578
579         list_for_each_entry(reloc, &sec->reloc_list, list) {
580                 switch (reloc->sym->type) {
581                 case STT_FUNC:
582                         func = reloc->sym;
583                         break;
584
585                 case STT_SECTION:
586                         func = find_func_by_offset(reloc->sym->sec, reloc->addend);
587                         if (!func)
588                                 continue;
589                         break;
590
591                 default:
592                         WARN("unexpected relocation symbol type in %s: %d", sec->name, reloc->sym->type);
593                         continue;
594                 }
595
596                 func_for_each_insn(file, func, insn)
597                         insn->ignore = true;
598         }
599 }
600
/*
 * This is a whitelist of functions that is allowed to be called with AC set.
 * The list is meant to be minimal and only contains compiler instrumentation
 * ABI and a few functions used to implement *_{to,from}_user() functions.
 *
 * These functions must not directly change AC, but may PUSHF/POPF.
 *
 * NULL-terminated; consumed by add_uaccess_safe() below.
 */
static const char *uaccess_safe_builtin[] = {
	/* KASAN */
	"kasan_report",
	"kasan_check_range",
	/* KASAN out-of-line */
	"__asan_loadN_noabort",
	"__asan_load1_noabort",
	"__asan_load2_noabort",
	"__asan_load4_noabort",
	"__asan_load8_noabort",
	"__asan_load16_noabort",
	"__asan_storeN_noabort",
	"__asan_store1_noabort",
	"__asan_store2_noabort",
	"__asan_store4_noabort",
	"__asan_store8_noabort",
	"__asan_store16_noabort",
	"__kasan_check_read",
	"__kasan_check_write",
	/* KASAN in-line */
	"__asan_report_load_n_noabort",
	"__asan_report_load1_noabort",
	"__asan_report_load2_noabort",
	"__asan_report_load4_noabort",
	"__asan_report_load8_noabort",
	"__asan_report_load16_noabort",
	"__asan_report_store_n_noabort",
	"__asan_report_store1_noabort",
	"__asan_report_store2_noabort",
	"__asan_report_store4_noabort",
	"__asan_report_store8_noabort",
	"__asan_report_store16_noabort",
	/* KCSAN */
	"__kcsan_check_access",
	"kcsan_found_watchpoint",
	"kcsan_setup_watchpoint",
	"kcsan_check_scoped_accesses",
	"kcsan_disable_current",
	"kcsan_enable_current_nowarn",
	/* KCSAN/TSAN */
	"__tsan_func_entry",
	"__tsan_func_exit",
	"__tsan_read_range",
	"__tsan_write_range",
	"__tsan_read1",
	"__tsan_read2",
	"__tsan_read4",
	"__tsan_read8",
	"__tsan_read16",
	"__tsan_write1",
	"__tsan_write2",
	"__tsan_write4",
	"__tsan_write8",
	"__tsan_write16",
	"__tsan_read_write1",
	"__tsan_read_write2",
	"__tsan_read_write4",
	"__tsan_read_write8",
	"__tsan_read_write16",
	"__tsan_atomic8_load",
	"__tsan_atomic16_load",
	"__tsan_atomic32_load",
	"__tsan_atomic64_load",
	"__tsan_atomic8_store",
	"__tsan_atomic16_store",
	"__tsan_atomic32_store",
	"__tsan_atomic64_store",
	"__tsan_atomic8_exchange",
	"__tsan_atomic16_exchange",
	"__tsan_atomic32_exchange",
	"__tsan_atomic64_exchange",
	"__tsan_atomic8_fetch_add",
	"__tsan_atomic16_fetch_add",
	"__tsan_atomic32_fetch_add",
	"__tsan_atomic64_fetch_add",
	"__tsan_atomic8_fetch_sub",
	"__tsan_atomic16_fetch_sub",
	"__tsan_atomic32_fetch_sub",
	"__tsan_atomic64_fetch_sub",
	"__tsan_atomic8_fetch_and",
	"__tsan_atomic16_fetch_and",
	"__tsan_atomic32_fetch_and",
	"__tsan_atomic64_fetch_and",
	"__tsan_atomic8_fetch_or",
	"__tsan_atomic16_fetch_or",
	"__tsan_atomic32_fetch_or",
	"__tsan_atomic64_fetch_or",
	"__tsan_atomic8_fetch_xor",
	"__tsan_atomic16_fetch_xor",
	"__tsan_atomic32_fetch_xor",
	"__tsan_atomic64_fetch_xor",
	"__tsan_atomic8_fetch_nand",
	"__tsan_atomic16_fetch_nand",
	"__tsan_atomic32_fetch_nand",
	"__tsan_atomic64_fetch_nand",
	"__tsan_atomic8_compare_exchange_strong",
	"__tsan_atomic16_compare_exchange_strong",
	"__tsan_atomic32_compare_exchange_strong",
	"__tsan_atomic64_compare_exchange_strong",
	"__tsan_atomic8_compare_exchange_weak",
	"__tsan_atomic16_compare_exchange_weak",
	"__tsan_atomic32_compare_exchange_weak",
	"__tsan_atomic64_compare_exchange_weak",
	"__tsan_atomic8_compare_exchange_val",
	"__tsan_atomic16_compare_exchange_val",
	"__tsan_atomic32_compare_exchange_val",
	"__tsan_atomic64_compare_exchange_val",
	"__tsan_atomic_thread_fence",
	"__tsan_atomic_signal_fence",
	/* KCOV */
	"write_comp_data",
	"check_kcov_mode",
	"__sanitizer_cov_trace_pc",
	"__sanitizer_cov_trace_const_cmp1",
	"__sanitizer_cov_trace_const_cmp2",
	"__sanitizer_cov_trace_const_cmp4",
	"__sanitizer_cov_trace_const_cmp8",
	"__sanitizer_cov_trace_cmp1",
	"__sanitizer_cov_trace_cmp2",
	"__sanitizer_cov_trace_cmp4",
	"__sanitizer_cov_trace_cmp8",
	"__sanitizer_cov_trace_switch",
	/* UBSAN */
	"ubsan_type_mismatch_common",
	"__ubsan_handle_type_mismatch",
	"__ubsan_handle_type_mismatch_v1",
	"__ubsan_handle_shift_out_of_bounds",
	/* misc */
	"csum_partial_copy_generic",
	"copy_mc_fragile",
	"copy_mc_fragile_handle_tail",
	"copy_mc_enhanced_fast_string",
	"ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
	NULL /* sentinel */
};
743
744 static void add_uaccess_safe(struct objtool_file *file)
745 {
746         struct symbol *func;
747         const char **name;
748
749         if (!uaccess)
750                 return;
751
752         for (name = uaccess_safe_builtin; *name; name++) {
753                 func = find_symbol_by_name(file->elf, *name);
754                 if (!func)
755                         continue;
756
757                 func->uaccess_safe = true;
758         }
759 }
760
761 /*
762  * FIXME: For now, just ignore any alternatives which add retpolines.  This is
763  * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
764  * But it at least allows objtool to understand the control flow *around* the
765  * retpoline.
766  */
767 static int add_ignore_alternatives(struct objtool_file *file)
768 {
769         struct section *sec;
770         struct reloc *reloc;
771         struct instruction *insn;
772
773         sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
774         if (!sec)
775                 return 0;
776
777         list_for_each_entry(reloc, &sec->reloc_list, list) {
778                 if (reloc->sym->type != STT_SECTION) {
779                         WARN("unexpected relocation symbol type in %s", sec->name);
780                         return -1;
781                 }
782
783                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
784                 if (!insn) {
785                         WARN("bad .discard.ignore_alts entry");
786                         return -1;
787                 }
788
789                 insn->ignore_alts = true;
790         }
791
792         return 0;
793 }
794
/*
 * Weak default: treat no symbol as a retpoline thunk.  Arch-specific code
 * may override this to recognize its retpoline thunk symbols.
 */
__weak bool arch_is_retpoline(struct symbol *sym)
{
	return false;
}
799
800 /*
801  * Find the destination instructions for all jumps.
802  */
803 static int add_jump_destinations(struct objtool_file *file)
804 {
805         struct instruction *insn;
806         struct reloc *reloc;
807         struct section *dest_sec;
808         unsigned long dest_off;
809
810         for_each_insn(file, insn) {
811                 if (!is_static_jump(insn))
812                         continue;
813
814                 reloc = find_reloc_by_dest_range(file->elf, insn->sec,
815                                                  insn->offset, insn->len);
816                 if (!reloc) {
817                         dest_sec = insn->sec;
818                         dest_off = arch_jump_destination(insn);
819                 } else if (reloc->sym->type == STT_SECTION) {
820                         dest_sec = reloc->sym->sec;
821                         dest_off = arch_dest_reloc_offset(reloc->addend);
822                 } else if (arch_is_retpoline(reloc->sym)) {
823                         /*
824                          * Retpoline jumps are really dynamic jumps in
825                          * disguise, so convert them accordingly.
826                          */
827                         if (insn->type == INSN_JUMP_UNCONDITIONAL)
828                                 insn->type = INSN_JUMP_DYNAMIC;
829                         else
830                                 insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
831
832                         list_add_tail(&insn->call_node,
833                                       &file->retpoline_call_list);
834
835                         insn->retpoline_safe = true;
836                         continue;
837                 } else if (insn->func) {
838                         /* internal or external sibling call (with reloc) */
839                         insn->call_dest = reloc->sym;
840                         if (insn->call_dest->static_call_tramp) {
841                                 list_add_tail(&insn->call_node,
842                                               &file->static_call_list);
843                         }
844                         continue;
845                 } else if (reloc->sym->sec->idx) {
846                         dest_sec = reloc->sym->sec;
847                         dest_off = reloc->sym->sym.st_value +
848                                    arch_dest_reloc_offset(reloc->addend);
849                 } else {
850                         /* non-func asm code jumping to another file */
851                         continue;
852                 }
853
854                 insn->jump_dest = find_insn(file, dest_sec, dest_off);
855                 if (!insn->jump_dest) {
856
857                         /*
858                          * This is a special case where an alt instruction
859                          * jumps past the end of the section.  These are
860                          * handled later in handle_group_alt().
861                          */
862                         if (!strcmp(insn->sec->name, ".altinstr_replacement"))
863                                 continue;
864
865                         WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
866                                   insn->sec, insn->offset, dest_sec->name,
867                                   dest_off);
868                         return -1;
869                 }
870
871                 /*
872                  * Cross-function jump.
873                  */
874                 if (insn->func && insn->jump_dest->func &&
875                     insn->func != insn->jump_dest->func) {
876
877                         /*
878                          * For GCC 8+, create parent/child links for any cold
879                          * subfunctions.  This is _mostly_ redundant with a
880                          * similar initialization in read_symbols().
881                          *
882                          * If a function has aliases, we want the *first* such
883                          * function in the symbol table to be the subfunction's
884                          * parent.  In that case we overwrite the
885                          * initialization done in read_symbols().
886                          *
887                          * However this code can't completely replace the
888                          * read_symbols() code because this doesn't detect the
889                          * case where the parent function's only reference to a
890                          * subfunction is through a jump table.
891                          */
892                         if (!strstr(insn->func->name, ".cold") &&
893                             strstr(insn->jump_dest->func->name, ".cold")) {
894                                 insn->func->cfunc = insn->jump_dest->func;
895                                 insn->jump_dest->func->pfunc = insn->func;
896
897                         } else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
898                                    insn->jump_dest->offset == insn->jump_dest->func->offset) {
899
900                                 /* internal sibling call (without reloc) */
901                                 insn->call_dest = insn->jump_dest->func;
902                                 if (insn->call_dest->static_call_tramp) {
903                                         list_add_tail(&insn->call_node,
904                                                       &file->static_call_list);
905                                 }
906                         }
907                 }
908         }
909
910         return 0;
911 }
912
913 static void remove_insn_ops(struct instruction *insn)
914 {
915         struct stack_op *op, *tmp;
916
917         list_for_each_entry_safe(op, tmp, &insn->stack_ops, list) {
918                 list_del(&op->list);
919                 free(op);
920         }
921 }
922
/*
 * Resolve a call target at sec+offset: prefer a function symbol, fall
 * back to any symbol at that offset.
 */
static struct symbol *find_call_destination(struct section *sec, unsigned long offset)
{
	struct symbol *sym = find_func_by_offset(sec, offset);

	return sym ? sym : find_symbol_by_offset(sec, offset);
}
933
934 /*
935  * Find the destination instructions for all calls.
936  */
937 static int add_call_destinations(struct objtool_file *file)
938 {
939         struct instruction *insn;
940         unsigned long dest_off;
941         struct reloc *reloc;
942
943         for_each_insn(file, insn) {
944                 if (insn->type != INSN_CALL)
945                         continue;
946
947                 reloc = find_reloc_by_dest_range(file->elf, insn->sec,
948                                                insn->offset, insn->len);
949                 if (!reloc) {
950                         dest_off = arch_jump_destination(insn);
951                         insn->call_dest = find_call_destination(insn->sec, dest_off);
952
953                         if (insn->ignore)
954                                 continue;
955
956                         if (!insn->call_dest) {
957                                 WARN_FUNC("unannotated intra-function call", insn->sec, insn->offset);
958                                 return -1;
959                         }
960
961                         if (insn->func && insn->call_dest->type != STT_FUNC) {
962                                 WARN_FUNC("unsupported call to non-function",
963                                           insn->sec, insn->offset);
964                                 return -1;
965                         }
966
967                 } else if (reloc->sym->type == STT_SECTION) {
968                         dest_off = arch_dest_reloc_offset(reloc->addend);
969                         insn->call_dest = find_call_destination(reloc->sym->sec,
970                                                                 dest_off);
971                         if (!insn->call_dest) {
972                                 WARN_FUNC("can't find call dest symbol at %s+0x%lx",
973                                           insn->sec, insn->offset,
974                                           reloc->sym->sec->name,
975                                           dest_off);
976                                 return -1;
977                         }
978
979                 } else if (arch_is_retpoline(reloc->sym)) {
980                         /*
981                          * Retpoline calls are really dynamic calls in
982                          * disguise, so convert them accordingly.
983                          */
984                         insn->type = INSN_CALL_DYNAMIC;
985                         insn->retpoline_safe = true;
986
987                         list_add_tail(&insn->call_node,
988                                       &file->retpoline_call_list);
989
990                         remove_insn_ops(insn);
991                         continue;
992
993                 } else
994                         insn->call_dest = reloc->sym;
995
996                 if (insn->call_dest && insn->call_dest->static_call_tramp) {
997                         list_add_tail(&insn->call_node,
998                                       &file->static_call_list);
999                 }
1000
1001                 /*
1002                  * Many compilers cannot disable KCOV with a function attribute
1003                  * so they need a little help, NOP out any KCOV calls from noinstr
1004                  * text.
1005                  */
1006                 if (insn->sec->noinstr &&
1007                     !strncmp(insn->call_dest->name, "__sanitizer_cov_", 16)) {
1008                         if (reloc) {
1009                                 reloc->type = R_NONE;
1010                                 elf_write_reloc(file->elf, reloc);
1011                         }
1012
1013                         elf_write_insn(file->elf, insn->sec,
1014                                        insn->offset, insn->len,
1015                                        arch_nop_insn(insn->len));
1016                         insn->type = INSN_NOP;
1017                 }
1018
1019                 if (mcount && !strcmp(insn->call_dest->name, "__fentry__")) {
1020                         if (reloc) {
1021                                 reloc->type = R_NONE;
1022                                 elf_write_reloc(file->elf, reloc);
1023                         }
1024
1025                         elf_write_insn(file->elf, insn->sec,
1026                                        insn->offset, insn->len,
1027                                        arch_nop_insn(insn->len));
1028
1029                         insn->type = INSN_NOP;
1030
1031                         list_add_tail(&insn->mcount_loc_node,
1032                                       &file->mcount_loc_list);
1033                 }
1034
1035                 /*
1036                  * Whatever stack impact regular CALLs have, should be undone
1037                  * by the RETURN of the called function.
1038                  *
1039                  * Annotated intra-function calls retain the stack_ops but
1040                  * are converted to JUMP, see read_intra_function_calls().
1041                  */
1042                 remove_insn_ops(insn);
1043         }
1044
1045         return 0;
1046 }
1047
1048 /*
1049  * The .alternatives section requires some extra special care over and above
1050  * other special sections because alternatives are patched in place.
1051  */
1052 static int handle_group_alt(struct objtool_file *file,
1053                             struct special_alt *special_alt,
1054                             struct instruction *orig_insn,
1055                             struct instruction **new_insn)
1056 {
1057         struct instruction *last_orig_insn, *last_new_insn = NULL, *insn, *nop = NULL;
1058         struct alt_group *orig_alt_group, *new_alt_group;
1059         unsigned long dest_off;
1060
1061
1062         orig_alt_group = malloc(sizeof(*orig_alt_group));
1063         if (!orig_alt_group) {
1064                 WARN("malloc failed");
1065                 return -1;
1066         }
1067         orig_alt_group->cfi = calloc(special_alt->orig_len,
1068                                      sizeof(struct cfi_state *));
1069         if (!orig_alt_group->cfi) {
1070                 WARN("calloc failed");
1071                 return -1;
1072         }
1073
1074         last_orig_insn = NULL;
1075         insn = orig_insn;
1076         sec_for_each_insn_from(file, insn) {
1077                 if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
1078                         break;
1079
1080                 insn->alt_group = orig_alt_group;
1081                 last_orig_insn = insn;
1082         }
1083         orig_alt_group->orig_group = NULL;
1084         orig_alt_group->first_insn = orig_insn;
1085         orig_alt_group->last_insn = last_orig_insn;
1086
1087
1088         new_alt_group = malloc(sizeof(*new_alt_group));
1089         if (!new_alt_group) {
1090                 WARN("malloc failed");
1091                 return -1;
1092         }
1093
1094         if (special_alt->new_len < special_alt->orig_len) {
1095                 /*
1096                  * Insert a fake nop at the end to make the replacement
1097                  * alt_group the same size as the original.  This is needed to
1098                  * allow propagate_alt_cfi() to do its magic.  When the last
1099                  * instruction affects the stack, the instruction after it (the
1100                  * nop) will propagate the new state to the shared CFI array.
1101                  */
1102                 nop = malloc(sizeof(*nop));
1103                 if (!nop) {
1104                         WARN("malloc failed");
1105                         return -1;
1106                 }
1107                 memset(nop, 0, sizeof(*nop));
1108                 INIT_LIST_HEAD(&nop->alts);
1109                 INIT_LIST_HEAD(&nop->stack_ops);
1110                 init_cfi_state(&nop->cfi);
1111
1112                 nop->sec = special_alt->new_sec;
1113                 nop->offset = special_alt->new_off + special_alt->new_len;
1114                 nop->len = special_alt->orig_len - special_alt->new_len;
1115                 nop->type = INSN_NOP;
1116                 nop->func = orig_insn->func;
1117                 nop->alt_group = new_alt_group;
1118                 nop->ignore = orig_insn->ignore_alts;
1119         }
1120
1121         if (!special_alt->new_len) {
1122                 *new_insn = nop;
1123                 goto end;
1124         }
1125
1126         insn = *new_insn;
1127         sec_for_each_insn_from(file, insn) {
1128                 struct reloc *alt_reloc;
1129
1130                 if (insn->offset >= special_alt->new_off + special_alt->new_len)
1131                         break;
1132
1133                 last_new_insn = insn;
1134
1135                 insn->ignore = orig_insn->ignore_alts;
1136                 insn->func = orig_insn->func;
1137                 insn->alt_group = new_alt_group;
1138
1139                 /*
1140                  * Since alternative replacement code is copy/pasted by the
1141                  * kernel after applying relocations, generally such code can't
1142                  * have relative-address relocation references to outside the
1143                  * .altinstr_replacement section, unless the arch's
1144                  * alternatives code can adjust the relative offsets
1145                  * accordingly.
1146                  */
1147                 alt_reloc = find_reloc_by_dest_range(file->elf, insn->sec,
1148                                                    insn->offset, insn->len);
1149                 if (alt_reloc &&
1150                     !arch_support_alt_relocation(special_alt, insn, alt_reloc)) {
1151
1152                         WARN_FUNC("unsupported relocation in alternatives section",
1153                                   insn->sec, insn->offset);
1154                         return -1;
1155                 }
1156
1157                 if (!is_static_jump(insn))
1158                         continue;
1159
1160                 if (!insn->immediate)
1161                         continue;
1162
1163                 dest_off = arch_jump_destination(insn);
1164                 if (dest_off == special_alt->new_off + special_alt->new_len)
1165                         insn->jump_dest = next_insn_same_sec(file, last_orig_insn);
1166
1167                 if (!insn->jump_dest) {
1168                         WARN_FUNC("can't find alternative jump destination",
1169                                   insn->sec, insn->offset);
1170                         return -1;
1171                 }
1172         }
1173
1174         if (!last_new_insn) {
1175                 WARN_FUNC("can't find last new alternative instruction",
1176                           special_alt->new_sec, special_alt->new_off);
1177                 return -1;
1178         }
1179
1180         if (nop)
1181                 list_add(&nop->list, &last_new_insn->list);
1182 end:
1183         new_alt_group->orig_group = orig_alt_group;
1184         new_alt_group->first_insn = *new_insn;
1185         new_alt_group->last_insn = nop ? : last_new_insn;
1186         new_alt_group->cfi = orig_alt_group->cfi;
1187         return 0;
1188 }
1189
1190 /*
1191  * A jump table entry can either convert a nop to a jump or a jump to a nop.
1192  * If the original instruction is a jump, make the alt entry an effective nop
1193  * by just skipping the original instruction.
1194  */
1195 static int handle_jump_alt(struct objtool_file *file,
1196                            struct special_alt *special_alt,
1197                            struct instruction *orig_insn,
1198                            struct instruction **new_insn)
1199 {
1200         if (orig_insn->type == INSN_NOP)
1201                 return 0;
1202
1203         if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
1204                 WARN_FUNC("unsupported instruction at jump label",
1205                           orig_insn->sec, orig_insn->offset);
1206                 return -1;
1207         }
1208
1209         *new_insn = list_next_entry(orig_insn, list);
1210         return 0;
1211 }
1212
1213 /*
1214  * Read all the special sections which have alternate instructions which can be
1215  * patched in or redirected to at runtime.  Each instruction having alternate
1216  * instruction(s) has them added to its insn->alts list, which will be
1217  * traversed in validate_branch().
1218  */
1219 static int add_special_section_alts(struct objtool_file *file)
1220 {
1221         struct list_head special_alts;
1222         struct instruction *orig_insn, *new_insn;
1223         struct special_alt *special_alt, *tmp;
1224         struct alternative *alt;
1225         int ret;
1226
1227         ret = special_get_alts(file->elf, &special_alts);
1228         if (ret)
1229                 return ret;
1230
1231         list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
1232
1233                 orig_insn = find_insn(file, special_alt->orig_sec,
1234                                       special_alt->orig_off);
1235                 if (!orig_insn) {
1236                         WARN_FUNC("special: can't find orig instruction",
1237                                   special_alt->orig_sec, special_alt->orig_off);
1238                         ret = -1;
1239                         goto out;
1240                 }
1241
1242                 new_insn = NULL;
1243                 if (!special_alt->group || special_alt->new_len) {
1244                         new_insn = find_insn(file, special_alt->new_sec,
1245                                              special_alt->new_off);
1246                         if (!new_insn) {
1247                                 WARN_FUNC("special: can't find new instruction",
1248                                           special_alt->new_sec,
1249                                           special_alt->new_off);
1250                                 ret = -1;
1251                                 goto out;
1252                         }
1253                 }
1254
1255                 if (special_alt->group) {
1256                         if (!special_alt->orig_len) {
1257                                 WARN_FUNC("empty alternative entry",
1258                                           orig_insn->sec, orig_insn->offset);
1259                                 continue;
1260                         }
1261
1262                         ret = handle_group_alt(file, special_alt, orig_insn,
1263                                                &new_insn);
1264                         if (ret)
1265                                 goto out;
1266                 } else if (special_alt->jump_or_nop) {
1267                         ret = handle_jump_alt(file, special_alt, orig_insn,
1268                                               &new_insn);
1269                         if (ret)
1270                                 goto out;
1271                 }
1272
1273                 alt = malloc(sizeof(*alt));
1274                 if (!alt) {
1275                         WARN("malloc failed");
1276                         ret = -1;
1277                         goto out;
1278                 }
1279
1280                 alt->insn = new_insn;
1281                 alt->skip_orig = special_alt->skip_orig;
1282                 orig_insn->ignore_alts |= special_alt->skip_alt;
1283                 list_add_tail(&alt->list, &orig_insn->alts);
1284
1285                 list_del(&special_alt->list);
1286                 free(special_alt);
1287         }
1288
1289 out:
1290         return ret;
1291 }
1292
/*
 * Add each entry of the jump table starting at @table as an alternative
 * branch destination of the dynamic jump @insn, so validate_branch() can
 * follow all possible switch targets.  Returns 0 on success, -1 if the
 * table yields no entries at all.
 */
static int add_jump_table(struct objtool_file *file, struct instruction *insn,
			    struct reloc *table)
{
	struct reloc *reloc = table;
	struct instruction *dest_insn;
	struct alternative *alt;
	struct symbol *pfunc = insn->func->pfunc;
	unsigned int prev_offset = 0;

	/*
	 * Each @reloc is a switch table relocation which points to the target
	 * instruction.
	 */
	list_for_each_entry_from(reloc, &table->sec->reloc_list, list) {

		/* Check for the end of the table: */
		if (reloc != table && reloc->jump_table_start)
			break;

		/* Make sure the table entries are consecutive: */
		/* NOTE(review): assumes 8-byte table entries — presumably
		 * 64-bit specific; confirm before reusing on other arches. */
		if (prev_offset && reloc->offset != prev_offset + 8)
			break;

		/* Detect function pointers from contiguous objects: */
		if (reloc->sym->sec == pfunc->sec &&
		    reloc->addend == pfunc->offset)
			break;

		dest_insn = find_insn(file, reloc->sym->sec, reloc->addend);
		if (!dest_insn)
			break;

		/* Make sure the destination is in the same function: */
		if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
			break;

		alt = malloc(sizeof(*alt));
		if (!alt) {
			WARN("malloc failed");
			return -1;
		}

		alt->insn = dest_insn;
		list_add_tail(&alt->list, &insn->alts);
		prev_offset = reloc->offset;
	}

	/* prev_offset stays 0 only when no entry was accepted at all. */
	if (!prev_offset) {
		WARN_FUNC("can't find switch jump table",
			  insn->sec, insn->offset);
		return -1;
	}

	return 0;
}
1348
1349 /*
1350  * find_jump_table() - Given a dynamic jump, find the switch jump table
1351  * associated with it.
1352  */
1353 static struct reloc *find_jump_table(struct objtool_file *file,
1354                                       struct symbol *func,
1355                                       struct instruction *insn)
1356 {
1357         struct reloc *table_reloc;
1358         struct instruction *dest_insn, *orig_insn = insn;
1359
1360         /*
1361          * Backward search using the @first_jump_src links, these help avoid
1362          * much of the 'in between' code. Which avoids us getting confused by
1363          * it.
1364          */
1365         for (;
1366              insn && insn->func && insn->func->pfunc == func;
1367              insn = insn->first_jump_src ?: prev_insn_same_sym(file, insn)) {
1368
1369                 if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1370                         break;
1371
1372                 /* allow small jumps within the range */
1373                 if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1374                     insn->jump_dest &&
1375                     (insn->jump_dest->offset <= insn->offset ||
1376                      insn->jump_dest->offset > orig_insn->offset))
1377                     break;
1378
1379                 table_reloc = arch_find_switch_table(file, insn);
1380                 if (!table_reloc)
1381                         continue;
1382                 dest_insn = find_insn(file, table_reloc->sym->sec, table_reloc->addend);
1383                 if (!dest_insn || !dest_insn->func || dest_insn->func->pfunc != func)
1384                         continue;
1385
1386                 return table_reloc;
1387         }
1388
1389         return NULL;
1390 }
1391
1392 /*
1393  * First pass: Mark the head of each jump table so that in the next pass,
1394  * we know when a given jump table ends and the next one starts.
1395  */
1396 static void mark_func_jump_tables(struct objtool_file *file,
1397                                     struct symbol *func)
1398 {
1399         struct instruction *insn, *last = NULL;
1400         struct reloc *reloc;
1401
1402         func_for_each_insn(file, func, insn) {
1403                 if (!last)
1404                         last = insn;
1405
1406                 /*
1407                  * Store back-pointers for unconditional forward jumps such
1408                  * that find_jump_table() can back-track using those and
1409                  * avoid some potentially confusing code.
1410                  */
1411                 if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1412                     insn->offset > last->offset &&
1413                     insn->jump_dest->offset > insn->offset &&
1414                     !insn->jump_dest->first_jump_src) {
1415
1416                         insn->jump_dest->first_jump_src = insn;
1417                         last = insn->jump_dest;
1418                 }
1419
1420                 if (insn->type != INSN_JUMP_DYNAMIC)
1421                         continue;
1422
1423                 reloc = find_jump_table(file, func, insn);
1424                 if (reloc) {
1425                         reloc->jump_table_start = true;
1426                         insn->jump_table = reloc;
1427                 }
1428         }
1429 }
1430
1431 static int add_func_jump_tables(struct objtool_file *file,
1432                                   struct symbol *func)
1433 {
1434         struct instruction *insn;
1435         int ret;
1436
1437         func_for_each_insn(file, func, insn) {
1438                 if (!insn->jump_table)
1439                         continue;
1440
1441                 ret = add_jump_table(file, insn, insn->jump_table);
1442                 if (ret)
1443                         return ret;
1444         }
1445
1446         return 0;
1447 }
1448
1449 /*
1450  * For some switch statements, gcc generates a jump table in the .rodata
1451  * section which contains a list of addresses within the function to jump to.
1452  * This finds these jump tables and adds them to the insn->alts lists.
1453  */
1454 static int add_jump_table_alts(struct objtool_file *file)
1455 {
1456         struct section *sec;
1457         struct symbol *func;
1458         int ret;
1459
1460         if (!file->rodata)
1461                 return 0;
1462
1463         for_each_sec(file, sec) {
1464                 list_for_each_entry(func, &sec->symbol_list, list) {
1465                         if (func->type != STT_FUNC)
1466                                 continue;
1467
1468                         mark_func_jump_tables(file, func);
1469                         ret = add_func_jump_tables(file, func);
1470                         if (ret)
1471                                 return ret;
1472                 }
1473         }
1474
1475         return 0;
1476 }
1477
1478 static void set_func_state(struct cfi_state *state)
1479 {
1480         state->cfa = initial_func_cfi.cfa;
1481         memcpy(&state->regs, &initial_func_cfi.regs,
1482                CFI_NUM_REGS * sizeof(struct cfi_reg));
1483         state->stack_size = initial_func_cfi.cfa.offset;
1484 }
1485
/*
 * Parse the .discard.unwind_hints section: for each struct unwind_hint,
 * find the instruction its reloc points at, mark it hinted, and seed its
 * CFI state from the hint.  Returns 0 on success (including when the
 * section is absent), -1 on malformed data.
 */
static int read_unwind_hints(struct objtool_file *file)
{
	struct section *sec, *relocsec;
	struct reloc *reloc;
	struct unwind_hint *hint;
	struct instruction *insn;
	int i;

	sec = find_section_by_name(file->elf, ".discard.unwind_hints");
	if (!sec)
		return 0;

	relocsec = sec->reloc;
	if (!relocsec) {
		WARN("missing .rela.discard.unwind_hints section");
		return -1;
	}

	/* The section must be a whole number of unwind_hint records. */
	if (sec->len % sizeof(struct unwind_hint)) {
		WARN("struct unwind_hint size mismatch");
		return -1;
	}

	file->hints = true;

	for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
		hint = (struct unwind_hint *)sec->data->d_buf + i;

		/* Each hint record has a matching reloc naming its insn. */
		reloc = find_reloc_by_dest(file->elf, sec, i * sizeof(*hint));
		if (!reloc) {
			WARN("can't find reloc for unwind_hints[%d]", i);
			return -1;
		}

		insn = find_insn(file, reloc->sym->sec, reloc->addend);
		if (!insn) {
			WARN("can't find insn for unwind_hints[%d]", i);
			return -1;
		}

		insn->hint = true;

		/* FUNC-type hints just reset to the standard function entry state. */
		if (hint->type == UNWIND_HINT_TYPE_FUNC) {
			set_func_state(&insn->cfi);
			continue;
		}

		if (arch_decode_hint_reg(insn, hint->sp_reg)) {
			WARN_FUNC("unsupported unwind_hint sp base reg %d",
				  insn->sec, insn->offset, hint->sp_reg);
			return -1;
		}

		/* Hint fields are stored in target byte order; swap if needed. */
		insn->cfi.cfa.offset = bswap_if_needed(hint->sp_offset);
		insn->cfi.type = hint->type;
		insn->cfi.end = hint->end;
	}

	return 0;
}
1546
1547 static int read_retpoline_hints(struct objtool_file *file)
1548 {
1549         struct section *sec;
1550         struct instruction *insn;
1551         struct reloc *reloc;
1552
1553         sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1554         if (!sec)
1555                 return 0;
1556
1557         list_for_each_entry(reloc, &sec->reloc_list, list) {
1558                 if (reloc->sym->type != STT_SECTION) {
1559                         WARN("unexpected relocation symbol type in %s", sec->name);
1560                         return -1;
1561                 }
1562
1563                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
1564                 if (!insn) {
1565                         WARN("bad .discard.retpoline_safe entry");
1566                         return -1;
1567                 }
1568
1569                 if (insn->type != INSN_JUMP_DYNAMIC &&
1570                     insn->type != INSN_CALL_DYNAMIC) {
1571                         WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1572                                   insn->sec, insn->offset);
1573                         return -1;
1574                 }
1575
1576                 insn->retpoline_safe = true;
1577         }
1578
1579         return 0;
1580 }
1581
1582 static int read_instr_hints(struct objtool_file *file)
1583 {
1584         struct section *sec;
1585         struct instruction *insn;
1586         struct reloc *reloc;
1587
1588         sec = find_section_by_name(file->elf, ".rela.discard.instr_end");
1589         if (!sec)
1590                 return 0;
1591
1592         list_for_each_entry(reloc, &sec->reloc_list, list) {
1593                 if (reloc->sym->type != STT_SECTION) {
1594                         WARN("unexpected relocation symbol type in %s", sec->name);
1595                         return -1;
1596                 }
1597
1598                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
1599                 if (!insn) {
1600                         WARN("bad .discard.instr_end entry");
1601                         return -1;
1602                 }
1603
1604                 insn->instr--;
1605         }
1606
1607         sec = find_section_by_name(file->elf, ".rela.discard.instr_begin");
1608         if (!sec)
1609                 return 0;
1610
1611         list_for_each_entry(reloc, &sec->reloc_list, list) {
1612                 if (reloc->sym->type != STT_SECTION) {
1613                         WARN("unexpected relocation symbol type in %s", sec->name);
1614                         return -1;
1615                 }
1616
1617                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
1618                 if (!insn) {
1619                         WARN("bad .discard.instr_begin entry");
1620                         return -1;
1621                 }
1622
1623                 insn->instr++;
1624         }
1625
1626         return 0;
1627 }
1628
1629 static int read_intra_function_calls(struct objtool_file *file)
1630 {
1631         struct instruction *insn;
1632         struct section *sec;
1633         struct reloc *reloc;
1634
1635         sec = find_section_by_name(file->elf, ".rela.discard.intra_function_calls");
1636         if (!sec)
1637                 return 0;
1638
1639         list_for_each_entry(reloc, &sec->reloc_list, list) {
1640                 unsigned long dest_off;
1641
1642                 if (reloc->sym->type != STT_SECTION) {
1643                         WARN("unexpected relocation symbol type in %s",
1644                              sec->name);
1645                         return -1;
1646                 }
1647
1648                 insn = find_insn(file, reloc->sym->sec, reloc->addend);
1649                 if (!insn) {
1650                         WARN("bad .discard.intra_function_call entry");
1651                         return -1;
1652                 }
1653
1654                 if (insn->type != INSN_CALL) {
1655                         WARN_FUNC("intra_function_call not a direct call",
1656                                   insn->sec, insn->offset);
1657                         return -1;
1658                 }
1659
1660                 /*
1661                  * Treat intra-function CALLs as JMPs, but with a stack_op.
1662                  * See add_call_destinations(), which strips stack_ops from
1663                  * normal CALLs.
1664                  */
1665                 insn->type = INSN_JUMP_UNCONDITIONAL;
1666
1667                 dest_off = insn->offset + insn->len + insn->immediate;
1668                 insn->jump_dest = find_insn(file, insn->sec, dest_off);
1669                 if (!insn->jump_dest) {
1670                         WARN_FUNC("can't find call dest at %s+0x%lx",
1671                                   insn->sec, insn->offset,
1672                                   insn->sec->name, dest_off);
1673                         return -1;
1674                 }
1675         }
1676
1677         return 0;
1678 }
1679
1680 static int read_static_call_tramps(struct objtool_file *file)
1681 {
1682         struct section *sec;
1683         struct symbol *func;
1684
1685         for_each_sec(file, sec) {
1686                 list_for_each_entry(func, &sec->symbol_list, list) {
1687                         if (func->bind == STB_GLOBAL &&
1688                             !strncmp(func->name, STATIC_CALL_TRAMP_PREFIX_STR,
1689                                      strlen(STATIC_CALL_TRAMP_PREFIX_STR)))
1690                                 func->static_call_tramp = true;
1691                 }
1692         }
1693
1694         return 0;
1695 }
1696
1697 static void mark_rodata(struct objtool_file *file)
1698 {
1699         struct section *sec;
1700         bool found = false;
1701
1702         /*
1703          * Search for the following rodata sections, each of which can
1704          * potentially contain jump tables:
1705          *
1706          * - .rodata: can contain GCC switch tables
1707          * - .rodata.<func>: same, if -fdata-sections is being used
1708          * - .rodata..c_jump_table: contains C annotated jump tables
1709          *
1710          * .rodata.str1.* sections are ignored; they don't contain jump tables.
1711          */
1712         for_each_sec(file, sec) {
1713                 if (!strncmp(sec->name, ".rodata", 7) &&
1714                     !strstr(sec->name, ".str1.")) {
1715                         sec->rodata = true;
1716                         found = true;
1717                 }
1718         }
1719
1720         file->rodata = found;
1721 }
1722
/*
 * Weak default for architectures that don't rewrite retpoline thunk calls;
 * arch code may override this (see decode_sections()).  The default is a
 * no-op that reports success.
 */
__weak int arch_rewrite_retpolines(struct objtool_file *file)
{
	return 0;
}
1727
static int decode_sections(struct objtool_file *file)
{
	int ret;

	mark_rodata(file);

	if ((ret = decode_instructions(file)) ||
	    (ret = add_dead_ends(file)))
		return ret;

	add_ignores(file);
	add_uaccess_safe(file);

	if ((ret = add_ignore_alternatives(file)))
		return ret;

	/*
	 * Must be before add_{jump_call}_destination.
	 */
	if ((ret = read_static_call_tramps(file)))
		return ret;

	/*
	 * Must be before add_special_section_alts() as that depends on
	 * jump_dest being set.
	 */
	if ((ret = add_jump_destinations(file)) ||
	    (ret = add_special_section_alts(file)))
		return ret;

	/*
	 * read_intra_function_calls() must be before add_call_destination();
	 * it changes INSN_CALL to INSN_JUMP.
	 */
	if ((ret = read_intra_function_calls(file)) ||
	    (ret = add_call_destinations(file)) ||
	    (ret = add_jump_table_alts(file)) ||
	    (ret = read_unwind_hints(file)) ||
	    (ret = read_retpoline_hints(file)) ||
	    (ret = read_instr_hints(file)))
		return ret;

	/*
	 * Must be after add_special_section_alts(), since this will emit
	 * alternatives. Must be after add_{jump,call}_destination(), since
	 * those create the call insn lists.
	 */
	return arch_rewrite_retpolines(file);
}
1807
1808 static bool is_fentry_call(struct instruction *insn)
1809 {
1810         if (insn->type == INSN_CALL && insn->call_dest &&
1811             insn->call_dest->type == STT_NOTYPE &&
1812             !strcmp(insn->call_dest->name, "__fentry__"))
1813                 return true;
1814
1815         return false;
1816 }
1817
1818 static bool has_modified_stack_frame(struct instruction *insn, struct insn_state *state)
1819 {
1820         struct cfi_state *cfi = &state->cfi;
1821         int i;
1822
1823         if (cfi->cfa.base != initial_func_cfi.cfa.base || cfi->drap)
1824                 return true;
1825
1826         if (cfi->cfa.offset != initial_func_cfi.cfa.offset)
1827                 return true;
1828
1829         if (cfi->stack_size != initial_func_cfi.cfa.offset)
1830                 return true;
1831
1832         for (i = 0; i < CFI_NUM_REGS; i++) {
1833                 if (cfi->regs[i].base != initial_func_cfi.regs[i].base ||
1834                     cfi->regs[i].offset != initial_func_cfi.regs[i].offset)
1835                         return true;
1836         }
1837
1838         return false;
1839 }
1840
1841 static bool check_reg_frame_pos(const struct cfi_reg *reg,
1842                                 int expected_offset)
1843 {
1844         return reg->base == CFI_CFA &&
1845                reg->offset == expected_offset;
1846 }
1847
1848 static bool has_valid_stack_frame(struct insn_state *state)
1849 {
1850         struct cfi_state *cfi = &state->cfi;
1851
1852         if (cfi->cfa.base == CFI_BP &&
1853             check_reg_frame_pos(&cfi->regs[CFI_BP], -cfi->cfa.offset) &&
1854             check_reg_frame_pos(&cfi->regs[CFI_RA], -cfi->cfa.offset + 8))
1855                 return true;
1856
1857         if (cfi->drap && cfi->regs[CFI_BP].base == CFI_BP)
1858                 return true;
1859
1860         return false;
1861 }
1862
1863 static int update_cfi_state_regs(struct instruction *insn,
1864                                   struct cfi_state *cfi,
1865                                   struct stack_op *op)
1866 {
1867         struct cfi_reg *cfa = &cfi->cfa;
1868
1869         if (cfa->base != CFI_SP && cfa->base != CFI_SP_INDIRECT)
1870                 return 0;
1871
1872         /* push */
1873         if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1874                 cfa->offset += 8;
1875
1876         /* pop */
1877         if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1878                 cfa->offset -= 8;
1879
1880         /* add immediate to sp */
1881         if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1882             op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1883                 cfa->offset -= op->src.offset;
1884
1885         return 0;
1886 }
1887
1888 static void save_reg(struct cfi_state *cfi, unsigned char reg, int base, int offset)
1889 {
1890         if (arch_callee_saved_reg(reg) &&
1891             cfi->regs[reg].base == CFI_UNDEFINED) {
1892                 cfi->regs[reg].base = base;
1893                 cfi->regs[reg].offset = offset;
1894         }
1895 }
1896
1897 static void restore_reg(struct cfi_state *cfi, unsigned char reg)
1898 {
1899         cfi->regs[reg].base = initial_func_cfi.regs[reg].base;
1900         cfi->regs[reg].offset = initial_func_cfi.regs[reg].offset;
1901 }
1902
1903 /*
1904  * A note about DRAP stack alignment:
1905  *
1906  * GCC has the concept of a DRAP register, which is used to help keep track of
1907  * the stack pointer when aligning the stack.  r10 or r13 is used as the DRAP
1908  * register.  The typical DRAP pattern is:
1909  *
1910  *   4c 8d 54 24 08             lea    0x8(%rsp),%r10
1911  *   48 83 e4 c0                and    $0xffffffffffffffc0,%rsp
1912  *   41 ff 72 f8                pushq  -0x8(%r10)
1913  *   55                         push   %rbp
1914  *   48 89 e5                   mov    %rsp,%rbp
1915  *                              (more pushes)
1916  *   41 52                      push   %r10
1917  *                              ...
1918  *   41 5a                      pop    %r10
1919  *                              (more pops)
1920  *   5d                         pop    %rbp
1921  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
1922  *   c3                         retq
1923  *
1924  * There are some variations in the epilogues, like:
1925  *
1926  *   5b                         pop    %rbx
1927  *   41 5a                      pop    %r10
1928  *   41 5c                      pop    %r12
1929  *   41 5d                      pop    %r13
1930  *   41 5e                      pop    %r14
1931  *   c9                         leaveq
1932  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
1933  *   c3                         retq
1934  *
1935  * and:
1936  *
1937  *   4c 8b 55 e8                mov    -0x18(%rbp),%r10
1938  *   48 8b 5d e0                mov    -0x20(%rbp),%rbx
1939  *   4c 8b 65 f0                mov    -0x10(%rbp),%r12
1940  *   4c 8b 6d f8                mov    -0x8(%rbp),%r13
1941  *   c9                         leaveq
1942  *   49 8d 62 f8                lea    -0x8(%r10),%rsp
1943  *   c3                         retq
1944  *
1945  * Sometimes r13 is used as the DRAP register, in which case it's saved and
1946  * restored beforehand:
1947  *
1948  *   41 55                      push   %r13
1949  *   4c 8d 6c 24 10             lea    0x10(%rsp),%r13
1950  *   48 83 e4 f0                and    $0xfffffffffffffff0,%rsp
1951  *                              ...
1952  *   49 8d 65 f0                lea    -0x10(%r13),%rsp
1953  *   41 5d                      pop    %r13
1954  *   c3                         retq
1955  */
/*
 * Apply the effect of a single stack operation (push/pop/mov/lea/and/leave)
 * to the CFI state: the CFA (canonical frame address) base/offset, the
 * tracked stack size, callee-saved register save slots, and GCC's DRAP
 * stack-realignment scheme (see the comment block above).
 *
 * Returns 0 on success, -1 (after WARN_FUNC) when the stack pattern can't
 * be followed.
 */
static int update_cfi_state(struct instruction *insn, struct cfi_state *cfi,
			     struct stack_op *op)
{
	struct cfi_reg *cfa = &cfi->cfa;
	struct cfi_reg *regs = cfi->regs;

	/* stack operations don't make sense with an undefined CFA */
	if (cfa->base == CFI_UNDEFINED) {
		if (insn->func) {
			WARN_FUNC("undefined stack state", insn->sec, insn->offset);
			return -1;
		}
		return 0;
	}

	/* Unwind-hint REGS states get the simpler SP-offset-only tracking. */
	if (cfi->type == UNWIND_HINT_TYPE_REGS ||
	    cfi->type == UNWIND_HINT_TYPE_REGS_PARTIAL)
		return update_cfi_state_regs(insn, cfi, op);

	switch (op->dest.type) {

	case OP_DEST_REG:
		switch (op->src.type) {

		case OP_SRC_REG:
			if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
			    cfa->base == CFI_SP &&
			    check_reg_frame_pos(&regs[CFI_BP], -cfa->offset)) {

				/* mov %rsp, %rbp */
				cfa->base = op->dest.reg;
				cfi->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP &&
				 op->dest.reg == CFI_BP && cfi->drap) {

				/* drap: mov %rsp, %rbp */
				regs[CFI_BP].base = CFI_BP;
				regs[CFI_BP].offset = -cfi->stack_size;
				cfi->bp_scratch = false;
			}

			else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/*
				 * mov %rsp, %reg
				 *
				 * This is needed for the rare case where GCC
				 * does:
				 *
				 *   mov    %rsp, %rax
				 *   ...
				 *   mov    %rax, %rsp
				 */
				cfi->vals[op->dest.reg].base = CFI_CFA;
				cfi->vals[op->dest.reg].offset = -cfi->stack_size;
			}

			else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
				 cfa->base == CFI_BP) {

				/*
				 * mov %rbp, %rsp
				 *
				 * Restore the original stack pointer (Clang).
				 */
				cfi->stack_size = -cfi->regs[CFI_BP].offset;
			}

			else if (op->dest.reg == cfa->base) {

				/* mov %reg, %rsp */
				if (cfa->base == CFI_SP &&
				    cfi->vals[op->src.reg].base == CFI_CFA) {

					/*
					 * This is needed for the rare case
					 * where GCC does something dumb like:
					 *
					 *   lea    0x8(%rsp), %rcx
					 *   ...
					 *   mov    %rcx, %rsp
					 */
					cfa->offset = -cfi->vals[op->src.reg].offset;
					cfi->stack_size = cfa->offset;

				} else if (cfa->base == CFI_SP &&
					   cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
					   cfi->vals[op->src.reg].offset == cfa->offset) {

					/*
					 * Stack swizzle:
					 *
					 * 1: mov %rsp, (%[tos])
					 * 2: mov %[tos], %rsp
					 *    ...
					 * 3: pop %rsp
					 *
					 * Where:
					 *
					 * 1 - places a pointer to the previous
					 *     stack at the Top-of-Stack of the
					 *     new stack.
					 *
					 * 2 - switches to the new stack.
					 *
					 * 3 - pops the Top-of-Stack to restore
					 *     the original stack.
					 *
					 * Note: we set base to SP_INDIRECT
					 * here and preserve offset. Therefore
					 * when the unwinder reaches ToS it
					 * will dereference SP and then add the
					 * offset to find the next frame, IOW:
					 * (%rsp) + offset.
					 */
					cfa->base = CFI_SP_INDIRECT;

				} else {
					/* Unknown source: CFA is lost. */
					cfa->base = CFI_UNDEFINED;
					cfa->offset = 0;
				}
			}

			else if (op->dest.reg == CFI_SP &&
				 cfi->vals[op->src.reg].base == CFI_SP_INDIRECT &&
				 cfi->vals[op->src.reg].offset == cfa->offset) {

				/*
				 * The same stack swizzle case 2) as above. But
				 * because we can't change cfa->base, case 3)
				 * will become a regular POP. Pretend we're a
				 * PUSH so things don't go unbalanced.
				 */
				cfi->stack_size += 8;
			}


			break;

		case OP_SRC_ADD:
			if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {

				/* add imm, %rsp */
				cfi->stack_size -= op->src.offset;
				if (cfa->base == CFI_SP)
					cfa->offset -= op->src.offset;
				break;
			}

			if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {

				/* lea disp(%rbp), %rsp */
				cfi->stack_size = -(op->src.offset + regs[CFI_BP].offset);
				break;
			}

			if (!cfi->drap && op->src.reg == CFI_SP &&
			    op->dest.reg == CFI_BP && cfa->base == CFI_SP &&
			    check_reg_frame_pos(&regs[CFI_BP], -cfa->offset + op->src.offset)) {

				/* lea disp(%rsp), %rbp */
				cfa->base = CFI_BP;
				cfa->offset -= op->src.offset;
				cfi->bp_scratch = false;
				break;
			}

			if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {

				/* drap: lea disp(%rsp), %drap */
				cfi->drap_reg = op->dest.reg;

				/*
				 * lea disp(%rsp), %reg
				 *
				 * This is needed for the rare case where GCC
				 * does something dumb like:
				 *
				 *   lea    0x8(%rsp), %rcx
				 *   ...
				 *   mov    %rcx, %rsp
				 */
				cfi->vals[op->dest.reg].base = CFI_CFA;
				cfi->vals[op->dest.reg].offset = \
					-cfi->stack_size + op->src.offset;

				break;
			}

			if (cfi->drap && op->dest.reg == CFI_SP &&
			    op->src.reg == cfi->drap_reg) {

				 /* drap: lea disp(%drap), %rsp */
				cfa->base = CFI_SP;
				cfa->offset = cfi->stack_size = -op->src.offset;
				cfi->drap_reg = CFI_UNDEFINED;
				cfi->drap = false;
				break;
			}

			if (op->dest.reg == cfi->cfa.base) {
				WARN_FUNC("unsupported stack register modification",
					  insn->sec, insn->offset);
				return -1;
			}

			break;

		case OP_SRC_AND:
			if (op->dest.reg != CFI_SP ||
			    (cfi->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
			    (cfi->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
				WARN_FUNC("unsupported stack pointer realignment",
					  insn->sec, insn->offset);
				return -1;
			}

			if (cfi->drap_reg != CFI_UNDEFINED) {
				/* drap: and imm, %rsp */
				cfa->base = cfi->drap_reg;
				cfa->offset = cfi->stack_size = 0;
				cfi->drap = true;
			}

			/*
			 * Older versions of GCC (4.8ish) realign the stack
			 * without DRAP, with a frame pointer.
			 */

			break;

		case OP_SRC_POP:
		case OP_SRC_POPF:
			if (op->dest.reg == CFI_SP && cfa->base == CFI_SP_INDIRECT) {

				/* pop %rsp; # restore from a stack swizzle */
				cfa->base = CFI_SP;
				break;
			}

			if (!cfi->drap && op->dest.reg == cfa->base) {

				/* pop %rbp */
				cfa->base = CFI_SP;
			}

			if (cfi->drap && cfa->base == CFI_BP_INDIRECT &&
			    op->dest.reg == cfi->drap_reg &&
			    cfi->drap_offset == -cfi->stack_size) {

				/* drap: pop %drap */
				cfa->base = cfi->drap_reg;
				cfa->offset = 0;
				cfi->drap_offset = -1;

			} else if (regs[op->dest.reg].offset == -cfi->stack_size) {

				/* pop %reg */
				restore_reg(cfi, op->dest.reg);
			}

			/* Every pop shrinks the stack by one slot. */
			cfi->stack_size -= 8;
			if (cfa->base == CFI_SP)
				cfa->offset -= 8;

			break;

		case OP_SRC_REG_INDIRECT:
			if (!cfi->drap && op->dest.reg == cfa->base &&
			    op->dest.reg == CFI_BP) {

				/* mov disp(%rsp), %rbp */
				cfa->base = CFI_SP;
				cfa->offset = cfi->stack_size;
			}

			if (cfi->drap && op->src.reg == CFI_BP &&
			    op->src.offset == cfi->drap_offset) {

				/* drap: mov disp(%rbp), %drap */
				cfa->base = cfi->drap_reg;
				cfa->offset = 0;
				cfi->drap_offset = -1;
			}

			if (cfi->drap && op->src.reg == CFI_BP &&
			    op->src.offset == regs[op->dest.reg].offset) {

				/* drap: mov disp(%rbp), %reg */
				restore_reg(cfi, op->dest.reg);

			} else if (op->src.reg == cfa->base &&
			    op->src.offset == regs[op->dest.reg].offset + cfa->offset) {

				/* mov disp(%rbp), %reg */
				/* mov disp(%rsp), %reg */
				restore_reg(cfi, op->dest.reg);

			} else if (op->src.reg == CFI_SP &&
				   op->src.offset == regs[op->dest.reg].offset + cfi->stack_size) {

				/* mov disp(%rsp), %reg */
				restore_reg(cfi, op->dest.reg);
			}

			break;

		default:
			WARN_FUNC("unknown stack-related instruction",
				  insn->sec, insn->offset);
			return -1;
		}

		break;

	case OP_DEST_PUSH:
	case OP_DEST_PUSHF:
		/* Every push grows the stack by one slot. */
		cfi->stack_size += 8;
		if (cfa->base == CFI_SP)
			cfa->offset += 8;

		if (op->src.type != OP_SRC_REG)
			break;

		if (cfi->drap) {
			if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {

				/* drap: push %drap */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = -cfi->stack_size;

				/* save drap so we know when to restore it */
				cfi->drap_offset = -cfi->stack_size;

			} else if (op->src.reg == CFI_BP && cfa->base == cfi->drap_reg) {

				/* drap: push %rbp */
				cfi->stack_size = 0;

			} else {

				/* drap: push %reg */
				save_reg(cfi, op->src.reg, CFI_BP, -cfi->stack_size);
			}

		} else {

			/* push %reg */
			save_reg(cfi, op->src.reg, CFI_CFA, -cfi->stack_size);
		}

		/* detect when asm code uses rbp as a scratch register */
		if (!no_fp && insn->func && op->src.reg == CFI_BP &&
		    cfa->base != CFI_BP)
			cfi->bp_scratch = true;
		break;

	case OP_DEST_REG_INDIRECT:

		if (cfi->drap) {
			if (op->src.reg == cfa->base && op->src.reg == cfi->drap_reg) {

				/* drap: mov %drap, disp(%rbp) */
				cfa->base = CFI_BP_INDIRECT;
				cfa->offset = op->dest.offset;

				/* save drap offset so we know when to restore it */
				cfi->drap_offset = op->dest.offset;
			} else {

				/* drap: mov reg, disp(%rbp) */
				save_reg(cfi, op->src.reg, CFI_BP, op->dest.offset);
			}

		} else if (op->dest.reg == cfa->base) {

			/* mov reg, disp(%rbp) */
			/* mov reg, disp(%rsp) */
			save_reg(cfi, op->src.reg, CFI_CFA,
				 op->dest.offset - cfi->cfa.offset);

		} else if (op->dest.reg == CFI_SP) {

			/* mov reg, disp(%rsp) */
			save_reg(cfi, op->src.reg, CFI_CFA,
				 op->dest.offset - cfi->stack_size);

		} else if (op->src.reg == CFI_SP && op->dest.offset == 0) {

			/* mov %rsp, (%reg); # setup a stack swizzle. */
			cfi->vals[op->dest.reg].base = CFI_SP_INDIRECT;
			cfi->vals[op->dest.reg].offset = cfa->offset;
		}

		break;

	case OP_DEST_LEAVE:
		if ((!cfi->drap && cfa->base != CFI_BP) ||
		    (cfi->drap && cfa->base != cfi->drap_reg)) {
			WARN_FUNC("leave instruction with modified stack frame",
				  insn->sec, insn->offset);
			return -1;
		}

		/* leave (mov %rbp, %rsp; pop %rbp) */

		cfi->stack_size = -cfi->regs[CFI_BP].offset - 8;
		restore_reg(cfi, CFI_BP);

		if (!cfi->drap) {
			cfa->base = CFI_SP;
			cfa->offset -= 8;
		}

		break;

	case OP_DEST_MEM:
		if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
			WARN_FUNC("unknown stack-related memory operation",
				  insn->sec, insn->offset);
			return -1;
		}

		/* pop mem */
		cfi->stack_size -= 8;
		if (cfa->base == CFI_SP)
			cfa->offset -= 8;

		break;

	default:
		WARN_FUNC("unknown stack-related instruction",
			  insn->sec, insn->offset);
		return -1;
	}

	return 0;
}
2396
2397 /*
2398  * The stack layouts of alternatives instructions can sometimes diverge when
2399  * they have stack modifications.  That's fine as long as the potential stack
2400  * layouts don't conflict at any given potential instruction boundary.
2401  *
2402  * Flatten the CFIs of the different alternative code streams (both original
2403  * and replacement) into a single shared CFI array which can be used to detect
2404  * conflicts and nicely feed a linear array of ORC entries to the unwinder.
2405  */
2406 static int propagate_alt_cfi(struct objtool_file *file, struct instruction *insn)
2407 {
2408         struct cfi_state **alt_cfi;
2409         int group_off;
2410
2411         if (!insn->alt_group)
2412                 return 0;
2413
2414         alt_cfi = insn->alt_group->cfi;
2415         group_off = insn->offset - insn->alt_group->first_insn->offset;
2416
2417         if (!alt_cfi[group_off]) {
2418                 alt_cfi[group_off] = &insn->cfi;
2419         } else {
2420                 if (memcmp(alt_cfi[group_off], &insn->cfi, sizeof(struct cfi_state))) {
2421                         WARN_FUNC("stack layout conflict in alternatives",
2422                                   insn->sec, insn->offset);
2423                         return -1;
2424                 }
2425         }
2426
2427         return 0;
2428 }
2429
/*
 * Apply all of an instruction's stack operations to the CFI state, and, for
 * instructions inside alternatives, track the saved UACCESS (AC flag) state
 * across PUSHF/POPF pairs.
 *
 * Returns 0 on success, 1 on a fatal inconsistency (already warned about).
 */
static int handle_insn_ops(struct instruction *insn, struct insn_state *state)
{
	struct stack_op *op;

	list_for_each_entry(op, &insn->stack_ops, list) {

		if (update_cfi_state(insn, &state->cfi, op))
			return 1;

		/* The PUSHF/POPF tracking below only applies inside alternatives. */
		if (!insn->alt_group)
			continue;

		if (op->dest.type == OP_DEST_PUSHF) {
			/*
			 * uaccess_stack is a shift register of saved AC bits,
			 * with a leading 1 bit as a bottom-of-stack sentinel.
			 */
			if (!state->uaccess_stack) {
				/* empty stack: plant the sentinel first */
				state->uaccess_stack = 1;
			} else if (state->uaccess_stack >> 31) {
				/*
				 * Sentinel already at the top bit; one more
				 * push would shift it out (field is presumably
				 * 32 bits wide -- the >> 31 guard implies it).
				 */
				WARN_FUNC("PUSHF stack exhausted",
					  insn->sec, insn->offset);
				return 1;
			}
			state->uaccess_stack <<= 1;
			state->uaccess_stack  |= state->uaccess;
		}

		if (op->src.type == OP_SRC_POPF) {
			if (state->uaccess_stack) {
				/* restore the AC state saved by the matching PUSHF */
				state->uaccess = state->uaccess_stack & 1;
				state->uaccess_stack >>= 1;
				if (state->uaccess_stack == 1)
					/* only the sentinel remains: stack is empty */
					state->uaccess_stack = 0;
			}
		}
	}

	return 0;
}
2466
2467 static bool insn_cfi_match(struct instruction *insn, struct cfi_state *cfi2)
2468 {
2469         struct cfi_state *cfi1 = &insn->cfi;
2470         int i;
2471
2472         if (memcmp(&cfi1->cfa, &cfi2->cfa, sizeof(cfi1->cfa))) {
2473
2474                 WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
2475                           insn->sec, insn->offset,
2476                           cfi1->cfa.base, cfi1->cfa.offset,
2477                           cfi2->cfa.base, cfi2->cfa.offset);
2478
2479         } else if (memcmp(&cfi1->regs, &cfi2->regs, sizeof(cfi1->regs))) {
2480                 for (i = 0; i < CFI_NUM_REGS; i++) {
2481                         if (!memcmp(&cfi1->regs[i], &cfi2->regs[i],
2482                                     sizeof(struct cfi_reg)))
2483                                 continue;
2484
2485                         WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
2486                                   insn->sec, insn->offset,
2487                                   i, cfi1->regs[i].base, cfi1->regs[i].offset,
2488                                   i, cfi2->regs[i].base, cfi2->regs[i].offset);
2489                         break;
2490                 }
2491
2492         } else if (cfi1->type != cfi2->type) {
2493
2494                 WARN_FUNC("stack state mismatch: type1=%d type2=%d",
2495                           insn->sec, insn->offset, cfi1->type, cfi2->type);
2496
2497         } else if (cfi1->drap != cfi2->drap ||
2498                    (cfi1->drap && cfi1->drap_reg != cfi2->drap_reg) ||
2499                    (cfi1->drap && cfi1->drap_offset != cfi2->drap_offset)) {
2500
2501                 WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
2502                           insn->sec, insn->offset,
2503                           cfi1->drap, cfi1->drap_reg, cfi1->drap_offset,
2504                           cfi2->drap, cfi2->drap_reg, cfi2->drap_offset);
2505
2506         } else
2507                 return true;
2508
2509         return false;
2510 }
2511
2512 static inline bool func_uaccess_safe(struct symbol *func)
2513 {
2514         if (func)
2515                 return func->uaccess_safe;
2516
2517         return false;
2518 }
2519
2520 static inline const char *call_dest_name(struct instruction *insn)
2521 {
2522         if (insn->call_dest)
2523                 return insn->call_dest->name;
2524
2525         return "{dynamic}";
2526 }
2527
2528 static inline bool noinstr_call_dest(struct symbol *func)
2529 {
2530         /*
2531          * We can't deal with indirect function calls at present;
2532          * assume they're instrumented.
2533          */
2534         if (!func)
2535                 return false;
2536
2537         /*
2538          * If the symbol is from a noinstr section; we good.
2539          */
2540         if (func->sec->noinstr)
2541                 return true;
2542
2543         /*
2544          * The __ubsan_handle_*() calls are like WARN(), they only happen when
2545          * something 'BAD' happened. At the risk of taking the machine down,
2546          * let them proceed to get the message out.
2547          */
2548         if (!strncmp(func->name, "__ubsan_handle_", 15))
2549                 return true;
2550
2551         return false;
2552 }
2553
2554 static int validate_call(struct instruction *insn, struct insn_state *state)
2555 {
2556         if (state->noinstr && state->instr <= 0 &&
2557             !noinstr_call_dest(insn->call_dest)) {
2558                 WARN_FUNC("call to %s() leaves .noinstr.text section",
2559                                 insn->sec, insn->offset, call_dest_name(insn));
2560                 return 1;
2561         }
2562
2563         if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
2564                 WARN_FUNC("call to %s() with UACCESS enabled",
2565                                 insn->sec, insn->offset, call_dest_name(insn));
2566                 return 1;
2567         }
2568
2569         if (state->df) {
2570                 WARN_FUNC("call to %s() with DF set",
2571                                 insn->sec, insn->offset, call_dest_name(insn));
2572                 return 1;
2573         }
2574
2575         return 0;
2576 }
2577
2578 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
2579 {
2580         if (has_modified_stack_frame(insn, state)) {
2581                 WARN_FUNC("sibling call from callable instruction with modified stack frame",
2582                                 insn->sec, insn->offset);
2583                 return 1;
2584         }
2585
2586         return validate_call(insn, state);
2587 }
2588
/*
 * Validate the state at a return instruction: instrumentation, UACCESS and
 * DF must be consistent, and the stack frame must have been restored.
 * Returns 0 if OK, 1 after warning otherwise.
 */
static int validate_return(struct symbol *func, struct instruction *insn, struct insn_state *state)
{
	/* noinstr code must not return while instrumentation is enabled */
	if (state->noinstr && state->instr > 0) {
		WARN_FUNC("return with instrumentation enabled",
			  insn->sec, insn->offset);
		return 1;
	}

	/* only UACCESS-safe functions may return with AC set */
	if (state->uaccess && !func_uaccess_safe(func)) {
		WARN_FUNC("return with UACCESS enabled",
			  insn->sec, insn->offset);
		return 1;
	}

	/* conversely, a UACCESS-safe function must not have cleared AC */
	if (!state->uaccess && func_uaccess_safe(func)) {
		WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function",
			  insn->sec, insn->offset);
		return 1;
	}

	if (state->df) {
		WARN_FUNC("return with DF set",
			  insn->sec, insn->offset);
		return 1;
	}

	/* the frame check only applies when returning from a real function */
	if (func && has_modified_stack_frame(insn, state)) {
		WARN_FUNC("return with modified stack frame",
			  insn->sec, insn->offset);
		return 1;
	}

	if (state->cfi.bp_scratch) {
		WARN_FUNC("BP used as a scratch register",
			  insn->sec, insn->offset);
		return 1;
	}

	return 0;
}
2629
2630 static struct instruction *next_insn_to_validate(struct objtool_file *file,
2631                                                  struct instruction *insn)
2632 {
2633         struct alt_group *alt_group = insn->alt_group;
2634
2635         /*
2636          * Simulate the fact that alternatives are patched in-place.  When the
2637          * end of a replacement alt_group is reached, redirect objtool flow to
2638          * the end of the original alt_group.
2639          */
2640         if (alt_group && insn == alt_group->last_insn && alt_group->orig_group)
2641                 return next_insn_same_sec(file, alt_group->orig_group->last_insn);
2642
2643         return next_insn_same_sec(file, insn);
2644 }
2645
2646 /*
2647  * Follow the branch starting at the given instruction, and recursively follow
2648  * any other branches (jumps).  Meanwhile, track the frame pointer state at
2649  * each instruction and validate all the rules described in
2650  * tools/objtool/Documentation/stack-validation.txt.
2651  */
static int validate_branch(struct objtool_file *file, struct symbol *func,
			   struct instruction *insn, struct insn_state state)
{
	struct alternative *alt;
	struct instruction *next_insn;
	struct section *sec;
	u8 visited;
	int ret;

	sec = insn->sec;

	while (1) {
		next_insn = next_insn_to_validate(file, insn);

		/* compiled code must not fall through into the next function */
		if (file->c_file && func && insn->func && func != insn->func->pfunc) {
			WARN("%s() falls through to next function %s()",
			     func->name, insn->func->name);
			return 1;
		}

		if (func && insn->ignore) {
			WARN_FUNC("BUG: why am I validating an ignored function?",
				  sec, insn->offset);
			return 1;
		}

		/*
		 * Each instruction is visited at most once per UACCESS state:
		 * bit 0 = visited with AC clear, bit 1 = visited with AC set.
		 */
		visited = 1 << state.uaccess;
		if (insn->visited) {
			/* re-converging paths must agree on the CFI state */
			if (!insn->hint && !insn_cfi_match(insn, &state.cfi))
				return 1;

			if (insn->visited & visited)
				return 0;
		}

		if (state.noinstr)
			state.instr += insn->instr;

		/* unwind hints override the propagated CFI state */
		if (insn->hint)
			state.cfi = insn->cfi;
		else
			insn->cfi = state.cfi;

		insn->visited |= visited;

		if (propagate_alt_cfi(file, insn))
			return 1;

		/* recurse into every alternative replacement stream */
		if (!insn->ignore_alts && !list_empty(&insn->alts)) {
			bool skip_orig = false;

			list_for_each_entry(alt, &insn->alts, list) {
				if (alt->skip_orig)
					skip_orig = true;

				ret = validate_branch(file, func, alt->insn, state);
				if (ret) {
					if (backtrace)
						BT_FUNC("(alt)", insn);
					return ret;
				}
			}

			if (skip_orig)
				return 0;
		}

		if (handle_insn_ops(insn, &state))
			return 1;

		switch (insn->type) {

		case INSN_RETURN:
			return validate_return(func, insn, &state);

		case INSN_CALL:
		case INSN_CALL_DYNAMIC:
			ret = validate_call(insn, &state);
			if (ret)
				return ret;

			/* frame-pointer builds require a frame before any call */
			if (!no_fp && func && !is_fentry_call(insn) &&
			    !has_valid_stack_frame(&state)) {
				WARN_FUNC("call without frame pointer save/setup",
					  sec, insn->offset);
				return 1;
			}

			/* a call to a __noreturn function ends this path */
			if (dead_end_function(file, insn->call_dest))
				return 0;

			break;

		case INSN_JUMP_CONDITIONAL:
		case INSN_JUMP_UNCONDITIONAL:
			if (is_sibling_call(insn)) {
				ret = validate_sibling_call(insn, &state);
				if (ret)
					return ret;

			} else if (insn->jump_dest) {
				/* follow the branch target with a copy of the state */
				ret = validate_branch(file, func,
						      insn->jump_dest, state);
				if (ret) {
					if (backtrace)
						BT_FUNC("(branch)", insn);
					return ret;
				}
			}

			if (insn->type == INSN_JUMP_UNCONDITIONAL)
				return 0;

			break;

		case INSN_JUMP_DYNAMIC:
		case INSN_JUMP_DYNAMIC_CONDITIONAL:
			if (is_sibling_call(insn)) {
				ret = validate_sibling_call(insn, &state);
				if (ret)
					return ret;
			}

			if (insn->type == INSN_JUMP_DYNAMIC)
				return 0;

			break;

		case INSN_CONTEXT_SWITCH:
			/* only allowed in a function when followed by a hint */
			if (func && (!next_insn || !next_insn->hint)) {
				WARN_FUNC("unsupported instruction in callable function",
					  sec, insn->offset);
				return 1;
			}
			return 0;

		case INSN_STAC:
			if (state.uaccess) {
				WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
				return 1;
			}

			state.uaccess = true;
			break;

		case INSN_CLAC:
			if (!state.uaccess && func) {
				WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
				return 1;
			}

			if (func_uaccess_safe(func) && !state.uaccess_stack) {
				WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
				return 1;
			}

			state.uaccess = false;
			break;

		case INSN_STD:
			if (state.df) {
				WARN_FUNC("recursive STD", sec, insn->offset);
				return 1;
			}

			state.df = true;
			break;

		case INSN_CLD:
			if (!state.df && func) {
				WARN_FUNC("redundant CLD", sec, insn->offset);
				return 1;
			}

			state.df = false;
			break;

		default:
			break;
		}

		if (insn->dead_end)
			return 0;

		if (!next_insn) {
			/* an undefined CFA means the unwinder gave up anyway */
			if (state.cfi.cfa.base == CFI_UNDEFINED)
				return 0;
			WARN("%s: unexpected end of section", sec->name);
			return 1;
		}

		insn = next_insn;
	}

	return 0;
}
2848
/*
 * Start validation at every hinted-but-unvisited instruction, either within
 * the given section or, if @sec is NULL, across the whole file.  Returns the
 * accumulated warning count.
 */
static int validate_unwind_hints(struct objtool_file *file, struct section *sec)
{
	struct instruction *insn;
	struct insn_state state;
	int ret, warnings = 0;

	if (!file->hints)
		return 0;

	init_insn_state(&state, sec);

	/* pick the starting instruction: section start or first in the file */
	if (sec) {
		insn = find_insn(file, sec, 0);
		if (!insn)
			return 0;
	} else {
		insn = list_first_entry(&file->insn_list, typeof(*insn), list);
	}

	while (&insn->list != &file->insn_list && (!sec || insn->sec == sec)) {
		if (insn->hint && !insn->visited) {
			ret = validate_branch(file, insn->func, insn, state);
			if (ret && backtrace)
				BT_FUNC("<=== (hint)", insn);
			warnings += ret;
		}

		insn = list_next_entry(insn, list);
	}

	return warnings;
}
2881
2882 static int validate_retpoline(struct objtool_file *file)
2883 {
2884         struct instruction *insn;
2885         int warnings = 0;
2886
2887         for_each_insn(file, insn) {
2888                 if (insn->type != INSN_JUMP_DYNAMIC &&
2889                     insn->type != INSN_CALL_DYNAMIC)
2890                         continue;
2891
2892                 if (insn->retpoline_safe)
2893                         continue;
2894
2895                 /*
2896                  * .init.text code is ran before userspace and thus doesn't
2897                  * strictly need retpolines, except for modules which are
2898                  * loaded late, they very much do need retpoline in their
2899                  * .init.text
2900                  */
2901                 if (!strcmp(insn->sec->name, ".init.text") && !module)
2902                         continue;
2903
2904                 WARN_FUNC("indirect %s found in RETPOLINE build",
2905                           insn->sec, insn->offset,
2906                           insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2907
2908                 warnings++;
2909         }
2910
2911         return warnings;
2912 }
2913
2914 static bool is_kasan_insn(struct instruction *insn)
2915 {
2916         return (insn->type == INSN_CALL &&
2917                 !strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2918 }
2919
2920 static bool is_ubsan_insn(struct instruction *insn)
2921 {
2922         return (insn->type == INSN_CALL &&
2923                 !strcmp(insn->call_dest->name,
2924                         "__ubsan_handle_builtin_unreachable"));
2925 }
2926
/*
 * Decide whether an unvisited instruction is legitimately unreachable and
 * should not trigger an "unreachable instruction" warning.
 */
static bool ignore_unreachable_insn(struct objtool_file *file, struct instruction *insn)
{
	int i;
	struct instruction *prev_insn;

	if (insn->ignore || insn->type == INSN_NOP)
		return true;

	/*
	 * Ignore any unused exceptions.  This can happen when a whitelisted
	 * function has an exception table entry.
	 *
	 * Also ignore alternative replacement instructions.  This can happen
	 * when a whitelisted function uses one of the ALTERNATIVE macros.
	 */
	if (!strcmp(insn->sec->name, ".fixup") ||
	    !strcmp(insn->sec->name, ".altinstr_replacement") ||
	    !strcmp(insn->sec->name, ".altinstr_aux"))
		return true;

	/* code outside any function is genuinely suspect -- don't ignore it */
	if (!insn->func)
		return false;

	/*
	 * CONFIG_UBSAN_TRAP inserts a UD2 when it sees
	 * __builtin_unreachable().  The BUG() macro has an unreachable() after
	 * the UD2, which causes GCC's undefined trap logic to emit another UD2
	 * (or occasionally a JMP to UD2).
	 *
	 * It may also insert a UD2 after calling a __noreturn function.
	 */
	prev_insn = list_prev_entry(insn, list);
	if ((prev_insn->dead_end || dead_end_function(file, prev_insn->call_dest)) &&
	    (insn->type == INSN_BUG ||
	     (insn->type == INSN_JUMP_UNCONDITIONAL &&
	      insn->jump_dest && insn->jump_dest->type == INSN_BUG)))
		return true;

	/*
	 * Check if this (or a subsequent) instruction is related to
	 * CONFIG_UBSAN or CONFIG_KASAN.
	 *
	 * End the search at 5 instructions to avoid going into the weeds.
	 */
	for (i = 0; i < 5; i++) {

		if (is_kasan_insn(insn) || is_ubsan_insn(insn))
			return true;

		if (insn->type == INSN_JUMP_UNCONDITIONAL) {
			/* only follow jumps that stay within the same function */
			if (insn->jump_dest &&
			    insn->jump_dest->func == insn->func) {
				insn = insn->jump_dest;
				continue;
			}

			break;
		}

		/* stop at the end of the function */
		if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
			break;

		insn = list_next_entry(insn, list);
	}

	return false;
}
2994
/*
 * Validate a single function symbol by walking its code from the entry
 * point.  Only the canonical symbol is walked (one whose pfunc and alias
 * both point to itself); others are covered via their canonical symbol.
 */
static int validate_symbol(struct objtool_file *file, struct section *sec,
			   struct symbol *sym, struct insn_state *state)
{
	struct instruction *insn;
	int ret;

	if (!sym->len) {
		WARN("%s() is missing an ELF size annotation", sym->name);
		return 1;
	}

	if (sym->pfunc != sym || sym->alias != sym)
		return 0;

	insn = find_insn(file, sec, sym->offset);
	if (!insn || insn->ignore || insn->visited)
		return 0;

	/* UACCESS-safe functions are allowed to run with AC set */
	state->uaccess = sym->uaccess_safe;

	ret = validate_branch(file, insn->func, insn, *state);
	if (ret && backtrace)
		BT_FUNC("<=== (sym)", insn);
	return ret;
}
3020
3021 static int validate_section(struct objtool_file *file, struct section *sec)
3022 {
3023         struct insn_state state;
3024         struct symbol *func;
3025         int warnings = 0;
3026
3027         list_for_each_entry(func, &sec->symbol_list, list) {
3028                 if (func->type != STT_FUNC)
3029                         continue;
3030
3031                 init_insn_state(&state, sec);
3032                 set_func_state(&state.cfi);
3033
3034                 warnings += validate_symbol(file, sec, func, &state);
3035         }
3036
3037         return warnings;
3038 }
3039
3040 static int validate_vmlinux_functions(struct objtool_file *file)
3041 {
3042         struct section *sec;
3043         int warnings = 0;
3044
3045         sec = find_section_by_name(file->elf, ".noinstr.text");
3046         if (sec) {
3047                 warnings += validate_section(file, sec);
3048                 warnings += validate_unwind_hints(file, sec);
3049         }
3050
3051         sec = find_section_by_name(file->elf, ".entry.text");
3052         if (sec) {
3053                 warnings += validate_section(file, sec);
3054                 warnings += validate_unwind_hints(file, sec);
3055         }
3056
3057         return warnings;
3058 }
3059
3060 static int validate_functions(struct objtool_file *file)
3061 {
3062         struct section *sec;
3063         int warnings = 0;
3064
3065         for_each_sec(file, sec) {
3066                 if (!(sec->sh.sh_flags & SHF_EXECINSTR))
3067                         continue;
3068
3069                 warnings += validate_section(file, sec);
3070         }
3071
3072         return warnings;
3073 }
3074
3075 static int validate_reachable_instructions(struct objtool_file *file)
3076 {
3077         struct instruction *insn;
3078
3079         if (file->ignore_unreachables)
3080                 return 0;
3081
3082         for_each_insn(file, insn) {
3083                 if (insn->visited || ignore_unreachable_insn(file, insn))
3084                         continue;
3085
3086                 WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
3087                 return 1;
3088         }
3089
3090         return 0;
3091 }
3092
3093 int check(struct objtool_file *file)
3094 {
3095         int ret, warnings = 0;
3096
3097         arch_initial_func_cfi_state(&initial_func_cfi);
3098
3099         ret = decode_sections(file);
3100         if (ret < 0)
3101                 goto out;
3102         warnings += ret;
3103
3104         if (list_empty(&file->insn_list))
3105                 goto out;
3106
3107         if (vmlinux && !validate_dup) {
3108                 ret = validate_vmlinux_functions(file);
3109                 if (ret < 0)
3110                         goto out;
3111
3112                 warnings += ret;
3113                 goto out;
3114         }
3115
3116         if (retpoline) {
3117                 ret = validate_retpoline(file);
3118                 if (ret < 0)
3119                         return ret;
3120                 warnings += ret;
3121         }
3122
3123         ret = validate_functions(file);
3124         if (ret < 0)
3125                 goto out;
3126         warnings += ret;
3127
3128         ret = validate_unwind_hints(file, NULL);
3129         if (ret < 0)
3130                 goto out;
3131         warnings += ret;
3132
3133         if (!warnings) {
3134                 ret = validate_reachable_instructions(file);
3135                 if (ret < 0)
3136                         goto out;
3137                 warnings += ret;
3138         }
3139
3140         ret = create_static_call_sections(file);
3141         if (ret < 0)
3142                 goto out;
3143         warnings += ret;
3144
3145         if (mcount) {
3146                 ret = create_mcount_loc_sections(file);
3147                 if (ret < 0)
3148                         goto out;
3149                 warnings += ret;
3150         }
3151
3152 out:
3153         /*
3154          *  For now, don't fail the kernel build on fatal warnings.  These
3155          *  errors are still fairly common due to the growing matrix of
3156          *  supported toolchains and their recent pace of change.
3157          */
3158         return 0;
3159 }