1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4 */
5
6 #include <string.h>
7 #include <stdlib.h>
8
9 #include "builtin.h"
10 #include "check.h"
11 #include "elf.h"
12 #include "special.h"
13 #include "arch.h"
14 #include "warn.h"
15
16 #include <linux/hashtable.h>
17 #include <linux/kernel.h>
18
19 #define FAKE_JUMP_OFFSET -1
20
21 #define C_JUMP_TABLE_SECTION ".rodata..c_jump_table"
22
23 struct alternative {
24 struct list_head list;
25 struct instruction *insn;
26 bool skip_orig;
27 };
28
29 const char *objname;
30 struct cfi_state initial_func_cfi;
31
32 struct instruction *find_insn(struct objtool_file *file,
33 struct section *sec, unsigned long offset)
34 {
35 struct instruction *insn;
36
37 hash_for_each_possible(file->insn_hash, insn, hash, offset)
38 if (insn->sec == sec && insn->offset == offset)
39 return insn;
40
41 return NULL;
42 }
43
44 static struct instruction *next_insn_same_sec(struct objtool_file *file,
45 struct instruction *insn)
46 {
47 struct instruction *next = list_next_entry(insn, list);
48
49 if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
50 return NULL;
51
52 return next;
53 }
54
55 static struct instruction *next_insn_same_func(struct objtool_file *file,
56 struct instruction *insn)
57 {
58 struct instruction *next = list_next_entry(insn, list);
59 struct symbol *func = insn->func;
60
61 if (!func)
62 return NULL;
63
64 if (&next->list != &file->insn_list && next->func == func)
65 return next;
66
67 /* Check if we're already in the subfunction: */
68 if (func == func->cfunc)
69 return NULL;
70
71 /* Move to the subfunction: */
72 return find_insn(file, func->cfunc->sec, func->cfunc->offset);
73 }
74
75 #define func_for_each_insn_all(file, func, insn) \
76 for (insn = find_insn(file, func->sec, func->offset); \
77 insn; \
78 insn = next_insn_same_func(file, insn))
79
80 #define func_for_each_insn(file, func, insn) \
81 for (insn = find_insn(file, func->sec, func->offset); \
82 insn && &insn->list != &file->insn_list && \
83 insn->sec == func->sec && \
84 insn->offset < func->offset + func->len; \
85 insn = list_next_entry(insn, list))
86
87 #define func_for_each_insn_continue_reverse(file, func, insn) \
88 for (insn = list_prev_entry(insn, list); \
89 &insn->list != &file->insn_list && \
90 insn->sec == func->sec && insn->offset >= func->offset; \
91 insn = list_prev_entry(insn, list))
92
93 #define sec_for_each_insn_from(file, insn) \
94 for (; insn; insn = next_insn_same_sec(file, insn))
95
96 #define sec_for_each_insn_continue(file, insn) \
97 for (insn = next_insn_same_sec(file, insn); insn; \
98 insn = next_insn_same_sec(file, insn))
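
/*
 * Informal usage note: the iterators above are used throughout this file.
 * A typical (purely illustrative) walk over every decoded instruction of a
 * function, including any GCC ".cold" subfunction reached via func->cfunc:
 *
 *	struct instruction *insn;
 *
 *	func_for_each_insn_all(file, func, insn) {
 *		if (insn->type == INSN_RETURN)
 *			return false;	(the function can return)
 *	}
 */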
99
100 static bool is_sibling_call(struct instruction *insn)
101 {
102 /* An indirect jump is either a sibling call or a jump to a table. */
103 if (insn->type == INSN_JUMP_DYNAMIC)
104 return list_empty(&insn->alts);
105
106 if (insn->type != INSN_JUMP_CONDITIONAL &&
107 insn->type != INSN_JUMP_UNCONDITIONAL)
108 return false;
109
110 /* add_jump_destinations() sets insn->call_dest for sibling calls. */
111 return !!insn->call_dest;
112 }
113
114 /*
115 * This checks to see if the given function is a "noreturn" function.
116 *
117 * For global functions which are outside the scope of this object file, we
118 * have to keep a manual list of them.
119 *
120 * For local functions, we have to detect them manually by simply looking for
121 * the lack of a return instruction.
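 *
 * Illustrative sketch (not code from this file): a local helper whose only
 * exit is a call to a known-noreturn function has no INSN_RETURN at all, so
 * it gets treated as a dead end:
 *
 *	static void oops_and_die(const char *msg)
 *	{
 *		panic("objtool example: %s", msg);
 *	}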
122 */
123 static bool __dead_end_function(struct objtool_file *file, struct symbol *func,
124 int recursion)
125 {
126 int i;
127 struct instruction *insn;
128 bool empty = true;
129
130 /*
131 * Unfortunately these have to be hard coded because the noreturn
132 * attribute isn't provided in ELF data.
133 */
134 static const char * const global_noreturns[] = {
135 "__stack_chk_fail",
136 "panic",
137 "do_exit",
138 "do_task_dead",
139 "__module_put_and_exit",
140 "complete_and_exit",
141 "__reiserfs_panic",
142 "lbug_with_loc",
143 "fortify_panic",
144 "usercopy_abort",
145 "machine_real_restart",
146 "rewind_stack_do_exit",
147 "kunit_try_catch_throw",
148 };
149
150 if (!func)
151 return false;
152
153 if (func->bind == STB_WEAK)
154 return false;
155
156 if (func->bind == STB_GLOBAL)
157 for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
158 if (!strcmp(func->name, global_noreturns[i]))
159 return true;
160
161 if (!func->len)
162 return false;
163
164 insn = find_insn(file, func->sec, func->offset);
165 if (!insn->func)
166 return false;
167
168 func_for_each_insn_all(file, func, insn) {
169 empty = false;
170
171 if (insn->type == INSN_RETURN)
172 return false;
173 }
174
175 if (empty)
176 return false;
177
178 /*
179 * A function can have a sibling call instead of a return. In that
180 * case, the function's dead-end status depends on whether the target
181 * of the sibling call returns.
182 */
183 func_for_each_insn_all(file, func, insn) {
184 if (is_sibling_call(insn)) {
185 struct instruction *dest = insn->jump_dest;
186
187 if (!dest)
188 /* sibling call to another file */
189 return false;
190
191 /* local sibling call */
192 if (recursion == 5) {
193 /*
194 * Infinite recursion: two functions have
195 * sibling calls to each other. This is a very
196 * rare case. It means they aren't dead ends.
197 */
198 return false;
199 }
200
201 return __dead_end_function(file, dest->func, recursion+1);
202 }
203 }
204
205 return true;
206 }
207
208 static bool dead_end_function(struct objtool_file *file, struct symbol *func)
209 {
210 return __dead_end_function(file, func, 0);
211 }
212
213 static void clear_insn_state(struct insn_state *state)
214 {
215 int i;
216
217 memset(state, 0, sizeof(*state));
218 state->cfa.base = CFI_UNDEFINED;
219 for (i = 0; i < CFI_NUM_REGS; i++) {
220 state->regs[i].base = CFI_UNDEFINED;
221 state->vals[i].base = CFI_UNDEFINED;
222 }
223 state->drap_reg = CFI_UNDEFINED;
224 state->drap_offset = -1;
225 }
226
227 /*
228 * Call the arch-specific instruction decoder for all the instructions and add
229 * them to the global instruction list.
230 */
231 static int decode_instructions(struct objtool_file *file)
232 {
233 struct section *sec;
234 struct symbol *func;
235 unsigned long offset;
236 struct instruction *insn;
237 int ret;
238
239 for_each_sec(file, sec) {
240
241 if (!(sec->sh.sh_flags & SHF_EXECINSTR))
242 continue;
243
244 if (strcmp(sec->name, ".altinstr_replacement") &&
245 strcmp(sec->name, ".altinstr_aux") &&
246 strncmp(sec->name, ".discard.", 9))
247 sec->text = true;
248
249 for (offset = 0; offset < sec->len; offset += insn->len) {
250 insn = malloc(sizeof(*insn));
251 if (!insn) {
252 WARN("malloc failed");
253 return -1;
254 }
255 memset(insn, 0, sizeof(*insn));
256 INIT_LIST_HEAD(&insn->alts);
257 clear_insn_state(&insn->state);
258
259 insn->sec = sec;
260 insn->offset = offset;
261
262 ret = arch_decode_instruction(file->elf, sec, offset,
263 sec->len - offset,
264 &insn->len, &insn->type,
265 &insn->immediate,
266 &insn->stack_op);
267 if (ret)
268 goto err;
269
270 hash_add(file->insn_hash, &insn->hash, insn->offset);
271 list_add_tail(&insn->list, &file->insn_list);
272 }
273
274 list_for_each_entry(func, &sec->symbol_list, list) {
275 if (func->type != STT_FUNC || func->alias != func)
276 continue;
277
278 if (!find_insn(file, sec, func->offset)) {
279 WARN("%s(): can't find starting instruction",
280 func->name);
281 return -1;
282 }
283
284 func_for_each_insn(file, func, insn)
285 insn->func = func;
286 }
287 }
288
289 return 0;
290
291 err:
292 free(insn);
293 return ret;
294 }
295
296 /*
297 * Mark "ud2" instructions and manually annotated dead ends.
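 *
 * Informal background: the manual annotations are relocations in
 * .rela.discard.unreachable (and .rela.discard.reachable, handled below),
 * which at the time of writing are emitted by the kernel's
 * annotate_unreachable()/annotate_reachable() helpers in <linux/compiler.h>.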
298 */
299 static int add_dead_ends(struct objtool_file *file)
300 {
301 struct section *sec;
302 struct rela *rela;
303 struct instruction *insn;
304 bool found;
305
306 /*
307 * By default, "ud2" is a dead end unless otherwise annotated, because
308 * GCC 7 inserts it for certain divide-by-zero cases.
309 */
310 for_each_insn(file, insn)
311 if (insn->type == INSN_BUG)
312 insn->dead_end = true;
313
314 /*
315 * Check for manually annotated dead ends.
316 */
317 sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
318 if (!sec)
319 goto reachable;
320
321 list_for_each_entry(rela, &sec->rela_list, list) {
322 if (rela->sym->type != STT_SECTION) {
323 WARN("unexpected relocation symbol type in %s", sec->name);
324 return -1;
325 }
326 insn = find_insn(file, rela->sym->sec, rela->addend);
327 if (insn)
328 insn = list_prev_entry(insn, list);
329 else if (rela->addend == rela->sym->sec->len) {
330 found = false;
331 list_for_each_entry_reverse(insn, &file->insn_list, list) {
332 if (insn->sec == rela->sym->sec) {
333 found = true;
334 break;
335 }
336 }
337
338 if (!found) {
339 WARN("can't find unreachable insn at %s+0x%x",
340 rela->sym->sec->name, rela->addend);
341 return -1;
342 }
343 } else {
344 WARN("can't find unreachable insn at %s+0x%x",
345 rela->sym->sec->name, rela->addend);
346 return -1;
347 }
348
349 insn->dead_end = true;
350 }
351
352 reachable:
353 /*
354 * These manually annotated reachable checks are needed for GCC 4.4,
355 * where the Linux unreachable() macro isn't supported. In that case
356 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
357 * not a dead end.
358 */
359 sec = find_section_by_name(file->elf, ".rela.discard.reachable");
360 if (!sec)
361 return 0;
362
363 list_for_each_entry(rela, &sec->rela_list, list) {
364 if (rela->sym->type != STT_SECTION) {
365 WARN("unexpected relocation symbol type in %s", sec->name);
366 return -1;
367 }
368 insn = find_insn(file, rela->sym->sec, rela->addend);
369 if (insn)
370 insn = list_prev_entry(insn, list);
371 else if (rela->addend == rela->sym->sec->len) {
372 found = false;
373 list_for_each_entry_reverse(insn, &file->insn_list, list) {
374 if (insn->sec == rela->sym->sec) {
375 found = true;
376 break;
377 }
378 }
379
380 if (!found) {
381 WARN("can't find reachable insn at %s+0x%x",
382 rela->sym->sec->name, rela->addend);
383 return -1;
384 }
385 } else {
386 WARN("can't find reachable insn at %s+0x%x",
387 rela->sym->sec->name, rela->addend);
388 return -1;
389 }
390
391 insn->dead_end = false;
392 }
393
394 return 0;
395 }
396
397 /*
398 * Warnings shouldn't be reported for ignored functions.
399 */
400 static void add_ignores(struct objtool_file *file)
401 {
402 struct instruction *insn;
403 struct section *sec;
404 struct symbol *func;
405 struct rela *rela;
406
407 sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
408 if (!sec)
409 return;
410
411 list_for_each_entry(rela, &sec->rela_list, list) {
412 switch (rela->sym->type) {
413 case STT_FUNC:
414 func = rela->sym;
415 break;
416
417 case STT_SECTION:
418 func = find_symbol_by_offset(rela->sym->sec, rela->addend);
419 if (!func || func->type != STT_FUNC)
420 continue;
421 break;
422
423 default:
424 WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type);
425 continue;
426 }
427
428 func_for_each_insn_all(file, func, insn)
429 insn->ignore = true;
430 }
431 }
432
433 /*
434  * This is a whitelist of functions that are allowed to be called with AC set.
435 * The list is meant to be minimal and only contains compiler instrumentation
436 * ABI and a few functions used to implement *_{to,from}_user() functions.
437 *
438 * These functions must not directly change AC, but may PUSHF/POPF.
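 *
 * Background (informal sketch, not code from this file): the compiler may
 * insert calls to these helpers inside a user access region, i.e. while
 * EFLAGS.AC is set, for example:
 *
 *	if (user_access_begin(uptr, sizeof(*uptr))) {
 *		unsafe_put_user(val, uptr, err);   <- KASAN/KCSAN/KCOV calls
 *		user_access_end();                     can be emitted in here
 *	}
 *
 * so such calls must be whitelisted rather than warned about.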
439 */
440 static const char *uaccess_safe_builtin[] = {
441 /* KASAN */
442 "kasan_report",
443 "check_memory_region",
444 /* KASAN out-of-line */
445 "__asan_loadN_noabort",
446 "__asan_load1_noabort",
447 "__asan_load2_noabort",
448 "__asan_load4_noabort",
449 "__asan_load8_noabort",
450 "__asan_load16_noabort",
451 "__asan_storeN_noabort",
452 "__asan_store1_noabort",
453 "__asan_store2_noabort",
454 "__asan_store4_noabort",
455 "__asan_store8_noabort",
456 "__asan_store16_noabort",
457 /* KASAN in-line */
458 "__asan_report_load_n_noabort",
459 "__asan_report_load1_noabort",
460 "__asan_report_load2_noabort",
461 "__asan_report_load4_noabort",
462 "__asan_report_load8_noabort",
463 "__asan_report_load16_noabort",
464 "__asan_report_store_n_noabort",
465 "__asan_report_store1_noabort",
466 "__asan_report_store2_noabort",
467 "__asan_report_store4_noabort",
468 "__asan_report_store8_noabort",
469 "__asan_report_store16_noabort",
470 /* KCSAN */
471 "__kcsan_check_access",
472 "kcsan_found_watchpoint",
473 "kcsan_setup_watchpoint",
474 "kcsan_check_scoped_accesses",
475 /* KCSAN/TSAN */
476 "__tsan_func_entry",
477 "__tsan_func_exit",
478 "__tsan_read_range",
479 "__tsan_write_range",
480 "__tsan_read1",
481 "__tsan_read2",
482 "__tsan_read4",
483 "__tsan_read8",
484 "__tsan_read16",
485 "__tsan_write1",
486 "__tsan_write2",
487 "__tsan_write4",
488 "__tsan_write8",
489 "__tsan_write16",
490 /* KCOV */
491 "write_comp_data",
492 "__sanitizer_cov_trace_pc",
493 "__sanitizer_cov_trace_const_cmp1",
494 "__sanitizer_cov_trace_const_cmp2",
495 "__sanitizer_cov_trace_const_cmp4",
496 "__sanitizer_cov_trace_const_cmp8",
497 "__sanitizer_cov_trace_cmp1",
498 "__sanitizer_cov_trace_cmp2",
499 "__sanitizer_cov_trace_cmp4",
500 "__sanitizer_cov_trace_cmp8",
501 /* UBSAN */
502 "ubsan_type_mismatch_common",
503 "__ubsan_handle_type_mismatch",
504 "__ubsan_handle_type_mismatch_v1",
505 "__ubsan_handle_shift_out_of_bounds",
506 /* misc */
507 "csum_partial_copy_generic",
508 "__memcpy_mcsafe",
509 "mcsafe_handle_tail",
510 "ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
511 NULL
512 };
513
514 static void add_uaccess_safe(struct objtool_file *file)
515 {
516 struct symbol *func;
517 const char **name;
518
519 if (!uaccess)
520 return;
521
522 for (name = uaccess_safe_builtin; *name; name++) {
523 func = find_symbol_by_name(file->elf, *name);
524 if (!func)
525 continue;
526
527 func->uaccess_safe = true;
528 }
529 }
530
531 /*
532 * FIXME: For now, just ignore any alternatives which add retpolines. This is
533 * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
534 * But it at least allows objtool to understand the control flow *around* the
535 * retpoline.
536 */
537 static int add_ignore_alternatives(struct objtool_file *file)
538 {
539 struct section *sec;
540 struct rela *rela;
541 struct instruction *insn;
542
543 sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
544 if (!sec)
545 return 0;
546
547 list_for_each_entry(rela, &sec->rela_list, list) {
548 if (rela->sym->type != STT_SECTION) {
549 WARN("unexpected relocation symbol type in %s", sec->name);
550 return -1;
551 }
552
553 insn = find_insn(file, rela->sym->sec, rela->addend);
554 if (!insn) {
555 WARN("bad .discard.ignore_alts entry");
556 return -1;
557 }
558
559 insn->ignore_alts = true;
560 }
561
562 return 0;
563 }
564
565 /*
566 * Find the destination instructions for all jumps.
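 *
 * Informal note on the "+ 4" adjustments below: for PC32-style relocations
 * the addend is biased by -4 (it is taken relative to the end of the 4-byte
 * displacement), so the intended target offset within the section is
 * addend + 4.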
567 */
568 static int add_jump_destinations(struct objtool_file *file)
569 {
570 struct instruction *insn;
571 struct rela *rela;
572 struct section *dest_sec;
573 unsigned long dest_off;
574
575 for_each_insn(file, insn) {
576 if (insn->type != INSN_JUMP_CONDITIONAL &&
577 insn->type != INSN_JUMP_UNCONDITIONAL)
578 continue;
579
580 if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
581 continue;
582
583 rela = find_rela_by_dest_range(insn->sec, insn->offset,
584 insn->len);
585 if (!rela) {
586 dest_sec = insn->sec;
587 dest_off = insn->offset + insn->len + insn->immediate;
588 } else if (rela->sym->type == STT_SECTION) {
589 dest_sec = rela->sym->sec;
590 dest_off = rela->addend + 4;
591 } else if (rela->sym->sec->idx) {
592 dest_sec = rela->sym->sec;
593 dest_off = rela->sym->sym.st_value + rela->addend + 4;
594 } else if (strstr(rela->sym->name, "_indirect_thunk_")) {
595 /*
596 * Retpoline jumps are really dynamic jumps in
597 * disguise, so convert them accordingly.
598 */
599 if (insn->type == INSN_JUMP_UNCONDITIONAL)
600 insn->type = INSN_JUMP_DYNAMIC;
601 else
602 insn->type = INSN_JUMP_DYNAMIC_CONDITIONAL;
603
604 insn->retpoline_safe = true;
605 continue;
606 } else {
607 /* external sibling call */
608 insn->call_dest = rela->sym;
609 continue;
610 }
611
612 insn->jump_dest = find_insn(file, dest_sec, dest_off);
613 if (!insn->jump_dest) {
614
615 /*
616 * This is a special case where an alt instruction
617 * jumps past the end of the section. These are
618 * handled later in handle_group_alt().
619 */
620 if (!strcmp(insn->sec->name, ".altinstr_replacement"))
621 continue;
622
623 WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
624 insn->sec, insn->offset, dest_sec->name,
625 dest_off);
626 return -1;
627 }
628
629 /*
630 * Cross-function jump.
631 */
632 if (insn->func && insn->jump_dest->func &&
633 insn->func != insn->jump_dest->func) {
634
635 /*
636 * For GCC 8+, create parent/child links for any cold
637 * subfunctions. This is _mostly_ redundant with a
638 * similar initialization in read_symbols().
639 *
640 * If a function has aliases, we want the *first* such
641 * function in the symbol table to be the subfunction's
642 * parent. In that case we overwrite the
643 * initialization done in read_symbols().
644 *
645 * However this code can't completely replace the
646 * read_symbols() code because this doesn't detect the
647 * case where the parent function's only reference to a
648 * subfunction is through a jump table.
649 */
650 if (!strstr(insn->func->name, ".cold.") &&
651 strstr(insn->jump_dest->func->name, ".cold.")) {
652 insn->func->cfunc = insn->jump_dest->func;
653 insn->jump_dest->func->pfunc = insn->func;
654
655 } else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
656 insn->jump_dest->offset == insn->jump_dest->func->offset) {
657
658 /* internal sibling call */
659 insn->call_dest = insn->jump_dest->func;
660 }
661 }
662 }
663
664 return 0;
665 }
666
667 /*
668 * Find the destination instructions for all calls.
669 */
670 static int add_call_destinations(struct objtool_file *file)
671 {
672 struct instruction *insn;
673 unsigned long dest_off;
674 struct rela *rela;
675
676 for_each_insn(file, insn) {
677 if (insn->type != INSN_CALL)
678 continue;
679
680 rela = find_rela_by_dest_range(insn->sec, insn->offset,
681 insn->len);
682 if (!rela) {
683 dest_off = insn->offset + insn->len + insn->immediate;
684 insn->call_dest = find_symbol_by_offset(insn->sec,
685 dest_off);
686
687 if (!insn->call_dest && !insn->ignore) {
688 WARN_FUNC("unsupported intra-function call",
689 insn->sec, insn->offset);
690 if (retpoline)
691 WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
692 return -1;
693 }
694
695 } else if (rela->sym->type == STT_SECTION) {
696 insn->call_dest = find_symbol_by_offset(rela->sym->sec,
697 rela->addend+4);
698 if (!insn->call_dest ||
699 insn->call_dest->type != STT_FUNC) {
700 WARN_FUNC("can't find call dest symbol at %s+0x%x",
701 insn->sec, insn->offset,
702 rela->sym->sec->name,
703 rela->addend + 4);
704 return -1;
705 }
706 } else
707 insn->call_dest = rela->sym;
708 }
709
710 return 0;
711 }
712
713 /*
714 * The .alternatives section requires some extra special care, over and above
715 * what other special sections require:
716 *
717 * 1. Because alternatives are patched in-place, we need to insert a fake jump
718 * instruction at the end so that validate_branch() skips all the original
719 * replaced instructions when validating the new instruction path.
720 *
721 * 2. An added wrinkle is that the new instruction length might be zero. In
722 * that case the old instructions are replaced with noops. We simulate that
723 * by creating a fake jump as the only new instruction.
724 *
725 * 3. In some cases, the alternative section includes an instruction which
726 * conditionally jumps to the _end_ of the entry. We have to modify these
727 * jumps' destinations to point back to .text rather than the end of the
728 * entry in .altinstr_replacement.
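 *
 * Rough picture of cases 1/2 above (illustrative only):
 *
 *	.text:                          .altinstr_replacement:
 *	  orig insn 1                     new insn 1
 *	  orig insn 2                     new insn 2
 *	  insn after group  <-----------  fake jump (INSN_JUMP_UNCONDITIONAL)
 *
 * so that after walking the replacement instructions, validate_branch()
 * resumes at the first instruction following the original group.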
729 */
730 static int handle_group_alt(struct objtool_file *file,
731 struct special_alt *special_alt,
732 struct instruction *orig_insn,
733 struct instruction **new_insn)
734 {
735 struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
736 unsigned long dest_off;
737
738 last_orig_insn = NULL;
739 insn = orig_insn;
740 sec_for_each_insn_from(file, insn) {
741 if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
742 break;
743
744 insn->alt_group = true;
745 last_orig_insn = insn;
746 }
747
748 if (next_insn_same_sec(file, last_orig_insn)) {
749 fake_jump = malloc(sizeof(*fake_jump));
750 if (!fake_jump) {
751 WARN("malloc failed");
752 return -1;
753 }
754 memset(fake_jump, 0, sizeof(*fake_jump));
755 INIT_LIST_HEAD(&fake_jump->alts);
756 clear_insn_state(&fake_jump->state);
757
758 fake_jump->sec = special_alt->new_sec;
759 fake_jump->offset = FAKE_JUMP_OFFSET;
760 fake_jump->type = INSN_JUMP_UNCONDITIONAL;
761 fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
762 fake_jump->func = orig_insn->func;
763 }
764
765 if (!special_alt->new_len) {
766 if (!fake_jump) {
767 WARN("%s: empty alternative at end of section",
768 special_alt->orig_sec->name);
769 return -1;
770 }
771
772 *new_insn = fake_jump;
773 return 0;
774 }
775
776 last_new_insn = NULL;
777 insn = *new_insn;
778 sec_for_each_insn_from(file, insn) {
779 if (insn->offset >= special_alt->new_off + special_alt->new_len)
780 break;
781
782 last_new_insn = insn;
783
784 insn->ignore = orig_insn->ignore_alts;
785 insn->func = orig_insn->func;
786
787 if (insn->type != INSN_JUMP_CONDITIONAL &&
788 insn->type != INSN_JUMP_UNCONDITIONAL)
789 continue;
790
791 if (!insn->immediate)
792 continue;
793
794 dest_off = insn->offset + insn->len + insn->immediate;
795 if (dest_off == special_alt->new_off + special_alt->new_len) {
796 if (!fake_jump) {
797 WARN("%s: alternative jump to end of section",
798 special_alt->orig_sec->name);
799 return -1;
800 }
801 insn->jump_dest = fake_jump;
802 }
803
804 if (!insn->jump_dest) {
805 WARN_FUNC("can't find alternative jump destination",
806 insn->sec, insn->offset);
807 return -1;
808 }
809 }
810
811 if (!last_new_insn) {
812 WARN_FUNC("can't find last new alternative instruction",
813 special_alt->new_sec, special_alt->new_off);
814 return -1;
815 }
816
817 if (fake_jump)
818 list_add(&fake_jump->list, &last_new_insn->list);
819
820 return 0;
821 }
822
823 /*
824 * A jump table entry can either convert a nop to a jump or a jump to a nop.
825 * If the original instruction is a jump, make the alt entry an effective nop
826 * by just skipping the original instruction.
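 *
 * Informal note: these entries come from jump label (static key) patching,
 * where the "alternative" is simply a nop <-> unconditional jmp swap.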
827 */
828 static int handle_jump_alt(struct objtool_file *file,
829 struct special_alt *special_alt,
830 struct instruction *orig_insn,
831 struct instruction **new_insn)
832 {
833 if (orig_insn->type == INSN_NOP)
834 return 0;
835
836 if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
837 WARN_FUNC("unsupported instruction at jump label",
838 orig_insn->sec, orig_insn->offset);
839 return -1;
840 }
841
842 *new_insn = list_next_entry(orig_insn, list);
843 return 0;
844 }
845
846 /*
847  * Read all the special sections which have alternate instructions that can be
848  * patched in or redirected to at runtime. Each instruction with alternate
849  * instruction(s) gets them added to its insn->alts list, which will be
850  * traversed in validate_branch().
851 */
852 static int add_special_section_alts(struct objtool_file *file)
853 {
854 struct list_head special_alts;
855 struct instruction *orig_insn, *new_insn;
856 struct special_alt *special_alt, *tmp;
857 struct alternative *alt;
858 int ret;
859
860 ret = special_get_alts(file->elf, &special_alts);
861 if (ret)
862 return ret;
863
864 list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
865
866 orig_insn = find_insn(file, special_alt->orig_sec,
867 special_alt->orig_off);
868 if (!orig_insn) {
869 WARN_FUNC("special: can't find orig instruction",
870 special_alt->orig_sec, special_alt->orig_off);
871 ret = -1;
872 goto out;
873 }
874
875 new_insn = NULL;
876 if (!special_alt->group || special_alt->new_len) {
877 new_insn = find_insn(file, special_alt->new_sec,
878 special_alt->new_off);
879 if (!new_insn) {
880 WARN_FUNC("special: can't find new instruction",
881 special_alt->new_sec,
882 special_alt->new_off);
883 ret = -1;
884 goto out;
885 }
886 }
887
888 if (special_alt->group) {
889 ret = handle_group_alt(file, special_alt, orig_insn,
890 &new_insn);
891 if (ret)
892 goto out;
893 } else if (special_alt->jump_or_nop) {
894 ret = handle_jump_alt(file, special_alt, orig_insn,
895 &new_insn);
896 if (ret)
897 goto out;
898 }
899
900 alt = malloc(sizeof(*alt));
901 if (!alt) {
902 WARN("malloc failed");
903 ret = -1;
904 goto out;
905 }
906
907 alt->insn = new_insn;
908 alt->skip_orig = special_alt->skip_orig;
909 orig_insn->ignore_alts |= special_alt->skip_alt;
910 list_add_tail(&alt->list, &orig_insn->alts);
911
912 list_del(&special_alt->list);
913 free(special_alt);
914 }
915
916 out:
917 return ret;
918 }
919
920 static int add_jump_table(struct objtool_file *file, struct instruction *insn,
921 struct rela *table)
922 {
923 struct rela *rela = table;
924 struct instruction *dest_insn;
925 struct alternative *alt;
926 struct symbol *pfunc = insn->func->pfunc;
927 unsigned int prev_offset = 0;
928
929 /*
930 * Each @rela is a switch table relocation which points to the target
931 * instruction.
932 */
933 list_for_each_entry_from(rela, &table->sec->rela_list, list) {
934
935 /* Check for the end of the table: */
936 if (rela != table && rela->jump_table_start)
937 break;
938
939 /* Make sure the table entries are consecutive: */
940 if (prev_offset && rela->offset != prev_offset + 8)
941 break;
942
943 /* Detect function pointers from contiguous objects: */
944 if (rela->sym->sec == pfunc->sec &&
945 rela->addend == pfunc->offset)
946 break;
947
948 dest_insn = find_insn(file, rela->sym->sec, rela->addend);
949 if (!dest_insn)
950 break;
951
952 /* Make sure the destination is in the same function: */
953 if (!dest_insn->func || dest_insn->func->pfunc != pfunc)
954 break;
955
956 alt = malloc(sizeof(*alt));
957 if (!alt) {
958 WARN("malloc failed");
959 return -1;
960 }
961
962 alt->insn = dest_insn;
963 list_add_tail(&alt->list, &insn->alts);
964 prev_offset = rela->offset;
965 }
966
967 if (!prev_offset) {
968 WARN_FUNC("can't find switch jump table",
969 insn->sec, insn->offset);
970 return -1;
971 }
972
973 return 0;
974 }
975
976 /*
977 * find_jump_table() - Given a dynamic jump, find the switch jump table in
978 * .rodata associated with it.
979 *
980 * There are 3 basic patterns:
981 *
982 * 1. jmpq *[rodata addr](,%reg,8)
983 *
984 * This is the most common case by far. It jumps to an address in a simple
985 * jump table which is stored in .rodata.
986 *
987 * 2. jmpq *[rodata addr](%rip)
988 *
989 * This is caused by a rare GCC quirk, currently only seen in three driver
990 * functions in the kernel, only with certain obscure non-distro configs.
991 *
992 * As part of an optimization, GCC makes a copy of an existing switch jump
993 * table, modifies it, and then hard-codes the jump (albeit with an indirect
994 * jump) to use a single entry in the table. The rest of the jump table and
995 * some of its jump targets remain as dead code.
996 *
997 * In such a case we can just crudely ignore all unreachable instruction
998 * warnings for the entire object file. Ideally we would just ignore them
999 * for the function, but that would require redesigning the code quite a
1000 * bit. And honestly that's just not worth doing: unreachable instruction
1001 * warnings are of questionable value anyway, and this is such a rare issue.
1002 *
1003 * 3. mov [rodata addr],%reg1
1004 * ... some instructions ...
1005 * jmpq *(%reg1,%reg2,8)
1006 *
1007 * This is a fairly uncommon pattern which is new for GCC 6. As of this
1008 * writing, there are 11 occurrences of it in the allmodconfig kernel.
1009 *
1010 * As of GCC 7 there are quite a few more of these and the 'in between' code
1011 * is significant. Esp. with KASAN enabled some of the code between the mov
1012 * and jmpq uses .rodata itself, which can confuse things.
1013 *
1014 * TODO: Once we have DWARF CFI and smarter instruction decoding logic,
1015 * ensure the same register is used in the mov and jump instructions.
1016 *
1017 * NOTE: RETPOLINE made it harder still to decode dynamic jumps.
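 *
 * Informal sketch of a C annotated jump table (case 1, but with a symbol in
 * C_JUMP_TABLE_SECTION); names below are illustrative only:
 *
 *	static const void * const table[]
 *		__attribute__((section(".rodata..c_jump_table"))) =
 *		{ &&do_a, &&do_b };
 *	goto *table[idx];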
1018 */
1019 static struct rela *find_jump_table(struct objtool_file *file,
1020 struct symbol *func,
1021 struct instruction *insn)
1022 {
1023 struct rela *text_rela, *table_rela;
1024 struct instruction *orig_insn = insn;
1025 struct section *table_sec;
1026 unsigned long table_offset;
1027
1028 /*
1029  * Backward search using the @first_jump_src links; these help avoid
1030  * much of the 'in between' code, which could otherwise confuse the
1031  * table search.
1032 */
1033 for (;
1034 &insn->list != &file->insn_list &&
1035 insn->sec == func->sec &&
1036 insn->offset >= func->offset;
1037
1038 insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {
1039
1040 if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1041 break;
1042
1043 /* allow small jumps within the range */
1044 if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1045 insn->jump_dest &&
1046 (insn->jump_dest->offset <= insn->offset ||
1047 insn->jump_dest->offset > orig_insn->offset))
1048 break;
1049
1050 /* look for a relocation which references .rodata */
1051 text_rela = find_rela_by_dest_range(insn->sec, insn->offset,
1052 insn->len);
1053 if (!text_rela || text_rela->sym->type != STT_SECTION ||
1054 !text_rela->sym->sec->rodata)
1055 continue;
1056
1057 table_offset = text_rela->addend;
1058 table_sec = text_rela->sym->sec;
1059
1060 if (text_rela->type == R_X86_64_PC32)
1061 table_offset += 4;
1062
1063 /*
1064 * Make sure the .rodata address isn't associated with a
1065 * symbol. GCC jump tables are anonymous data.
1066 *
1067 * Also support C jump tables which are in the same format as
1068 * switch jump tables. For objtool to recognize them, they
1069 * need to be placed in the C_JUMP_TABLE_SECTION section. They
1070 * have symbols associated with them.
1071 */
1072 if (find_symbol_containing(table_sec, table_offset) &&
1073 strcmp(table_sec->name, C_JUMP_TABLE_SECTION))
1074 continue;
1075
1076 /* Each table entry has a rela associated with it. */
1077 table_rela = find_rela_by_dest(table_sec, table_offset);
1078 if (!table_rela)
1079 continue;
1080
1081 /*
1082 * Use of RIP-relative switch jumps is quite rare, and
1083 * indicates a rare GCC quirk/bug which can leave dead code
1084 * behind.
1085 */
1086 if (text_rela->type == R_X86_64_PC32)
1087 file->ignore_unreachables = true;
1088
1089 return table_rela;
1090 }
1091
1092 return NULL;
1093 }
1094
1095 /*
1096 * First pass: Mark the head of each jump table so that in the next pass,
1097 * we know when a given jump table ends and the next one starts.
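 *
 * Example (informal): if a function contains two switch statements whose
 * jump tables are adjacent in .rodata, the first rela of the second table
 * gets ->jump_table_start set here, so add_jump_table() stops at that
 * boundary instead of walking into the neighbouring table's entries.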
1098 */
1099 static void mark_func_jump_tables(struct objtool_file *file,
1100 struct symbol *func)
1101 {
1102 struct instruction *insn, *last = NULL;
1103 struct rela *rela;
1104
1105 func_for_each_insn_all(file, func, insn) {
1106 if (!last)
1107 last = insn;
1108
1109 /*
1110 * Store back-pointers for unconditional forward jumps such
1111 * that find_jump_table() can back-track using those and
1112 * avoid some potentially confusing code.
1113 */
1114 if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1115 insn->offset > last->offset &&
1116 insn->jump_dest->offset > insn->offset &&
1117 !insn->jump_dest->first_jump_src) {
1118
1119 insn->jump_dest->first_jump_src = insn;
1120 last = insn->jump_dest;
1121 }
1122
1123 if (insn->type != INSN_JUMP_DYNAMIC)
1124 continue;
1125
1126 rela = find_jump_table(file, func, insn);
1127 if (rela) {
1128 rela->jump_table_start = true;
1129 insn->jump_table = rela;
1130 }
1131 }
1132 }
1133
1134 static int add_func_jump_tables(struct objtool_file *file,
1135 struct symbol *func)
1136 {
1137 struct instruction *insn;
1138 int ret;
1139
1140 func_for_each_insn_all(file, func, insn) {
1141 if (!insn->jump_table)
1142 continue;
1143
1144 ret = add_jump_table(file, insn, insn->jump_table);
1145 if (ret)
1146 return ret;
1147 }
1148
1149 return 0;
1150 }
1151
1152 /*
1153 * For some switch statements, gcc generates a jump table in the .rodata
1154 * section which contains a list of addresses within the function to jump to.
1155 * This finds these jump tables and adds them to the insn->alts lists.
1156 */
1157 static int add_jump_table_alts(struct objtool_file *file)
1158 {
1159 struct section *sec;
1160 struct symbol *func;
1161 int ret;
1162
1163 if (!file->rodata)
1164 return 0;
1165
1166 for_each_sec(file, sec) {
1167 list_for_each_entry(func, &sec->symbol_list, list) {
1168 if (func->type != STT_FUNC)
1169 continue;
1170
1171 mark_func_jump_tables(file, func);
1172 ret = add_func_jump_tables(file, func);
1173 if (ret)
1174 return ret;
1175 }
1176 }
1177
1178 return 0;
1179 }
1180
1181 static int read_unwind_hints(struct objtool_file *file)
1182 {
1183 struct section *sec, *relasec;
1184 struct rela *rela;
1185 struct unwind_hint *hint;
1186 struct instruction *insn;
1187 struct cfi_reg *cfa;
1188 int i;
1189
1190 sec = find_section_by_name(file->elf, ".discard.unwind_hints");
1191 if (!sec)
1192 return 0;
1193
1194 relasec = sec->rela;
1195 if (!relasec) {
1196 WARN("missing .rela.discard.unwind_hints section");
1197 return -1;
1198 }
1199
1200 if (sec->len % sizeof(struct unwind_hint)) {
1201 WARN("struct unwind_hint size mismatch");
1202 return -1;
1203 }
1204
1205 file->hints = true;
1206
1207 for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
1208 hint = (struct unwind_hint *)sec->data->d_buf + i;
1209
1210 rela = find_rela_by_dest(sec, i * sizeof(*hint));
1211 if (!rela) {
1212 WARN("can't find rela for unwind_hints[%d]", i);
1213 return -1;
1214 }
1215
1216 insn = find_insn(file, rela->sym->sec, rela->addend);
1217 if (!insn) {
1218 WARN("can't find insn for unwind_hints[%d]", i);
1219 return -1;
1220 }
1221
1222 cfa = &insn->state.cfa;
1223
1224 if (hint->type == UNWIND_HINT_TYPE_SAVE) {
1225 insn->save = true;
1226 continue;
1227
1228 } else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
1229 insn->restore = true;
1230 insn->hint = true;
1231 continue;
1232 }
1233
1234 insn->hint = true;
1235
1236 switch (hint->sp_reg) {
1237 case ORC_REG_UNDEFINED:
1238 cfa->base = CFI_UNDEFINED;
1239 break;
1240 case ORC_REG_SP:
1241 cfa->base = CFI_SP;
1242 break;
1243 case ORC_REG_BP:
1244 cfa->base = CFI_BP;
1245 break;
1246 case ORC_REG_SP_INDIRECT:
1247 cfa->base = CFI_SP_INDIRECT;
1248 break;
1249 case ORC_REG_R10:
1250 cfa->base = CFI_R10;
1251 break;
1252 case ORC_REG_R13:
1253 cfa->base = CFI_R13;
1254 break;
1255 case ORC_REG_DI:
1256 cfa->base = CFI_DI;
1257 break;
1258 case ORC_REG_DX:
1259 cfa->base = CFI_DX;
1260 break;
1261 default:
1262 WARN_FUNC("unsupported unwind_hint sp base reg %d",
1263 insn->sec, insn->offset, hint->sp_reg);
1264 return -1;
1265 }
1266
1267 cfa->offset = hint->sp_offset;
1268 insn->state.type = hint->type;
1269 insn->state.end = hint->end;
1270 }
1271
1272 return 0;
1273 }
1274
1275 static int read_retpoline_hints(struct objtool_file *file)
1276 {
1277 struct section *sec;
1278 struct instruction *insn;
1279 struct rela *rela;
1280
1281 sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1282 if (!sec)
1283 return 0;
1284
1285 list_for_each_entry(rela, &sec->rela_list, list) {
1286 if (rela->sym->type != STT_SECTION) {
1287 WARN("unexpected relocation symbol type in %s", sec->name);
1288 return -1;
1289 }
1290
1291 insn = find_insn(file, rela->sym->sec, rela->addend);
1292 if (!insn) {
1293 WARN("bad .discard.retpoline_safe entry");
1294 return -1;
1295 }
1296
1297 if (insn->type != INSN_JUMP_DYNAMIC &&
1298 insn->type != INSN_CALL_DYNAMIC) {
1299 WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1300 insn->sec, insn->offset);
1301 return -1;
1302 }
1303
1304 insn->retpoline_safe = true;
1305 }
1306
1307 return 0;
1308 }
1309
1310 static void mark_rodata(struct objtool_file *file)
1311 {
1312 struct section *sec;
1313 bool found = false;
1314
1315 /*
1316 * Search for the following rodata sections, each of which can
1317 * potentially contain jump tables:
1318 *
1319 * - .rodata: can contain GCC switch tables
1320 * - .rodata.<func>: same, if -fdata-sections is being used
1321 * - .rodata..c_jump_table: contains C annotated jump tables
1322 *
1323 * .rodata.str1.* sections are ignored; they don't contain jump tables.
1324 */
1325 for_each_sec(file, sec) {
1326 if ((!strncmp(sec->name, ".rodata", 7) && !strstr(sec->name, ".str1.")) ||
1327 !strcmp(sec->name, C_JUMP_TABLE_SECTION)) {
1328 sec->rodata = true;
1329 found = true;
1330 }
1331 }
1332
1333 file->rodata = found;
1334 }
1335
1336 static int decode_sections(struct objtool_file *file)
1337 {
1338 int ret;
1339
1340 mark_rodata(file);
1341
1342 ret = decode_instructions(file);
1343 if (ret)
1344 return ret;
1345
1346 ret = add_dead_ends(file);
1347 if (ret)
1348 return ret;
1349
1350 add_ignores(file);
1351 add_uaccess_safe(file);
1352
1353 ret = add_ignore_alternatives(file);
1354 if (ret)
1355 return ret;
1356
1357 ret = add_jump_destinations(file);
1358 if (ret)
1359 return ret;
1360
1361 ret = add_special_section_alts(file);
1362 if (ret)
1363 return ret;
1364
1365 ret = add_call_destinations(file);
1366 if (ret)
1367 return ret;
1368
1369 ret = add_jump_table_alts(file);
1370 if (ret)
1371 return ret;
1372
1373 ret = read_unwind_hints(file);
1374 if (ret)
1375 return ret;
1376
1377 ret = read_retpoline_hints(file);
1378 if (ret)
1379 return ret;
1380
1381 return 0;
1382 }
1383
1384 static bool is_fentry_call(struct instruction *insn)
1385 {
1386 if (insn->type == INSN_CALL &&
1387 insn->call_dest->type == STT_NOTYPE &&
1388 !strcmp(insn->call_dest->name, "__fentry__"))
1389 return true;
1390
1391 return false;
1392 }
1393
1394 static bool has_modified_stack_frame(struct insn_state *state)
1395 {
1396 int i;
1397
1398 if (state->cfa.base != initial_func_cfi.cfa.base ||
1399 state->cfa.offset != initial_func_cfi.cfa.offset ||
1400 state->stack_size != initial_func_cfi.cfa.offset ||
1401 state->drap)
1402 return true;
1403
1404 for (i = 0; i < CFI_NUM_REGS; i++)
1405 if (state->regs[i].base != initial_func_cfi.regs[i].base ||
1406 state->regs[i].offset != initial_func_cfi.regs[i].offset)
1407 return true;
1408
1409 return false;
1410 }
1411
1412 static bool has_valid_stack_frame(struct insn_state *state)
1413 {
1414 if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
1415 state->regs[CFI_BP].offset == -16)
1416 return true;
1417
1418 if (state->drap && state->regs[CFI_BP].base == CFI_BP)
1419 return true;
1420
1421 return false;
1422 }
1423
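/*
 * For ORC_TYPE_REGS / ORC_TYPE_REGS_IRET hinted code (see read_unwind_hints),
 * only minimal state is tracked: the CFA offset is adjusted for pushes, pops
 * and "add imm, %rsp" while the CFA is based on %rsp; individual register
 * saves are not tracked here.
 */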
1424 static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
1425 {
1426 struct cfi_reg *cfa = &state->cfa;
1427 struct stack_op *op = &insn->stack_op;
1428
1429 if (cfa->base != CFI_SP)
1430 return 0;
1431
1432 /* push */
1433 if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1434 cfa->offset += 8;
1435
1436 /* pop */
1437 if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1438 cfa->offset -= 8;
1439
1440 /* add immediate to sp */
1441 if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1442 op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1443 cfa->offset -= op->src.offset;
1444
1445 return 0;
1446 }
1447
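/*
 * Record where a callee-saved register gets saved (save_reg) and forget that
 * location once it has been restored (restore_reg).  Only the first save is
 * recorded: the CFI_UNDEFINED check keeps a later scratch push of the same
 * register from overwriting the tracked location.
 */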
1448 static void save_reg(struct insn_state *state, unsigned char reg, int base,
1449 int offset)
1450 {
1451 if (arch_callee_saved_reg(reg) &&
1452 state->regs[reg].base == CFI_UNDEFINED) {
1453 state->regs[reg].base = base;
1454 state->regs[reg].offset = offset;
1455 }
1456 }
1457
1458 static void restore_reg(struct insn_state *state, unsigned char reg)
1459 {
1460 state->regs[reg].base = CFI_UNDEFINED;
1461 state->regs[reg].offset = 0;
1462 }
1463
1464 /*
1465 * A note about DRAP stack alignment:
1466 *
1467 * GCC has the concept of a DRAP register, which is used to help keep track of
1468 * the stack pointer when aligning the stack. r10 or r13 is used as the DRAP
1469 * register. The typical DRAP pattern is:
1470 *
1471 * 4c 8d 54 24 08 lea 0x8(%rsp),%r10
1472 * 48 83 e4 c0 and $0xffffffffffffffc0,%rsp
1473 * 41 ff 72 f8 pushq -0x8(%r10)
1474 * 55 push %rbp
1475 * 48 89 e5 mov %rsp,%rbp
1476 * (more pushes)
1477 * 41 52 push %r10
1478 * ...
1479 * 41 5a pop %r10
1480 * (more pops)
1481 * 5d pop %rbp
1482 * 49 8d 62 f8 lea -0x8(%r10),%rsp
1483 * c3 retq
1484 *
1485 * There are some variations in the epilogues, like:
1486 *
1487 * 5b pop %rbx
1488 * 41 5a pop %r10
1489 * 41 5c pop %r12
1490 * 41 5d pop %r13
1491 * 41 5e pop %r14
1492 * c9 leaveq
1493 * 49 8d 62 f8 lea -0x8(%r10),%rsp
1494 * c3 retq
1495 *
1496 * and:
1497 *
1498 * 4c 8b 55 e8 mov -0x18(%rbp),%r10
1499 * 48 8b 5d e0 mov -0x20(%rbp),%rbx
1500 * 4c 8b 65 f0 mov -0x10(%rbp),%r12
1501 * 4c 8b 6d f8 mov -0x8(%rbp),%r13
1502 * c9 leaveq
1503 * 49 8d 62 f8 lea -0x8(%r10),%rsp
1504 * c3 retq
1505 *
1506 * Sometimes r13 is used as the DRAP register, in which case it's saved and
1507 * restored beforehand:
1508 *
1509 * 41 55 push %r13
1510 * 4c 8d 6c 24 10 lea 0x10(%rsp),%r13
1511 * 48 83 e4 f0 and $0xfffffffffffffff0,%rsp
1512 * ...
1513 * 49 8d 65 f0 lea -0x10(%r13),%rsp
1514 * 41 5d pop %r13
1515 * c3 retq
1516 */
1517 static int update_insn_state(struct instruction *insn, struct insn_state *state)
1518 {
1519 struct stack_op *op = &insn->stack_op;
1520 struct cfi_reg *cfa = &state->cfa;
1521 struct cfi_reg *regs = state->regs;
1522
1523 /* stack operations don't make sense with an undefined CFA */
1524 if (cfa->base == CFI_UNDEFINED) {
1525 if (insn->func) {
1526 WARN_FUNC("undefined stack state", insn->sec, insn->offset);
1527 return -1;
1528 }
1529 return 0;
1530 }
1531
1532 if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
1533 return update_insn_state_regs(insn, state);
1534
1535 switch (op->dest.type) {
1536
1537 case OP_DEST_REG:
1538 switch (op->src.type) {
1539
1540 case OP_SRC_REG:
1541 if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
1542 cfa->base == CFI_SP &&
1543 regs[CFI_BP].base == CFI_CFA &&
1544 regs[CFI_BP].offset == -cfa->offset) {
1545
1546 /* mov %rsp, %rbp */
1547 cfa->base = op->dest.reg;
1548 state->bp_scratch = false;
1549 }
1550
1551 else if (op->src.reg == CFI_SP &&
1552 op->dest.reg == CFI_BP && state->drap) {
1553
1554 /* drap: mov %rsp, %rbp */
1555 regs[CFI_BP].base = CFI_BP;
1556 regs[CFI_BP].offset = -state->stack_size;
1557 state->bp_scratch = false;
1558 }
1559
1560 else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1561
1562 /*
1563 * mov %rsp, %reg
1564 *
1565 * This is needed for the rare case where GCC
1566 * does:
1567 *
1568 * mov %rsp, %rax
1569 * ...
1570 * mov %rax, %rsp
1571 */
1572 state->vals[op->dest.reg].base = CFI_CFA;
1573 state->vals[op->dest.reg].offset = -state->stack_size;
1574 }
1575
1576 else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
1577 cfa->base == CFI_BP) {
1578
1579 /*
1580 * mov %rbp, %rsp
1581 *
1582 * Restore the original stack pointer (Clang).
1583 */
1584 state->stack_size = -state->regs[CFI_BP].offset;
1585 }
1586
1587 else if (op->dest.reg == cfa->base) {
1588
1589 /* mov %reg, %rsp */
1590 if (cfa->base == CFI_SP &&
1591 state->vals[op->src.reg].base == CFI_CFA) {
1592
1593 /*
1594 * This is needed for the rare case
1595 * where GCC does something dumb like:
1596 *
1597 * lea 0x8(%rsp), %rcx
1598 * ...
1599 * mov %rcx, %rsp
1600 */
1601 cfa->offset = -state->vals[op->src.reg].offset;
1602 state->stack_size = cfa->offset;
1603
1604 } else {
1605 cfa->base = CFI_UNDEFINED;
1606 cfa->offset = 0;
1607 }
1608 }
1609
1610 break;
1611
1612 case OP_SRC_ADD:
1613 if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
1614
1615 /* add imm, %rsp */
1616 state->stack_size -= op->src.offset;
1617 if (cfa->base == CFI_SP)
1618 cfa->offset -= op->src.offset;
1619 break;
1620 }
1621
1622 if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
1623
1624 /* lea disp(%rbp), %rsp */
1625 state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
1626 break;
1627 }
1628
1629 if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1630
1631 /* drap: lea disp(%rsp), %drap */
1632 state->drap_reg = op->dest.reg;
1633
1634 /*
1635 * lea disp(%rsp), %reg
1636 *
1637 * This is needed for the rare case where GCC
1638 * does something dumb like:
1639 *
1640 * lea 0x8(%rsp), %rcx
1641 * ...
1642 * mov %rcx, %rsp
1643 */
1644 state->vals[op->dest.reg].base = CFI_CFA;
1645 state->vals[op->dest.reg].offset = \
1646 -state->stack_size + op->src.offset;
1647
1648 break;
1649 }
1650
1651 if (state->drap && op->dest.reg == CFI_SP &&
1652 op->src.reg == state->drap_reg) {
1653
1654 /* drap: lea disp(%drap), %rsp */
1655 cfa->base = CFI_SP;
1656 cfa->offset = state->stack_size = -op->src.offset;
1657 state->drap_reg = CFI_UNDEFINED;
1658 state->drap = false;
1659 break;
1660 }
1661
1662 if (op->dest.reg == state->cfa.base) {
1663 WARN_FUNC("unsupported stack register modification",
1664 insn->sec, insn->offset);
1665 return -1;
1666 }
1667
1668 break;
1669
1670 case OP_SRC_AND:
1671 if (op->dest.reg != CFI_SP ||
1672 (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
1673 (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
1674 WARN_FUNC("unsupported stack pointer realignment",
1675 insn->sec, insn->offset);
1676 return -1;
1677 }
1678
1679 if (state->drap_reg != CFI_UNDEFINED) {
1680 /* drap: and imm, %rsp */
1681 cfa->base = state->drap_reg;
1682 cfa->offset = state->stack_size = 0;
1683 state->drap = true;
1684 }
1685
1686 /*
1687 * Older versions of GCC (4.8ish) realign the stack
1688 * without DRAP, with a frame pointer.
1689 */
1690
1691 break;
1692
1693 case OP_SRC_POP:
1694 case OP_SRC_POPF:
1695 if (!state->drap && op->dest.type == OP_DEST_REG &&
1696 op->dest.reg == cfa->base) {
1697
1698 /* pop %rbp */
1699 cfa->base = CFI_SP;
1700 }
1701
1702 if (state->drap && cfa->base == CFI_BP_INDIRECT &&
1703 op->dest.type == OP_DEST_REG &&
1704 op->dest.reg == state->drap_reg &&
1705 state->drap_offset == -state->stack_size) {
1706
1707 /* drap: pop %drap */
1708 cfa->base = state->drap_reg;
1709 cfa->offset = 0;
1710 state->drap_offset = -1;
1711
1712 } else if (regs[op->dest.reg].offset == -state->stack_size) {
1713
1714 /* pop %reg */
1715 restore_reg(state, op->dest.reg);
1716 }
1717
1718 state->stack_size -= 8;
1719 if (cfa->base == CFI_SP)
1720 cfa->offset -= 8;
1721
1722 break;
1723
1724 case OP_SRC_REG_INDIRECT:
1725 if (state->drap && op->src.reg == CFI_BP &&
1726 op->src.offset == state->drap_offset) {
1727
1728 /* drap: mov disp(%rbp), %drap */
1729 cfa->base = state->drap_reg;
1730 cfa->offset = 0;
1731 state->drap_offset = -1;
1732 }
1733
1734 if (state->drap && op->src.reg == CFI_BP &&
1735 op->src.offset == regs[op->dest.reg].offset) {
1736
1737 /* drap: mov disp(%rbp), %reg */
1738 restore_reg(state, op->dest.reg);
1739
1740 } else if (op->src.reg == cfa->base &&
1741 op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
1742
1743 /* mov disp(%rbp), %reg */
1744 /* mov disp(%rsp), %reg */
1745 restore_reg(state, op->dest.reg);
1746 }
1747
1748 break;
1749
1750 default:
1751 WARN_FUNC("unknown stack-related instruction",
1752 insn->sec, insn->offset);
1753 return -1;
1754 }
1755
1756 break;
1757
1758 case OP_DEST_PUSH:
1759 case OP_DEST_PUSHF:
1760 state->stack_size += 8;
1761 if (cfa->base == CFI_SP)
1762 cfa->offset += 8;
1763
1764 if (op->src.type != OP_SRC_REG)
1765 break;
1766
1767 if (state->drap) {
1768 if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1769
1770 /* drap: push %drap */
1771 cfa->base = CFI_BP_INDIRECT;
1772 cfa->offset = -state->stack_size;
1773
1774 /* save drap so we know when to restore it */
1775 state->drap_offset = -state->stack_size;
1776
1777 } else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {
1778
1779 /* drap: push %rbp */
1780 state->stack_size = 0;
1781
1782 } else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1783
1784 /* drap: push %reg */
1785 save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
1786 }
1787
1788 } else {
1789
1790 /* push %reg */
1791 save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
1792 }
1793
1794 /* detect when asm code uses rbp as a scratch register */
1795 if (!no_fp && insn->func && op->src.reg == CFI_BP &&
1796 cfa->base != CFI_BP)
1797 state->bp_scratch = true;
1798 break;
1799
1800 case OP_DEST_REG_INDIRECT:
1801
1802 if (state->drap) {
1803 if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1804
1805 /* drap: mov %drap, disp(%rbp) */
1806 cfa->base = CFI_BP_INDIRECT;
1807 cfa->offset = op->dest.offset;
1808
1809 /* save drap offset so we know when to restore it */
1810 state->drap_offset = op->dest.offset;
1811 }
1812
1813 else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1814
1815 /* drap: mov reg, disp(%rbp) */
1816 save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
1817 }
1818
1819 } else if (op->dest.reg == cfa->base) {
1820
1821 /* mov reg, disp(%rbp) */
1822 /* mov reg, disp(%rsp) */
1823 save_reg(state, op->src.reg, CFI_CFA,
1824 op->dest.offset - state->cfa.offset);
1825 }
1826
1827 break;
1828
1829 case OP_DEST_LEAVE:
1830 if ((!state->drap && cfa->base != CFI_BP) ||
1831 (state->drap && cfa->base != state->drap_reg)) {
1832 WARN_FUNC("leave instruction with modified stack frame",
1833 insn->sec, insn->offset);
1834 return -1;
1835 }
1836
1837 /* leave (mov %rbp, %rsp; pop %rbp) */
1838
1839 state->stack_size = -state->regs[CFI_BP].offset - 8;
1840 restore_reg(state, CFI_BP);
1841
1842 if (!state->drap) {
1843 cfa->base = CFI_SP;
1844 cfa->offset -= 8;
1845 }
1846
1847 break;
1848
1849 case OP_DEST_MEM:
1850 if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
1851 WARN_FUNC("unknown stack-related memory operation",
1852 insn->sec, insn->offset);
1853 return -1;
1854 }
1855
1856 /* pop mem */
1857 state->stack_size -= 8;
1858 if (cfa->base == CFI_SP)
1859 cfa->offset -= 8;
1860
1861 break;
1862
1863 default:
1864 WARN_FUNC("unknown stack-related instruction",
1865 insn->sec, insn->offset);
1866 return -1;
1867 }
1868
1869 return 0;
1870 }
1871
1872 static bool insn_state_match(struct instruction *insn, struct insn_state *state)
1873 {
1874 struct insn_state *state1 = &insn->state, *state2 = state;
1875 int i;
1876
1877 if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
1878 WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
1879 insn->sec, insn->offset,
1880 state1->cfa.base, state1->cfa.offset,
1881 state2->cfa.base, state2->cfa.offset);
1882
1883 } else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
1884 for (i = 0; i < CFI_NUM_REGS; i++) {
1885 if (!memcmp(&state1->regs[i], &state2->regs[i],
1886 sizeof(struct cfi_reg)))
1887 continue;
1888
1889 WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
1890 insn->sec, insn->offset,
1891 i, state1->regs[i].base, state1->regs[i].offset,
1892 i, state2->regs[i].base, state2->regs[i].offset);
1893 break;
1894 }
1895
1896 } else if (state1->type != state2->type) {
1897 WARN_FUNC("stack state mismatch: type1=%d type2=%d",
1898 insn->sec, insn->offset, state1->type, state2->type);
1899
1900 } else if (state1->drap != state2->drap ||
1901 (state1->drap && state1->drap_reg != state2->drap_reg) ||
1902 (state1->drap && state1->drap_offset != state2->drap_offset)) {
1903 WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
1904 insn->sec, insn->offset,
1905 state1->drap, state1->drap_reg, state1->drap_offset,
1906 state2->drap, state2->drap_reg, state2->drap_offset);
1907
1908 } else
1909 return true;
1910
1911 return false;
1912 }
1913
1914 static inline bool func_uaccess_safe(struct symbol *func)
1915 {
1916 if (func)
1917 return func->uaccess_safe;
1918
1919 return false;
1920 }
1921
1922 static inline const char *call_dest_name(struct instruction *insn)
1923 {
1924 if (insn->call_dest)
1925 return insn->call_dest->name;
1926
1927 return "{dynamic}";
1928 }
1929
1930 static int validate_call(struct instruction *insn, struct insn_state *state)
1931 {
1932 if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
1933 WARN_FUNC("call to %s() with UACCESS enabled",
1934 insn->sec, insn->offset, call_dest_name(insn));
1935 return 1;
1936 }
1937
1938 if (state->df) {
1939 WARN_FUNC("call to %s() with DF set",
1940 insn->sec, insn->offset, call_dest_name(insn));
1941 return 1;
1942 }
1943
1944 return 0;
1945 }
1946
1947 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
1948 {
1949 if (has_modified_stack_frame(state)) {
1950 WARN_FUNC("sibling call from callable instruction with modified stack frame",
1951 insn->sec, insn->offset);
1952 return 1;
1953 }
1954
1955 return validate_call(insn, state);
1956 }
1957
1958 /*
1959 * Follow the branch starting at the given instruction, and recursively follow
1960 * any other branches (jumps). Meanwhile, track the frame pointer state at
1961 * each instruction and validate all the rules described in
1962 * tools/objtool/Documentation/stack-validation.txt.
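 *
 * Besides the CFA and callee-saved register state, this also tracks whether
 * EFLAGS.AC (uaccess, via STAC/CLAC) and EFLAGS.DF (via STD/CLD) are set, so
 * that e.g. a call to a function not on the uaccess_safe_builtin whitelist
 * while AC is set gets flagged.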
1963 */
1964 static int validate_branch(struct objtool_file *file, struct symbol *func,
1965 struct instruction *first, struct insn_state state)
1966 {
1967 struct alternative *alt;
1968 struct instruction *insn, *next_insn;
1969 struct section *sec;
1970 u8 visited;
1971 int ret;
1972
1973 insn = first;
1974 sec = insn->sec;
1975
1976 if (insn->alt_group && list_empty(&insn->alts)) {
1977 WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
1978 sec, insn->offset);
1979 return 1;
1980 }
1981
1982 while (1) {
1983 next_insn = next_insn_same_sec(file, insn);
1984
1985 if (file->c_file && func && insn->func && func != insn->func->pfunc) {
1986 WARN("%s() falls through to next function %s()",
1987 func->name, insn->func->name);
1988 return 1;
1989 }
1990
1991 if (func && insn->ignore) {
1992 WARN_FUNC("BUG: why am I validating an ignored function?",
1993 sec, insn->offset);
1994 return 1;
1995 }
1996
1997 visited = 1 << state.uaccess;
1998 if (insn->visited) {
1999 if (!insn->hint && !insn_state_match(insn, &state))
2000 return 1;
2001
2002 if (insn->visited & visited)
2003 return 0;
2004 }
2005
2006 if (insn->hint) {
2007 if (insn->restore) {
2008 struct instruction *save_insn, *i;
2009
2010 i = insn;
2011 save_insn = NULL;
2012 func_for_each_insn_continue_reverse(file, func, i) {
2013 if (i->save) {
2014 save_insn = i;
2015 break;
2016 }
2017 }
2018
2019 if (!save_insn) {
2020 WARN_FUNC("no corresponding CFI save for CFI restore",
2021 sec, insn->offset);
2022 return 1;
2023 }
2024
2025 if (!save_insn->visited) {
2026 /*
2027 * Oops, no state to copy yet.
2028 * Hopefully we can reach this
2029 * instruction from another branch
2030 * after the save insn has been
2031 * visited.
2032 */
2033 if (insn == first)
2034 return 0;
2035
2036 WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
2037 sec, insn->offset);
2038 return 1;
2039 }
2040
2041 insn->state = save_insn->state;
2042 }
2043
2044 state = insn->state;
2045
2046 } else
2047 insn->state = state;
2048
2049 insn->visited |= visited;
2050
2051 if (!insn->ignore_alts) {
2052 bool skip_orig = false;
2053
2054 list_for_each_entry(alt, &insn->alts, list) {
2055 if (alt->skip_orig)
2056 skip_orig = true;
2057
2058 ret = validate_branch(file, func, alt->insn, state);
2059 if (ret) {
2060 if (backtrace)
2061 BT_FUNC("(alt)", insn);
2062 return ret;
2063 }
2064 }
2065
2066 if (skip_orig)
2067 return 0;
2068 }
2069
2070 switch (insn->type) {
2071
2072 case INSN_RETURN:
2073 if (state.uaccess && !func_uaccess_safe(func)) {
2074 WARN_FUNC("return with UACCESS enabled", sec, insn->offset);
2075 return 1;
2076 }
2077
2078 if (!state.uaccess && func_uaccess_safe(func)) {
2079 WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function", sec, insn->offset);
2080 return 1;
2081 }
2082
2083 if (state.df) {
2084 WARN_FUNC("return with DF set", sec, insn->offset);
2085 return 1;
2086 }
2087
2088 if (func && has_modified_stack_frame(&state)) {
2089 WARN_FUNC("return with modified stack frame",
2090 sec, insn->offset);
2091 return 1;
2092 }
2093
2094 if (state.bp_scratch) {
2095 WARN("%s uses BP as a scratch register",
2096 func->name);
2097 return 1;
2098 }
2099
2100 return 0;
2101
2102 case INSN_CALL:
2103 case INSN_CALL_DYNAMIC:
2104 ret = validate_call(insn, &state);
2105 if (ret)
2106 return ret;
2107
2108 if (!no_fp && func && !is_fentry_call(insn) &&
2109 !has_valid_stack_frame(&state)) {
2110 WARN_FUNC("call without frame pointer save/setup",
2111 sec, insn->offset);
2112 return 1;
2113 }
2114
2115 if (dead_end_function(file, insn->call_dest))
2116 return 0;
2117
2118 break;
2119
2120 case INSN_JUMP_CONDITIONAL:
2121 case INSN_JUMP_UNCONDITIONAL:
2122 if (func && is_sibling_call(insn)) {
2123 ret = validate_sibling_call(insn, &state);
2124 if (ret)
2125 return ret;
2126
2127 } else if (insn->jump_dest) {
2128 ret = validate_branch(file, func,
2129 insn->jump_dest, state);
2130 if (ret) {
2131 if (backtrace)
2132 BT_FUNC("(branch)", insn);
2133 return ret;
2134 }
2135 }
2136
2137 if (insn->type == INSN_JUMP_UNCONDITIONAL)
2138 return 0;
2139
2140 break;
2141
2142 case INSN_JUMP_DYNAMIC:
2143 case INSN_JUMP_DYNAMIC_CONDITIONAL:
2144 if (func && is_sibling_call(insn)) {
2145 ret = validate_sibling_call(insn, &state);
2146 if (ret)
2147 return ret;
2148 }
2149
2150 if (insn->type == INSN_JUMP_DYNAMIC)
2151 return 0;
2152
2153 break;
2154
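/*
 * Context-switching instructions (e.g. SYSRET, IRET) are only allowed
 * in a callable function when the next instruction carries an unwind
 * hint.
 */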
2155 case INSN_CONTEXT_SWITCH:
2156 if (func && (!next_insn || !next_insn->hint)) {
2157 WARN_FUNC("unsupported instruction in callable function",
2158 sec, insn->offset);
2159 return 1;
2160 }
2161 return 0;
2162
2163 case INSN_STACK:
2164 if (update_insn_state(insn, &state))
2165 return 1;
2166
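/*
 * PUSHF saves the current uaccess state on a bit stack so the
 * matching POPF can restore it.  The stack is seeded with a
 * bottom-of-stack marker bit; a set top bit means it's full.
 */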
2167 if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
2168 if (!state.uaccess_stack) {
2169 state.uaccess_stack = 1;
2170 } else if (state.uaccess_stack >> 31) {
2171 WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
2172 return 1;
2173 }
2174 state.uaccess_stack <<= 1;
2175 state.uaccess_stack |= state.uaccess;
2176 }
2177
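/* POPF: pop the uaccess state that was live at the matching PUSHF. */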
2178 if (insn->stack_op.src.type == OP_SRC_POPF) {
2179 if (state.uaccess_stack) {
2180 state.uaccess = state.uaccess_stack & 1;
2181 state.uaccess_stack >>= 1;
2182 if (state.uaccess_stack == 1)
2183 state.uaccess_stack = 0;
2184 }
2185 }
2186
2187 break;
2188
2189 case INSN_STAC:
2190 if (state.uaccess) {
2191 WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
2192 return 1;
2193 }
2194
2195 state.uaccess = true;
2196 break;
2197
2198 case INSN_CLAC:
2199 if (!state.uaccess && func) {
2200 WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
2201 return 1;
2202 }
2203
2204 if (func_uaccess_safe(func) && !state.uaccess_stack) {
2205 WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
2206 return 1;
2207 }
2208
2209 state.uaccess = false;
2210 break;
2211
2212 case INSN_STD:
2213 if (state.df)
2214 WARN_FUNC("recursive STD", sec, insn->offset);
2215
2216 state.df = true;
2217 break;
2218
2219 case INSN_CLD:
2220 if (!state.df && func)
2221 WARN_FUNC("redundant CLD", sec, insn->offset);
2222
2223 state.df = false;
2224 break;
2225
2226 default:
2227 break;
2228 }
2229
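/* Don't validate past a dead end (e.g. a call to a "noreturn" function). */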
2230 if (insn->dead_end)
2231 return 0;
2232
2233 if (!next_insn) {
2234 if (state.cfa.base == CFI_UNDEFINED)
2235 return 0;
2236 WARN("%s: unexpected end of section", sec->name);
2237 return 1;
2238 }
2239
2240 insn = next_insn;
2241 }
2242
2243 return 0;
2244 }
2245
2246 static int validate_unwind_hints(struct objtool_file *file)
2247 {
2248 struct instruction *insn;
2249 int ret, warnings = 0;
2250 struct insn_state state;
2251
2252 if (!file->hints)
2253 return 0;
2254
2255 clear_insn_state(&state);
2256
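/*
 * Validate from every unwind hint that wasn't reached via
 * validate_functions(); the hint supplies the initial unwind state.
 */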
2257 for_each_insn(file, insn) {
2258 if (insn->hint && !insn->visited) {
2259 ret = validate_branch(file, insn->func, insn, state);
2260 if (ret && backtrace)
2261 BT_FUNC("<=== (hint)", insn);
2262 warnings += ret;
2263 }
2264 }
2265
2266 return warnings;
2267 }
2268
2269 static int validate_retpoline(struct objtool_file *file)
2270 {
2271 struct instruction *insn;
2272 int warnings = 0;
2273
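/*
 * In a retpoline build, every indirect jump/call must either be
 * converted to a retpoline or be explicitly annotated as safe
 * (e.g. via ANNOTATE_RETPOLINE_SAFE).
 */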
2274 for_each_insn(file, insn) {
2275 if (insn->type != INSN_JUMP_DYNAMIC &&
2276 insn->type != INSN_CALL_DYNAMIC)
2277 continue;
2278
2279 if (insn->retpoline_safe)
2280 continue;
2281
2282 /*
2283 * .init.text code is run before userspace and thus doesn't
2284 * strictly need retpolines, except for modules which are
2285 * loaded late; they very much do need retpolines in their
2286 * .init.text.
2287 */
2288 if (!strcmp(insn->sec->name, ".init.text") && !module)
2289 continue;
2290
2291 WARN_FUNC("indirect %s found in RETPOLINE build",
2292 insn->sec, insn->offset,
2293 insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2294
2295 warnings++;
2296 }
2297
2298 return warnings;
2299 }
2300
2301 static bool is_kasan_insn(struct instruction *insn)
2302 {
2303 return (insn->type == INSN_CALL &&
2304 !strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2305 }
2306
2307 static bool is_ubsan_insn(struct instruction *insn)
2308 {
2309 return (insn->type == INSN_CALL &&
2310 !strcmp(insn->call_dest->name,
2311 "__ubsan_handle_builtin_unreachable"));
2312 }
2313
2314 static bool ignore_unreachable_insn(struct instruction *insn)
2315 {
2316 int i;
2317
2318 if (insn->ignore || insn->type == INSN_NOP)
2319 return true;
2320
2321 /*
2322 * Ignore any unused exceptions. This can happen when a whitelisted
2323 * function has an exception table entry.
2324 *
2325 * Also ignore alternative replacement instructions. This can happen
2326 * when a whitelisted function uses one of the ALTERNATIVE macros.
2327 */
2328 if (!strcmp(insn->sec->name, ".fixup") ||
2329 !strcmp(insn->sec->name, ".altinstr_replacement") ||
2330 !strcmp(insn->sec->name, ".altinstr_aux"))
2331 return true;
2332
2333 /*
2334 * Check if this (or a subsequent) instruction is related to
2335 * CONFIG_UBSAN or CONFIG_KASAN.
2336 *
2337 * End the search at 5 instructions to avoid going into the weeds.
2338 */
2339 if (!insn->func)
2340 return false;
2341 for (i = 0; i < 5; i++) {
2342
2343 if (is_kasan_insn(insn) || is_ubsan_insn(insn))
2344 return true;
2345
2346 if (insn->type == INSN_JUMP_UNCONDITIONAL) {
2347 if (insn->jump_dest &&
2348 insn->jump_dest->func == insn->func) {
2349 insn = insn->jump_dest;
2350 continue;
2351 }
2352
2353 break;
2354 }
2355
2356 if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
2357 break;
2358
2359 insn = list_next_entry(insn, list);
2360 }
2361
2362 return false;
2363 }
2364
2365 static int validate_functions(struct objtool_file *file)
2366 {
2367 struct section *sec;
2368 struct symbol *func;
2369 struct instruction *insn;
2370 struct insn_state state;
2371 int ret, warnings = 0;
2372
2373 clear_insn_state(&state);
2374
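/*
 * Seed the state with the architecture's CFI at function entry: the
 * CFA location, the initial register rules and the initial stack
 * size.
 */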
2375 state.cfa = initial_func_cfi.cfa;
2376 memcpy(&state.regs, &initial_func_cfi.regs,
2377 CFI_NUM_REGS * sizeof(struct cfi_reg));
2378 state.stack_size = initial_func_cfi.cfa.offset;
2379
2380 for_each_sec(file, sec) {
2381 list_for_each_entry(func, &sec->symbol_list, list) {
2382 if (func->type != STT_FUNC)
2383 continue;
2384
2385 if (!func->len) {
2386 WARN("%s() is missing an ELF size annotation",
2387 func->name);
2388 warnings++;
2389 }
2390
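/*
 * Validate only canonical parent symbols; .cold children and aliases
 * are covered through their parent.
 */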
2391 if (func->pfunc != func || func->alias != func)
2392 continue;
2393
2394 insn = find_insn(file, sec, func->offset);
2395 if (!insn || insn->ignore || insn->visited)
2396 continue;
2397
2398 state.uaccess = func->uaccess_safe;
2399
2400 ret = validate_branch(file, func, insn, state);
2401 if (ret && backtrace)
2402 BT_FUNC("<=== (func)", insn);
2403 warnings += ret;
2404 }
2405 }
2406
2407 return warnings;
2408 }
2409
2410 static int validate_reachable_instructions(struct objtool_file *file)
2411 {
2412 struct instruction *insn;
2413
2414 if (file->ignore_unreachables)
2415 return 0;
2416
2417 for_each_insn(file, insn) {
2418 if (insn->visited || ignore_unreachable_insn(insn))
2419 continue;
2420
2421 WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
2422 return 1;
2423 }
2424
2425 return 0;
2426 }
2427
2428 static void cleanup(struct objtool_file *file)
2429 {
2430 struct instruction *insn, *tmpinsn;
2431 struct alternative *alt, *tmpalt;
2432
2433 list_for_each_entry_safe(insn, tmpinsn, &file->insn_list, list) {
2434 list_for_each_entry_safe(alt, tmpalt, &insn->alts, list) {
2435 list_del(&alt->list);
2436 free(alt);
2437 }
2438 list_del(&insn->list);
2439 hash_del(&insn->hash);
2440 free(insn);
2441 }
2442 elf_close(file->elf);
2443 }
2444
2445 static struct objtool_file file;
2446
2447 int check(const char *_objname, bool orc)
2448 {
2449 int ret, warnings = 0;
2450
2451 objname = _objname;
2452
2453 file.elf = elf_read(objname, orc ? O_RDWR : O_RDONLY);
2454 if (!file.elf)
2455 return 1;
2456
2457 INIT_LIST_HEAD(&file.insn_list);
2458 hash_init(file.insn_hash);
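/*
 * A ".comment" section (compiler ident) typically indicates the
 * object was built from C rather than hand-written asm.
 */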
2459 file.c_file = find_section_by_name(file.elf, ".comment");
2460 file.ignore_unreachables = no_unreachable;
2461 file.hints = false;
2462
2463 arch_initial_func_cfi_state(&initial_func_cfi);
2464
2465 ret = decode_sections(&file);
2466 if (ret < 0)
2467 goto out;
2468 warnings += ret;
2469
2470 if (list_empty(&file.insn_list))
2471 goto out;
2472
2473 if (retpoline) {
2474 ret = validate_retpoline(&file);
2475 if (ret < 0)
2476 goto out;
2477 warnings += ret;
2478 }
2479
2480 ret = validate_functions(&file);
2481 if (ret < 0)
2482 goto out;
2483 warnings += ret;
2484
2485 ret = validate_unwind_hints(&file);
2486 if (ret < 0)
2487 goto out;
2488 warnings += ret;
2489
2490 if (!warnings) {
2491 ret = validate_reachable_instructions(&file);
2492 if (ret < 0)
2493 goto out;
2494 warnings += ret;
2495 }
2496
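/* Generate the ORC unwind data and sections and write them back to the object. */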
2497 if (orc) {
2498 ret = create_orc(&file);
2499 if (ret < 0)
2500 goto out;
2501
2502 ret = create_orc_sections(&file);
2503 if (ret < 0)
2504 goto out;
2505
2506 ret = elf_write(file.elf);
2507 if (ret < 0)
2508 goto out;
2509 }
2510
2511 out:
2512 cleanup(&file);
2513
2514 /* ignore warnings for now until we get all the code cleaned up */
2515 if (ret || warnings)
2516 return 0;
2517 return 0;
2518 }