1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Copyright (C) 2015-2017 Josh Poimboeuf <jpoimboe@redhat.com>
4 */
5
6 #include <string.h>
7 #include <stdlib.h>
8
9 #include "builtin.h"
10 #include "check.h"
11 #include "elf.h"
12 #include "special.h"
13 #include "arch.h"
14 #include "warn.h"
15
16 #include <linux/hashtable.h>
17 #include <linux/kernel.h>
18
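/*
 * Offset assigned to the fake jump instructions synthesized in
 * handle_group_alt(); add_jump_destinations() uses it to skip them.
 */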
19 #define FAKE_JUMP_OFFSET -1
20
21 struct alternative {
22 struct list_head list;
23 struct instruction *insn;
24 bool skip_orig;
25 };
26
27 const char *objname;
28 struct cfi_state initial_func_cfi;
29
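/*
 * Look up the decoded instruction at @sec + @offset.  Instructions are
 * kept in a hash table keyed by offset (filled in decode_instructions()),
 * so this avoids walking the whole instruction list.
 */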
30 struct instruction *find_insn(struct objtool_file *file,
31 struct section *sec, unsigned long offset)
32 {
33 struct instruction *insn;
34
35 hash_for_each_possible(file->insn_hash, insn, hash, offset)
36 if (insn->sec == sec && insn->offset == offset)
37 return insn;
38
39 return NULL;
40 }
41
42 static struct instruction *next_insn_same_sec(struct objtool_file *file,
43 struct instruction *insn)
44 {
45 struct instruction *next = list_next_entry(insn, list);
46
47 if (!next || &next->list == &file->insn_list || next->sec != insn->sec)
48 return NULL;
49
50 return next;
51 }
52
53 static struct instruction *next_insn_same_func(struct objtool_file *file,
54 struct instruction *insn)
55 {
56 struct instruction *next = list_next_entry(insn, list);
57 struct symbol *func = insn->func;
58
59 if (!func)
60 return NULL;
61
62 if (&next->list != &file->insn_list && next->func == func)
63 return next;
64
65 /* Check if we're already in the subfunction: */
66 if (func == func->cfunc)
67 return NULL;
68
69 /* Move to the subfunction: */
70 return find_insn(file, func->cfunc->sec, func->cfunc->offset);
71 }
72
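/*
 * Instruction iteration helpers.  func_for_each_insn_all() also follows a
 * parent function into its .cold. subfunction via next_insn_same_func(),
 * while func_for_each_insn() stays within the symbol's own address range.
 */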
73 #define func_for_each_insn_all(file, func, insn) \
74 for (insn = find_insn(file, func->sec, func->offset); \
75 insn; \
76 insn = next_insn_same_func(file, insn))
77
78 #define func_for_each_insn(file, func, insn) \
79 for (insn = find_insn(file, func->sec, func->offset); \
80 insn && &insn->list != &file->insn_list && \
81 insn->sec == func->sec && \
82 insn->offset < func->offset + func->len; \
83 insn = list_next_entry(insn, list))
84
85 #define func_for_each_insn_continue_reverse(file, func, insn) \
86 for (insn = list_prev_entry(insn, list); \
87 &insn->list != &file->insn_list && \
88 insn->sec == func->sec && insn->offset >= func->offset; \
89 insn = list_prev_entry(insn, list))
90
91 #define sec_for_each_insn_from(file, insn) \
92 for (; insn; insn = next_insn_same_sec(file, insn))
93
94 #define sec_for_each_insn_continue(file, insn) \
95 for (insn = next_insn_same_sec(file, insn); insn; \
96 insn = next_insn_same_sec(file, insn))
97
98 /*
99 * This checks to see if the given function is a "noreturn" function.
100 *
101 * For global functions which are outside the scope of this object file, we
102 * have to keep a manual list of them.
103 *
104 * For local functions, we have to detect them manually by simply looking for
105 * the lack of a return instruction.
106 *
107 * Returns:
108 * -1: error
109 * 0: no dead end
110 * 1: dead end
111 */
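/*
 * For illustration (not taken from the kernel), a local dead end looks
 * roughly like:
 *
 *	my_abort:
 *		push   %rbp
 *		mov    %rsp,%rbp
 *		call   panic
 *		ud2
 *
 * There is no INSN_RETURN anywhere in the body, so the function is
 * reported as a dead end (return value 1).
 */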
112 static int __dead_end_function(struct objtool_file *file, struct symbol *func,
113 int recursion)
114 {
115 int i;
116 struct instruction *insn;
117 bool empty = true;
118
119 /*
120 * Unfortunately these have to be hard coded because the noreturn
121 * attribute isn't provided in ELF data.
122 */
123 static const char * const global_noreturns[] = {
124 "__stack_chk_fail",
125 "panic",
126 "do_exit",
127 "do_task_dead",
128 "__module_put_and_exit",
129 "complete_and_exit",
130 "kvm_spurious_fault",
131 "__reiserfs_panic",
132 "lbug_with_loc",
133 "fortify_panic",
134 "usercopy_abort",
135 "machine_real_restart",
136 "rewind_stack_do_exit",
137 };
138
139 if (func->bind == STB_WEAK)
140 return 0;
141
142 if (func->bind == STB_GLOBAL)
143 for (i = 0; i < ARRAY_SIZE(global_noreturns); i++)
144 if (!strcmp(func->name, global_noreturns[i]))
145 return 1;
146
147 if (!func->len)
148 return 0;
149
150 insn = find_insn(file, func->sec, func->offset);
151 if (!insn->func)
152 return 0;
153
154 func_for_each_insn_all(file, func, insn) {
155 empty = false;
156
157 if (insn->type == INSN_RETURN)
158 return 0;
159 }
160
161 if (empty)
162 return 0;
163
164 /*
165 * A function can have a sibling call instead of a return. In that
166 * case, the function's dead-end status depends on whether the target
167 * of the sibling call returns.
168 */
169 func_for_each_insn_all(file, func, insn) {
170 if (insn->type == INSN_JUMP_UNCONDITIONAL) {
171 struct instruction *dest = insn->jump_dest;
172
173 if (!dest)
174 /* sibling call to another file */
175 return 0;
176
177 if (dest->func && dest->func->pfunc != insn->func->pfunc) {
178
179 /* local sibling call */
180 if (recursion == 5) {
181 /*
182 * Infinite recursion: two functions
183 * have sibling calls to each other.
184 * This is a very rare case. It means
185 * they aren't dead ends.
186 */
187 return 0;
188 }
189
190 return __dead_end_function(file, dest->func,
191 recursion + 1);
192 }
193 }
194
195 if (insn->type == INSN_JUMP_DYNAMIC && list_empty(&insn->alts))
196 /* sibling call */
197 return 0;
198 }
199
200 return 1;
201 }
202
203 static int dead_end_function(struct objtool_file *file, struct symbol *func)
204 {
205 return __dead_end_function(file, func, 0);
206 }
207
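/*
 * Reset an insn_state to "unknown": the CFA, all tracked registers and all
 * scratch values start out as CFI_UNDEFINED until decoding and
 * validate_branch() fill them in.
 */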
208 static void clear_insn_state(struct insn_state *state)
209 {
210 int i;
211
212 memset(state, 0, sizeof(*state));
213 state->cfa.base = CFI_UNDEFINED;
214 for (i = 0; i < CFI_NUM_REGS; i++) {
215 state->regs[i].base = CFI_UNDEFINED;
216 state->vals[i].base = CFI_UNDEFINED;
217 }
218 state->drap_reg = CFI_UNDEFINED;
219 state->drap_offset = -1;
220 }
221
222 /*
223 * Call the arch-specific instruction decoder for all the instructions and add
224 * them to the global instruction list.
225 */
226 static int decode_instructions(struct objtool_file *file)
227 {
228 struct section *sec;
229 struct symbol *func;
230 unsigned long offset;
231 struct instruction *insn;
232 int ret;
233
234 for_each_sec(file, sec) {
235
236 if (!(sec->sh.sh_flags & SHF_EXECINSTR))
237 continue;
238
239 if (strcmp(sec->name, ".altinstr_replacement") &&
240 strcmp(sec->name, ".altinstr_aux") &&
241 strncmp(sec->name, ".discard.", 9))
242 sec->text = true;
243
244 for (offset = 0; offset < sec->len; offset += insn->len) {
245 insn = malloc(sizeof(*insn));
246 if (!insn) {
247 WARN("malloc failed");
248 return -1;
249 }
250 memset(insn, 0, sizeof(*insn));
251 INIT_LIST_HEAD(&insn->alts);
252 clear_insn_state(&insn->state);
253
254 insn->sec = sec;
255 insn->offset = offset;
256
257 ret = arch_decode_instruction(file->elf, sec, offset,
258 sec->len - offset,
259 &insn->len, &insn->type,
260 &insn->immediate,
261 &insn->stack_op);
262 if (ret)
263 goto err;
264
265 if (!insn->type || insn->type > INSN_LAST) {
266 WARN_FUNC("invalid instruction type %d",
267 insn->sec, insn->offset, insn->type);
268 ret = -1;
269 goto err;
270 }
271
272 hash_add(file->insn_hash, &insn->hash, insn->offset);
273 list_add_tail(&insn->list, &file->insn_list);
274 }
275
276 list_for_each_entry(func, &sec->symbol_list, list) {
277 if (func->type != STT_FUNC)
278 continue;
279
280 if (!find_insn(file, sec, func->offset)) {
281 WARN("%s(): can't find starting instruction",
282 func->name);
283 return -1;
284 }
285
286 func_for_each_insn(file, func, insn)
287 if (!insn->func)
288 insn->func = func;
289 }
290 }
291
292 return 0;
293
294 err:
295 free(insn);
296 return ret;
297 }
298
299 /*
300 * Mark "ud2" instructions and manually annotated dead ends.
301 */
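/*
 * A rough sketch of what produces those annotations (the exact macro lives
 * in the kernel's compiler headers): unreachable() emits a PC-relative
 * entry into .discard.unreachable at the annotated location, e.g.
 *
 *	999:
 *	.pushsection .discard.unreachable
 *	.long 999b - .
 *	.popsection
 *
 * The .rela.discard.unreachable relocations parsed below come from those
 * entries.
 */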
302 static int add_dead_ends(struct objtool_file *file)
303 {
304 struct section *sec;
305 struct rela *rela;
306 struct instruction *insn;
307 bool found;
308
309 /*
310 * By default, "ud2" is a dead end unless otherwise annotated, because
311 * GCC 7 inserts it for certain divide-by-zero cases.
312 */
313 for_each_insn(file, insn)
314 if (insn->type == INSN_BUG)
315 insn->dead_end = true;
316
317 /*
318 * Check for manually annotated dead ends.
319 */
320 sec = find_section_by_name(file->elf, ".rela.discard.unreachable");
321 if (!sec)
322 goto reachable;
323
324 list_for_each_entry(rela, &sec->rela_list, list) {
325 if (rela->sym->type != STT_SECTION) {
326 WARN("unexpected relocation symbol type in %s", sec->name);
327 return -1;
328 }
329 insn = find_insn(file, rela->sym->sec, rela->addend);
330 if (insn)
331 insn = list_prev_entry(insn, list);
332 else if (rela->addend == rela->sym->sec->len) {
333 found = false;
334 list_for_each_entry_reverse(insn, &file->insn_list, list) {
335 if (insn->sec == rela->sym->sec) {
336 found = true;
337 break;
338 }
339 }
340
341 if (!found) {
342 WARN("can't find unreachable insn at %s+0x%x",
343 rela->sym->sec->name, rela->addend);
344 return -1;
345 }
346 } else {
347 WARN("can't find unreachable insn at %s+0x%x",
348 rela->sym->sec->name, rela->addend);
349 return -1;
350 }
351
352 insn->dead_end = true;
353 }
354
355 reachable:
356 /*
357 * These manually annotated reachable checks are needed for GCC 4.4,
358 * where the Linux unreachable() macro isn't supported. In that case
359 * GCC doesn't know the "ud2" is fatal, so it generates code as if it's
360 * not a dead end.
361 */
362 sec = find_section_by_name(file->elf, ".rela.discard.reachable");
363 if (!sec)
364 return 0;
365
366 list_for_each_entry(rela, &sec->rela_list, list) {
367 if (rela->sym->type != STT_SECTION) {
368 WARN("unexpected relocation symbol type in %s", sec->name);
369 return -1;
370 }
371 insn = find_insn(file, rela->sym->sec, rela->addend);
372 if (insn)
373 insn = list_prev_entry(insn, list);
374 else if (rela->addend == rela->sym->sec->len) {
375 found = false;
376 list_for_each_entry_reverse(insn, &file->insn_list, list) {
377 if (insn->sec == rela->sym->sec) {
378 found = true;
379 break;
380 }
381 }
382
383 if (!found) {
384 WARN("can't find reachable insn at %s+0x%x",
385 rela->sym->sec->name, rela->addend);
386 return -1;
387 }
388 } else {
389 WARN("can't find reachable insn at %s+0x%x",
390 rela->sym->sec->name, rela->addend);
391 return -1;
392 }
393
394 insn->dead_end = false;
395 }
396
397 return 0;
398 }
399
400 /*
401 * Warnings shouldn't be reported for ignored functions.
402 */
403 static void add_ignores(struct objtool_file *file)
404 {
405 struct instruction *insn;
406 struct section *sec;
407 struct symbol *func;
408 struct rela *rela;
409
410 sec = find_section_by_name(file->elf, ".rela.discard.func_stack_frame_non_standard");
411 if (!sec)
412 return;
413
414 list_for_each_entry(rela, &sec->rela_list, list) {
415 switch (rela->sym->type) {
416 case STT_FUNC:
417 func = rela->sym;
418 break;
419
420 case STT_SECTION:
421 func = find_symbol_by_offset(rela->sym->sec, rela->addend);
422 if (!func || func->type != STT_FUNC)
423 continue;
424 break;
425
426 default:
427 WARN("unexpected relocation symbol type in %s: %d", sec->name, rela->sym->type);
428 continue;
429 }
430
431 func_for_each_insn_all(file, func, insn)
432 insn->ignore = true;
433 }
434 }
435
436 /*
437  * This is a whitelist of functions that are allowed to be called with AC set.
438 * The list is meant to be minimal and only contains compiler instrumentation
439 * ABI and a few functions used to implement *_{to,from}_user() functions.
440 *
441 * These functions must not directly change AC, but may PUSHF/POPF.
442 */
443 static const char *uaccess_safe_builtin[] = {
444 /* KASAN */
445 "kasan_report",
446 "check_memory_region",
447 /* KASAN out-of-line */
448 "__asan_loadN_noabort",
449 "__asan_load1_noabort",
450 "__asan_load2_noabort",
451 "__asan_load4_noabort",
452 "__asan_load8_noabort",
453 "__asan_load16_noabort",
454 "__asan_storeN_noabort",
455 "__asan_store1_noabort",
456 "__asan_store2_noabort",
457 "__asan_store4_noabort",
458 "__asan_store8_noabort",
459 "__asan_store16_noabort",
460 /* KASAN in-line */
461 "__asan_report_load_n_noabort",
462 "__asan_report_load1_noabort",
463 "__asan_report_load2_noabort",
464 "__asan_report_load4_noabort",
465 "__asan_report_load8_noabort",
466 "__asan_report_load16_noabort",
467 "__asan_report_store_n_noabort",
468 "__asan_report_store1_noabort",
469 "__asan_report_store2_noabort",
470 "__asan_report_store4_noabort",
471 "__asan_report_store8_noabort",
472 "__asan_report_store16_noabort",
473 /* KCOV */
474 "write_comp_data",
475 "__sanitizer_cov_trace_pc",
476 "__sanitizer_cov_trace_const_cmp1",
477 "__sanitizer_cov_trace_const_cmp2",
478 "__sanitizer_cov_trace_const_cmp4",
479 "__sanitizer_cov_trace_const_cmp8",
480 "__sanitizer_cov_trace_cmp1",
481 "__sanitizer_cov_trace_cmp2",
482 "__sanitizer_cov_trace_cmp4",
483 "__sanitizer_cov_trace_cmp8",
484 /* UBSAN */
485 "ubsan_type_mismatch_common",
486 "__ubsan_handle_type_mismatch",
487 "__ubsan_handle_type_mismatch_v1",
488 /* misc */
489 "csum_partial_copy_generic",
490 "__memcpy_mcsafe",
491 "ftrace_likely_update", /* CONFIG_TRACE_BRANCH_PROFILING */
492 NULL
493 };
494
495 static void add_uaccess_safe(struct objtool_file *file)
496 {
497 struct symbol *func;
498 const char **name;
499
500 if (!uaccess)
501 return;
502
503 for (name = uaccess_safe_builtin; *name; name++) {
504 func = find_symbol_by_name(file->elf, *name);
505 if (!func)
506 continue;
507
508 func->alias->uaccess_safe = true;
509 }
510 }
511
512 /*
513 * FIXME: For now, just ignore any alternatives which add retpolines. This is
514 * a temporary hack, as it doesn't allow ORC to unwind from inside a retpoline.
515 * But it at least allows objtool to understand the control flow *around* the
516 * retpoline.
517 */
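/*
 * Each .discard.ignore_alts entry is assumed to be a PC-relative .long
 * pointing at the original instruction whose alternatives should be
 * ignored (emitted by ANNOTATE_IGNORE_ALTERNATIVE and friends); the
 * relocation's section + addend below resolve back to that instruction.
 */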
518 static int add_ignore_alternatives(struct objtool_file *file)
519 {
520 struct section *sec;
521 struct rela *rela;
522 struct instruction *insn;
523
524 sec = find_section_by_name(file->elf, ".rela.discard.ignore_alts");
525 if (!sec)
526 return 0;
527
528 list_for_each_entry(rela, &sec->rela_list, list) {
529 if (rela->sym->type != STT_SECTION) {
530 WARN("unexpected relocation symbol type in %s", sec->name);
531 return -1;
532 }
533
534 insn = find_insn(file, rela->sym->sec, rela->addend);
535 if (!insn) {
536 WARN("bad .discard.ignore_alts entry");
537 return -1;
538 }
539
540 insn->ignore_alts = true;
541 }
542
543 return 0;
544 }
545
546 /*
547 * Find the destination instructions for all jumps.
548 */
549 static int add_jump_destinations(struct objtool_file *file)
550 {
551 struct instruction *insn;
552 struct rela *rela;
553 struct section *dest_sec;
554 unsigned long dest_off;
555
556 for_each_insn(file, insn) {
557 if (insn->type != INSN_JUMP_CONDITIONAL &&
558 insn->type != INSN_JUMP_UNCONDITIONAL)
559 continue;
560
561 if (insn->ignore || insn->offset == FAKE_JUMP_OFFSET)
562 continue;
563
564 rela = find_rela_by_dest_range(insn->sec, insn->offset,
565 insn->len);
566 if (!rela) {
567 dest_sec = insn->sec;
568 dest_off = insn->offset + insn->len + insn->immediate;
569 } else if (rela->sym->type == STT_SECTION) {
570 dest_sec = rela->sym->sec;
571 dest_off = rela->addend + 4;
572 } else if (rela->sym->sec->idx) {
573 dest_sec = rela->sym->sec;
574 dest_off = rela->sym->sym.st_value + rela->addend + 4;
575 } else if (strstr(rela->sym->name, "_indirect_thunk_")) {
576 /*
577 * Retpoline jumps are really dynamic jumps in
578 * disguise, so convert them accordingly.
579 */
580 insn->type = INSN_JUMP_DYNAMIC;
581 insn->retpoline_safe = true;
582 continue;
583 } else {
584 /* sibling call */
585 insn->call_dest = rela->sym;
586 insn->jump_dest = NULL;
587 continue;
588 }
589
590 insn->jump_dest = find_insn(file, dest_sec, dest_off);
591 if (!insn->jump_dest) {
592
593 /*
594 * This is a special case where an alt instruction
595 * jumps past the end of the section. These are
596 * handled later in handle_group_alt().
597 */
598 if (!strcmp(insn->sec->name, ".altinstr_replacement"))
599 continue;
600
601 WARN_FUNC("can't find jump dest instruction at %s+0x%lx",
602 insn->sec, insn->offset, dest_sec->name,
603 dest_off);
604 return -1;
605 }
606
607 /*
608 * Cross-function jump.
609 */
610 if (insn->func && insn->jump_dest->func &&
611 insn->func != insn->jump_dest->func) {
612
613 /*
614 * For GCC 8+, create parent/child links for any cold
615 * subfunctions. This is _mostly_ redundant with a
616 * similar initialization in read_symbols().
617 *
618 * If a function has aliases, we want the *first* such
619 * function in the symbol table to be the subfunction's
620 * parent. In that case we overwrite the
621 * initialization done in read_symbols().
622 *
623 * However this code can't completely replace the
624 * read_symbols() code because this doesn't detect the
625 * case where the parent function's only reference to a
626 * subfunction is through a switch table.
627 */
628 if (!strstr(insn->func->name, ".cold.") &&
629 strstr(insn->jump_dest->func->name, ".cold.")) {
630 insn->func->cfunc = insn->jump_dest->func;
631 insn->jump_dest->func->pfunc = insn->func;
632
633 } else if (insn->jump_dest->func->pfunc != insn->func->pfunc &&
634 insn->jump_dest->offset == insn->jump_dest->func->offset) {
635
636 				/* sibling call */
637 insn->call_dest = insn->jump_dest->func;
638 insn->jump_dest = NULL;
639 }
640 }
641 }
642
643 return 0;
644 }
645
646 /*
647 * Find the destination instructions for all calls.
648 */
649 static int add_call_destinations(struct objtool_file *file)
650 {
651 struct instruction *insn;
652 unsigned long dest_off;
653 struct rela *rela;
654
655 for_each_insn(file, insn) {
656 if (insn->type != INSN_CALL)
657 continue;
658
659 rela = find_rela_by_dest_range(insn->sec, insn->offset,
660 insn->len);
661 if (!rela) {
662 dest_off = insn->offset + insn->len + insn->immediate;
663 insn->call_dest = find_symbol_by_offset(insn->sec,
664 dest_off);
665
666 if (!insn->call_dest && !insn->ignore) {
667 WARN_FUNC("unsupported intra-function call",
668 insn->sec, insn->offset);
669 if (retpoline)
670 WARN("If this is a retpoline, please patch it in with alternatives and annotate it with ANNOTATE_NOSPEC_ALTERNATIVE.");
671 return -1;
672 }
673
674 } else if (rela->sym->type == STT_SECTION) {
675 insn->call_dest = find_symbol_by_offset(rela->sym->sec,
676 rela->addend+4);
677 if (!insn->call_dest ||
678 insn->call_dest->type != STT_FUNC) {
679 WARN_FUNC("can't find call dest symbol at %s+0x%x",
680 insn->sec, insn->offset,
681 rela->sym->sec->name,
682 rela->addend + 4);
683 return -1;
684 }
685 } else
686 insn->call_dest = rela->sym;
687 }
688
689 return 0;
690 }
691
692 /*
693 * The .alternatives section requires some extra special care, over and above
694 * what other special sections require:
695 *
696 * 1. Because alternatives are patched in-place, we need to insert a fake jump
697 * instruction at the end so that validate_branch() skips all the original
698 * replaced instructions when validating the new instruction path.
699 *
700 * 2. An added wrinkle is that the new instruction length might be zero. In
701 * that case the old instructions are replaced with noops. We simulate that
702 * by creating a fake jump as the only new instruction.
703 *
704 * 3. In some cases, the alternative section includes an instruction which
705 * conditionally jumps to the _end_ of the entry. We have to modify these
706 * jumps' destinations to point back to .text rather than the end of the
707 * entry in .altinstr_replacement.
708 */
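/*
 * Conceptually, for an alternative patch site like
 *
 *	orig:	insn A
 *		insn B
 *	next:	insn C			(first insn after the patched range)
 *
 *	new:	insn A'			(in .altinstr_replacement)
 *
 * a fake "jmp next" is appended after A', so that when validate_branch()
 * takes the replacement path it continues at C rather than falling through
 * the original B.
 */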
709 static int handle_group_alt(struct objtool_file *file,
710 struct special_alt *special_alt,
711 struct instruction *orig_insn,
712 struct instruction **new_insn)
713 {
714 struct instruction *last_orig_insn, *last_new_insn, *insn, *fake_jump = NULL;
715 unsigned long dest_off;
716
717 last_orig_insn = NULL;
718 insn = orig_insn;
719 sec_for_each_insn_from(file, insn) {
720 if (insn->offset >= special_alt->orig_off + special_alt->orig_len)
721 break;
722
723 insn->alt_group = true;
724 last_orig_insn = insn;
725 }
726
727 if (next_insn_same_sec(file, last_orig_insn)) {
728 fake_jump = malloc(sizeof(*fake_jump));
729 if (!fake_jump) {
730 WARN("malloc failed");
731 return -1;
732 }
733 memset(fake_jump, 0, sizeof(*fake_jump));
734 INIT_LIST_HEAD(&fake_jump->alts);
735 clear_insn_state(&fake_jump->state);
736
737 fake_jump->sec = special_alt->new_sec;
738 fake_jump->offset = FAKE_JUMP_OFFSET;
739 fake_jump->type = INSN_JUMP_UNCONDITIONAL;
740 fake_jump->jump_dest = list_next_entry(last_orig_insn, list);
741 fake_jump->func = orig_insn->func;
742 }
743
744 if (!special_alt->new_len) {
745 if (!fake_jump) {
746 WARN("%s: empty alternative at end of section",
747 special_alt->orig_sec->name);
748 return -1;
749 }
750
751 *new_insn = fake_jump;
752 return 0;
753 }
754
755 last_new_insn = NULL;
756 insn = *new_insn;
757 sec_for_each_insn_from(file, insn) {
758 if (insn->offset >= special_alt->new_off + special_alt->new_len)
759 break;
760
761 last_new_insn = insn;
762
763 insn->ignore = orig_insn->ignore_alts;
764 insn->func = orig_insn->func;
765
766 if (insn->type != INSN_JUMP_CONDITIONAL &&
767 insn->type != INSN_JUMP_UNCONDITIONAL)
768 continue;
769
770 if (!insn->immediate)
771 continue;
772
773 dest_off = insn->offset + insn->len + insn->immediate;
774 if (dest_off == special_alt->new_off + special_alt->new_len) {
775 if (!fake_jump) {
776 WARN("%s: alternative jump to end of section",
777 special_alt->orig_sec->name);
778 return -1;
779 }
780 insn->jump_dest = fake_jump;
781 }
782
783 if (!insn->jump_dest) {
784 WARN_FUNC("can't find alternative jump destination",
785 insn->sec, insn->offset);
786 return -1;
787 }
788 }
789
790 if (!last_new_insn) {
791 WARN_FUNC("can't find last new alternative instruction",
792 special_alt->new_sec, special_alt->new_off);
793 return -1;
794 }
795
796 if (fake_jump)
797 list_add(&fake_jump->list, &last_new_insn->list);
798
799 return 0;
800 }
801
802 /*
803 * A jump table entry can either convert a nop to a jump or a jump to a nop.
804 * If the original instruction is a jump, make the alt entry an effective nop
805 * by just skipping the original instruction.
806 */
807 static int handle_jump_alt(struct objtool_file *file,
808 struct special_alt *special_alt,
809 struct instruction *orig_insn,
810 struct instruction **new_insn)
811 {
812 if (orig_insn->type == INSN_NOP)
813 return 0;
814
815 if (orig_insn->type != INSN_JUMP_UNCONDITIONAL) {
816 WARN_FUNC("unsupported instruction at jump label",
817 orig_insn->sec, orig_insn->offset);
818 return -1;
819 }
820
821 *new_insn = list_next_entry(orig_insn, list);
822 return 0;
823 }
824
825 /*
826  * Read all the special sections which have alternate instructions that can be
827  * patched in or redirected to at runtime. Each instruction with alternate
828  * instruction(s) gets them added to its insn->alts list, which will be
829 * traversed in validate_branch().
830 */
831 static int add_special_section_alts(struct objtool_file *file)
832 {
833 struct list_head special_alts;
834 struct instruction *orig_insn, *new_insn;
835 struct special_alt *special_alt, *tmp;
836 struct alternative *alt;
837 int ret;
838
839 ret = special_get_alts(file->elf, &special_alts);
840 if (ret)
841 return ret;
842
843 list_for_each_entry_safe(special_alt, tmp, &special_alts, list) {
844
845 orig_insn = find_insn(file, special_alt->orig_sec,
846 special_alt->orig_off);
847 if (!orig_insn) {
848 WARN_FUNC("special: can't find orig instruction",
849 special_alt->orig_sec, special_alt->orig_off);
850 ret = -1;
851 goto out;
852 }
853
854 new_insn = NULL;
855 if (!special_alt->group || special_alt->new_len) {
856 new_insn = find_insn(file, special_alt->new_sec,
857 special_alt->new_off);
858 if (!new_insn) {
859 WARN_FUNC("special: can't find new instruction",
860 special_alt->new_sec,
861 special_alt->new_off);
862 ret = -1;
863 goto out;
864 }
865 }
866
867 if (special_alt->group) {
868 ret = handle_group_alt(file, special_alt, orig_insn,
869 &new_insn);
870 if (ret)
871 goto out;
872 } else if (special_alt->jump_or_nop) {
873 ret = handle_jump_alt(file, special_alt, orig_insn,
874 &new_insn);
875 if (ret)
876 goto out;
877 }
878
879 alt = malloc(sizeof(*alt));
880 if (!alt) {
881 WARN("malloc failed");
882 ret = -1;
883 goto out;
884 }
885
886 alt->insn = new_insn;
887 alt->skip_orig = special_alt->skip_orig;
888 orig_insn->ignore_alts |= special_alt->skip_alt;
889 list_add_tail(&alt->list, &orig_insn->alts);
890
891 list_del(&special_alt->list);
892 free(special_alt);
893 }
894
895 out:
896 return ret;
897 }
898
899 static int add_switch_table(struct objtool_file *file, struct instruction *insn,
900 struct rela *table, struct rela *next_table)
901 {
902 struct rela *rela = table;
903 struct instruction *alt_insn;
904 struct alternative *alt;
905 struct symbol *pfunc = insn->func->pfunc;
906 unsigned int prev_offset = 0;
907
908 list_for_each_entry_from(rela, &table->rela_sec->rela_list, list) {
909 if (rela == next_table)
910 break;
911
912 /* Make sure the switch table entries are consecutive: */
913 if (prev_offset && rela->offset != prev_offset + 8)
914 break;
915
916 /* Detect function pointers from contiguous objects: */
917 if (rela->sym->sec == pfunc->sec &&
918 rela->addend == pfunc->offset)
919 break;
920
921 alt_insn = find_insn(file, rela->sym->sec, rela->addend);
922 if (!alt_insn)
923 break;
924
925 /* Make sure the jmp dest is in the function or subfunction: */
926 if (alt_insn->func->pfunc != pfunc)
927 break;
928
929 alt = malloc(sizeof(*alt));
930 if (!alt) {
931 WARN("malloc failed");
932 return -1;
933 }
934
935 alt->insn = alt_insn;
936 list_add_tail(&alt->list, &insn->alts);
937 prev_offset = rela->offset;
938 }
939
940 if (!prev_offset) {
941 WARN_FUNC("can't find switch jump table",
942 insn->sec, insn->offset);
943 return -1;
944 }
945
946 return 0;
947 }
948
949 /*
950 * find_switch_table() - Given a dynamic jump, find the switch jump table in
951 * .rodata associated with it.
952 *
953 * There are 3 basic patterns:
954 *
955 * 1. jmpq *[rodata addr](,%reg,8)
956 *
957 * This is the most common case by far. It jumps to an address in a simple
958 * jump table which is stored in .rodata.
959 *
960 * 2. jmpq *[rodata addr](%rip)
961 *
962 * This is caused by a rare GCC quirk, currently only seen in three driver
963 * functions in the kernel, only with certain obscure non-distro configs.
964 *
965 * As part of an optimization, GCC makes a copy of an existing switch jump
966 * table, modifies it, and then hard-codes the jump (albeit with an indirect
967 * jump) to use a single entry in the table. The rest of the jump table and
968 * some of its jump targets remain as dead code.
969 *
970 * In such a case we can just crudely ignore all unreachable instruction
971 * warnings for the entire object file. Ideally we would just ignore them
972 * for the function, but that would require redesigning the code quite a
973 * bit. And honestly that's just not worth doing: unreachable instruction
974 * warnings are of questionable value anyway, and this is such a rare issue.
975 *
976 * 3. mov [rodata addr],%reg1
977 * ... some instructions ...
978 * jmpq *(%reg1,%reg2,8)
979 *
980 * This is a fairly uncommon pattern which is new for GCC 6. As of this
981 * writing, there are 11 occurrences of it in the allmodconfig kernel.
982 *
983 * As of GCC 7 there are quite a few more of these and the 'in between' code
984 * is significant. Esp. with KASAN enabled some of the code between the mov
985 * and jmpq uses .rodata itself, which can confuse things.
986 *
987 * TODO: Once we have DWARF CFI and smarter instruction decoding logic,
988 * ensure the same register is used in the mov and jump instructions.
989 *
990 * NOTE: RETPOLINE made it harder still to decode dynamic jumps.
991 */
992 static struct rela *find_switch_table(struct objtool_file *file,
993 struct symbol *func,
994 struct instruction *insn)
995 {
996 struct rela *text_rela, *rodata_rela;
997 struct instruction *orig_insn = insn;
998 struct section *rodata_sec;
999 unsigned long table_offset;
1000
1001 /*
1002 	 * Backward search using the @first_jump_src links; these help avoid
1003 	 * much of the 'in between' code, which would otherwise get us
1004 	 * confused.
1005 */
1006 for (;
1007 &insn->list != &file->insn_list &&
1008 insn->sec == func->sec &&
1009 insn->offset >= func->offset;
1010
1011 insn = insn->first_jump_src ?: list_prev_entry(insn, list)) {
1012
1013 if (insn != orig_insn && insn->type == INSN_JUMP_DYNAMIC)
1014 break;
1015
1016 /* allow small jumps within the range */
1017 if (insn->type == INSN_JUMP_UNCONDITIONAL &&
1018 insn->jump_dest &&
1019 (insn->jump_dest->offset <= insn->offset ||
1020 insn->jump_dest->offset > orig_insn->offset))
1021 break;
1022
1023 /* look for a relocation which references .rodata */
1024 text_rela = find_rela_by_dest_range(insn->sec, insn->offset,
1025 insn->len);
1026 if (!text_rela || text_rela->sym->type != STT_SECTION ||
1027 !text_rela->sym->sec->rodata)
1028 continue;
1029
1030 table_offset = text_rela->addend;
1031 rodata_sec = text_rela->sym->sec;
1032
1033 if (text_rela->type == R_X86_64_PC32)
1034 table_offset += 4;
1035
1036 /*
1037 * Make sure the .rodata address isn't associated with a
1038 * symbol. gcc jump tables are anonymous data.
1039 */
1040 if (find_symbol_containing(rodata_sec, table_offset))
1041 continue;
1042
1043 rodata_rela = find_rela_by_dest(rodata_sec, table_offset);
1044 if (rodata_rela) {
1045 /*
1046 * Use of RIP-relative switch jumps is quite rare, and
1047 * indicates a rare GCC quirk/bug which can leave dead
1048 * code behind.
1049 */
1050 if (text_rela->type == R_X86_64_PC32)
1051 file->ignore_unreachables = true;
1052
1053 return rodata_rela;
1054 }
1055 }
1056
1057 return NULL;
1058 }
1059
1060
1061 static int add_func_switch_tables(struct objtool_file *file,
1062 struct symbol *func)
1063 {
1064 struct instruction *insn, *last = NULL, *prev_jump = NULL;
1065 struct rela *rela, *prev_rela = NULL;
1066 int ret;
1067
1068 func_for_each_insn_all(file, func, insn) {
1069 if (!last)
1070 last = insn;
1071
1072 /*
1073 * Store back-pointers for unconditional forward jumps such
1074 * that find_switch_table() can back-track using those and
1075 * avoid some potentially confusing code.
1076 */
1077 if (insn->type == INSN_JUMP_UNCONDITIONAL && insn->jump_dest &&
1078 insn->offset > last->offset &&
1079 insn->jump_dest->offset > insn->offset &&
1080 !insn->jump_dest->first_jump_src) {
1081
1082 insn->jump_dest->first_jump_src = insn;
1083 last = insn->jump_dest;
1084 }
1085
1086 if (insn->type != INSN_JUMP_DYNAMIC)
1087 continue;
1088
1089 rela = find_switch_table(file, func, insn);
1090 if (!rela)
1091 continue;
1092
1093 /*
1094 * We found a switch table, but we don't know yet how big it
1095 * is. Don't add it until we reach the end of the function or
1096 * the beginning of another switch table in the same function.
1097 */
1098 if (prev_jump) {
1099 ret = add_switch_table(file, prev_jump, prev_rela, rela);
1100 if (ret)
1101 return ret;
1102 }
1103
1104 prev_jump = insn;
1105 prev_rela = rela;
1106 }
1107
1108 if (prev_jump) {
1109 ret = add_switch_table(file, prev_jump, prev_rela, NULL);
1110 if (ret)
1111 return ret;
1112 }
1113
1114 return 0;
1115 }
1116
1117 /*
1118 * For some switch statements, gcc generates a jump table in the .rodata
1119 * section which contains a list of addresses within the function to jump to.
1120 * This finds these jump tables and adds them to the insn->alts lists.
1121 */
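/*
 * For example, a four-way switch typically produces a table of 8-byte,
 * consecutively laid out entries in .rodata, roughly:
 *
 *	.quad	.Lcase0
 *	.quad	.Lcase1
 *	.quad	.Lcase2
 *	.quad	.Lcase3
 *
 * where each target lies inside the function (or its .cold. subfunction).
 * Each entry becomes an alternative destination of the indirect jump.
 */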
1122 static int add_switch_table_alts(struct objtool_file *file)
1123 {
1124 struct section *sec;
1125 struct symbol *func;
1126 int ret;
1127
1128 if (!file->rodata)
1129 return 0;
1130
1131 for_each_sec(file, sec) {
1132 list_for_each_entry(func, &sec->symbol_list, list) {
1133 if (func->type != STT_FUNC)
1134 continue;
1135
1136 ret = add_func_switch_tables(file, func);
1137 if (ret)
1138 return ret;
1139 }
1140 }
1141
1142 return 0;
1143 }
1144
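/*
 * Read the UNWIND_HINT_*() annotations from .discard.unwind_hints and seed
 * the annotated instructions' CFI state (or mark CFI save/restore points),
 * so that validate_branch() can pick up hand-written asm in a known state.
 */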
1145 static int read_unwind_hints(struct objtool_file *file)
1146 {
1147 struct section *sec, *relasec;
1148 struct rela *rela;
1149 struct unwind_hint *hint;
1150 struct instruction *insn;
1151 struct cfi_reg *cfa;
1152 int i;
1153
1154 sec = find_section_by_name(file->elf, ".discard.unwind_hints");
1155 if (!sec)
1156 return 0;
1157
1158 relasec = sec->rela;
1159 if (!relasec) {
1160 WARN("missing .rela.discard.unwind_hints section");
1161 return -1;
1162 }
1163
1164 if (sec->len % sizeof(struct unwind_hint)) {
1165 WARN("struct unwind_hint size mismatch");
1166 return -1;
1167 }
1168
1169 file->hints = true;
1170
1171 for (i = 0; i < sec->len / sizeof(struct unwind_hint); i++) {
1172 hint = (struct unwind_hint *)sec->data->d_buf + i;
1173
1174 rela = find_rela_by_dest(sec, i * sizeof(*hint));
1175 if (!rela) {
1176 WARN("can't find rela for unwind_hints[%d]", i);
1177 return -1;
1178 }
1179
1180 insn = find_insn(file, rela->sym->sec, rela->addend);
1181 if (!insn) {
1182 WARN("can't find insn for unwind_hints[%d]", i);
1183 return -1;
1184 }
1185
1186 cfa = &insn->state.cfa;
1187
1188 if (hint->type == UNWIND_HINT_TYPE_SAVE) {
1189 insn->save = true;
1190 continue;
1191
1192 } else if (hint->type == UNWIND_HINT_TYPE_RESTORE) {
1193 insn->restore = true;
1194 insn->hint = true;
1195 continue;
1196 }
1197
1198 insn->hint = true;
1199
1200 switch (hint->sp_reg) {
1201 case ORC_REG_UNDEFINED:
1202 cfa->base = CFI_UNDEFINED;
1203 break;
1204 case ORC_REG_SP:
1205 cfa->base = CFI_SP;
1206 break;
1207 case ORC_REG_BP:
1208 cfa->base = CFI_BP;
1209 break;
1210 case ORC_REG_SP_INDIRECT:
1211 cfa->base = CFI_SP_INDIRECT;
1212 break;
1213 case ORC_REG_R10:
1214 cfa->base = CFI_R10;
1215 break;
1216 case ORC_REG_R13:
1217 cfa->base = CFI_R13;
1218 break;
1219 case ORC_REG_DI:
1220 cfa->base = CFI_DI;
1221 break;
1222 case ORC_REG_DX:
1223 cfa->base = CFI_DX;
1224 break;
1225 default:
1226 WARN_FUNC("unsupported unwind_hint sp base reg %d",
1227 insn->sec, insn->offset, hint->sp_reg);
1228 return -1;
1229 }
1230
1231 cfa->offset = hint->sp_offset;
1232 insn->state.type = hint->type;
1233 insn->state.end = hint->end;
1234 }
1235
1236 return 0;
1237 }
1238
1239 static int read_retpoline_hints(struct objtool_file *file)
1240 {
1241 struct section *sec;
1242 struct instruction *insn;
1243 struct rela *rela;
1244
1245 sec = find_section_by_name(file->elf, ".rela.discard.retpoline_safe");
1246 if (!sec)
1247 return 0;
1248
1249 list_for_each_entry(rela, &sec->rela_list, list) {
1250 if (rela->sym->type != STT_SECTION) {
1251 WARN("unexpected relocation symbol type in %s", sec->name);
1252 return -1;
1253 }
1254
1255 insn = find_insn(file, rela->sym->sec, rela->addend);
1256 if (!insn) {
1257 WARN("bad .discard.retpoline_safe entry");
1258 return -1;
1259 }
1260
1261 if (insn->type != INSN_JUMP_DYNAMIC &&
1262 insn->type != INSN_CALL_DYNAMIC) {
1263 WARN_FUNC("retpoline_safe hint not an indirect jump/call",
1264 insn->sec, insn->offset);
1265 return -1;
1266 }
1267
1268 insn->retpoline_safe = true;
1269 }
1270
1271 return 0;
1272 }
1273
1274 static void mark_rodata(struct objtool_file *file)
1275 {
1276 struct section *sec;
1277 bool found = false;
1278
1279 /*
1280 * This searches for the .rodata section or multiple .rodata.func_name
1281 	 * sections if -fdata-sections is being used. The .str1.1 and .str1.8
1282 * rodata sections are ignored as they don't contain jump tables.
1283 */
1284 for_each_sec(file, sec) {
1285 if (!strncmp(sec->name, ".rodata", 7) &&
1286 !strstr(sec->name, ".str1.")) {
1287 sec->rodata = true;
1288 found = true;
1289 }
1290 }
1291
1292 file->rodata = found;
1293 }
1294
1295 static int decode_sections(struct objtool_file *file)
1296 {
1297 int ret;
1298
1299 mark_rodata(file);
1300
1301 ret = decode_instructions(file);
1302 if (ret)
1303 return ret;
1304
1305 ret = add_dead_ends(file);
1306 if (ret)
1307 return ret;
1308
1309 add_ignores(file);
1310 add_uaccess_safe(file);
1311
1312 ret = add_ignore_alternatives(file);
1313 if (ret)
1314 return ret;
1315
1316 ret = add_jump_destinations(file);
1317 if (ret)
1318 return ret;
1319
1320 ret = add_special_section_alts(file);
1321 if (ret)
1322 return ret;
1323
1324 ret = add_call_destinations(file);
1325 if (ret)
1326 return ret;
1327
1328 ret = add_switch_table_alts(file);
1329 if (ret)
1330 return ret;
1331
1332 ret = read_unwind_hints(file);
1333 if (ret)
1334 return ret;
1335
1336 ret = read_retpoline_hints(file);
1337 if (ret)
1338 return ret;
1339
1340 return 0;
1341 }
1342
1343 static bool is_fentry_call(struct instruction *insn)
1344 {
1345 if (insn->type == INSN_CALL &&
1346 insn->call_dest->type == STT_NOTYPE &&
1347 !strcmp(insn->call_dest->name, "__fentry__"))
1348 return true;
1349
1350 return false;
1351 }
1352
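/*
 * A stack frame counts as "modified" when the CFA, the tracked stack size
 * or any callee-saved register no longer matches the state captured at
 * function entry (initial_func_cfi), or when DRAP is in effect.
 */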
1353 static bool has_modified_stack_frame(struct insn_state *state)
1354 {
1355 int i;
1356
1357 if (state->cfa.base != initial_func_cfi.cfa.base ||
1358 state->cfa.offset != initial_func_cfi.cfa.offset ||
1359 state->stack_size != initial_func_cfi.cfa.offset ||
1360 state->drap)
1361 return true;
1362
1363 for (i = 0; i < CFI_NUM_REGS; i++)
1364 if (state->regs[i].base != initial_func_cfi.regs[i].base ||
1365 state->regs[i].offset != initial_func_cfi.regs[i].offset)
1366 return true;
1367
1368 return false;
1369 }
1370
1371 static bool has_valid_stack_frame(struct insn_state *state)
1372 {
1373 if (state->cfa.base == CFI_BP && state->regs[CFI_BP].base == CFI_CFA &&
1374 state->regs[CFI_BP].offset == -16)
1375 return true;
1376
1377 if (state->drap && state->regs[CFI_BP].base == CFI_BP)
1378 return true;
1379
1380 return false;
1381 }
1382
1383 static int update_insn_state_regs(struct instruction *insn, struct insn_state *state)
1384 {
1385 struct cfi_reg *cfa = &state->cfa;
1386 struct stack_op *op = &insn->stack_op;
1387
1388 if (cfa->base != CFI_SP)
1389 return 0;
1390
1391 /* push */
1392 if (op->dest.type == OP_DEST_PUSH || op->dest.type == OP_DEST_PUSHF)
1393 cfa->offset += 8;
1394
1395 /* pop */
1396 if (op->src.type == OP_SRC_POP || op->src.type == OP_SRC_POPF)
1397 cfa->offset -= 8;
1398
1399 /* add immediate to sp */
1400 if (op->dest.type == OP_DEST_REG && op->src.type == OP_SRC_ADD &&
1401 op->dest.reg == CFI_SP && op->src.reg == CFI_SP)
1402 cfa->offset -= op->src.offset;
1403
1404 return 0;
1405 }
1406
1407 static void save_reg(struct insn_state *state, unsigned char reg, int base,
1408 int offset)
1409 {
1410 if (arch_callee_saved_reg(reg) &&
1411 state->regs[reg].base == CFI_UNDEFINED) {
1412 state->regs[reg].base = base;
1413 state->regs[reg].offset = offset;
1414 }
1415 }
1416
1417 static void restore_reg(struct insn_state *state, unsigned char reg)
1418 {
1419 state->regs[reg].base = CFI_UNDEFINED;
1420 state->regs[reg].offset = 0;
1421 }
1422
1423 /*
1424 * A note about DRAP stack alignment:
1425 *
1426 * GCC has the concept of a DRAP register, which is used to help keep track of
1427 * the stack pointer when aligning the stack. r10 or r13 is used as the DRAP
1428 * register. The typical DRAP pattern is:
1429 *
1430 * 4c 8d 54 24 08 lea 0x8(%rsp),%r10
1431 * 48 83 e4 c0 and $0xffffffffffffffc0,%rsp
1432 * 41 ff 72 f8 pushq -0x8(%r10)
1433 * 55 push %rbp
1434 * 48 89 e5 mov %rsp,%rbp
1435 * (more pushes)
1436 * 41 52 push %r10
1437 * ...
1438 * 41 5a pop %r10
1439 * (more pops)
1440 * 5d pop %rbp
1441 * 49 8d 62 f8 lea -0x8(%r10),%rsp
1442 * c3 retq
1443 *
1444 * There are some variations in the epilogues, like:
1445 *
1446 * 5b pop %rbx
1447 * 41 5a pop %r10
1448 * 41 5c pop %r12
1449 * 41 5d pop %r13
1450 * 41 5e pop %r14
1451 * c9 leaveq
1452 * 49 8d 62 f8 lea -0x8(%r10),%rsp
1453 * c3 retq
1454 *
1455 * and:
1456 *
1457 * 4c 8b 55 e8 mov -0x18(%rbp),%r10
1458 * 48 8b 5d e0 mov -0x20(%rbp),%rbx
1459 * 4c 8b 65 f0 mov -0x10(%rbp),%r12
1460 * 4c 8b 6d f8 mov -0x8(%rbp),%r13
1461 * c9 leaveq
1462 * 49 8d 62 f8 lea -0x8(%r10),%rsp
1463 * c3 retq
1464 *
1465 * Sometimes r13 is used as the DRAP register, in which case it's saved and
1466 * restored beforehand:
1467 *
1468 * 41 55 push %r13
1469 * 4c 8d 6c 24 10 lea 0x10(%rsp),%r13
1470 * 48 83 e4 f0 and $0xfffffffffffffff0,%rsp
1471 * ...
1472 * 49 8d 65 f0 lea -0x10(%r13),%rsp
1473 * 41 5d pop %r13
1474 * c3 retq
1475 */
1476 static int update_insn_state(struct instruction *insn, struct insn_state *state)
1477 {
1478 struct stack_op *op = &insn->stack_op;
1479 struct cfi_reg *cfa = &state->cfa;
1480 struct cfi_reg *regs = state->regs;
1481
1482 /* stack operations don't make sense with an undefined CFA */
1483 if (cfa->base == CFI_UNDEFINED) {
1484 if (insn->func) {
1485 WARN_FUNC("undefined stack state", insn->sec, insn->offset);
1486 return -1;
1487 }
1488 return 0;
1489 }
1490
1491 if (state->type == ORC_TYPE_REGS || state->type == ORC_TYPE_REGS_IRET)
1492 return update_insn_state_regs(insn, state);
1493
1494 switch (op->dest.type) {
1495
1496 case OP_DEST_REG:
1497 switch (op->src.type) {
1498
1499 case OP_SRC_REG:
1500 if (op->src.reg == CFI_SP && op->dest.reg == CFI_BP &&
1501 cfa->base == CFI_SP &&
1502 regs[CFI_BP].base == CFI_CFA &&
1503 regs[CFI_BP].offset == -cfa->offset) {
1504
1505 /* mov %rsp, %rbp */
1506 cfa->base = op->dest.reg;
1507 state->bp_scratch = false;
1508 }
1509
1510 else if (op->src.reg == CFI_SP &&
1511 op->dest.reg == CFI_BP && state->drap) {
1512
1513 /* drap: mov %rsp, %rbp */
1514 regs[CFI_BP].base = CFI_BP;
1515 regs[CFI_BP].offset = -state->stack_size;
1516 state->bp_scratch = false;
1517 }
1518
1519 else if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1520
1521 /*
1522 * mov %rsp, %reg
1523 *
1524 * This is needed for the rare case where GCC
1525 * does:
1526 *
1527 * mov %rsp, %rax
1528 * ...
1529 * mov %rax, %rsp
1530 */
1531 state->vals[op->dest.reg].base = CFI_CFA;
1532 state->vals[op->dest.reg].offset = -state->stack_size;
1533 }
1534
1535 else if (op->src.reg == CFI_BP && op->dest.reg == CFI_SP &&
1536 cfa->base == CFI_BP) {
1537
1538 /*
1539 * mov %rbp, %rsp
1540 *
1541 * Restore the original stack pointer (Clang).
1542 */
1543 state->stack_size = -state->regs[CFI_BP].offset;
1544 }
1545
1546 else if (op->dest.reg == cfa->base) {
1547
1548 /* mov %reg, %rsp */
1549 if (cfa->base == CFI_SP &&
1550 state->vals[op->src.reg].base == CFI_CFA) {
1551
1552 /*
1553 * This is needed for the rare case
1554 * where GCC does something dumb like:
1555 *
1556 * lea 0x8(%rsp), %rcx
1557 * ...
1558 * mov %rcx, %rsp
1559 */
1560 cfa->offset = -state->vals[op->src.reg].offset;
1561 state->stack_size = cfa->offset;
1562
1563 } else {
1564 cfa->base = CFI_UNDEFINED;
1565 cfa->offset = 0;
1566 }
1567 }
1568
1569 break;
1570
1571 case OP_SRC_ADD:
1572 if (op->dest.reg == CFI_SP && op->src.reg == CFI_SP) {
1573
1574 /* add imm, %rsp */
1575 state->stack_size -= op->src.offset;
1576 if (cfa->base == CFI_SP)
1577 cfa->offset -= op->src.offset;
1578 break;
1579 }
1580
1581 if (op->dest.reg == CFI_SP && op->src.reg == CFI_BP) {
1582
1583 /* lea disp(%rbp), %rsp */
1584 state->stack_size = -(op->src.offset + regs[CFI_BP].offset);
1585 break;
1586 }
1587
1588 if (op->src.reg == CFI_SP && cfa->base == CFI_SP) {
1589
1590 /* drap: lea disp(%rsp), %drap */
1591 state->drap_reg = op->dest.reg;
1592
1593 /*
1594 * lea disp(%rsp), %reg
1595 *
1596 * This is needed for the rare case where GCC
1597 * does something dumb like:
1598 *
1599 * lea 0x8(%rsp), %rcx
1600 * ...
1601 * mov %rcx, %rsp
1602 */
1603 state->vals[op->dest.reg].base = CFI_CFA;
1604 state->vals[op->dest.reg].offset = \
1605 -state->stack_size + op->src.offset;
1606
1607 break;
1608 }
1609
1610 if (state->drap && op->dest.reg == CFI_SP &&
1611 op->src.reg == state->drap_reg) {
1612
1613 /* drap: lea disp(%drap), %rsp */
1614 cfa->base = CFI_SP;
1615 cfa->offset = state->stack_size = -op->src.offset;
1616 state->drap_reg = CFI_UNDEFINED;
1617 state->drap = false;
1618 break;
1619 }
1620
1621 if (op->dest.reg == state->cfa.base) {
1622 WARN_FUNC("unsupported stack register modification",
1623 insn->sec, insn->offset);
1624 return -1;
1625 }
1626
1627 break;
1628
1629 case OP_SRC_AND:
1630 if (op->dest.reg != CFI_SP ||
1631 (state->drap_reg != CFI_UNDEFINED && cfa->base != CFI_SP) ||
1632 (state->drap_reg == CFI_UNDEFINED && cfa->base != CFI_BP)) {
1633 WARN_FUNC("unsupported stack pointer realignment",
1634 insn->sec, insn->offset);
1635 return -1;
1636 }
1637
1638 if (state->drap_reg != CFI_UNDEFINED) {
1639 /* drap: and imm, %rsp */
1640 cfa->base = state->drap_reg;
1641 cfa->offset = state->stack_size = 0;
1642 state->drap = true;
1643 }
1644
1645 /*
1646 * Older versions of GCC (4.8ish) realign the stack
1647 * without DRAP, with a frame pointer.
1648 */
1649
1650 break;
1651
1652 case OP_SRC_POP:
1653 case OP_SRC_POPF:
1654 if (!state->drap && op->dest.type == OP_DEST_REG &&
1655 op->dest.reg == cfa->base) {
1656
1657 /* pop %rbp */
1658 cfa->base = CFI_SP;
1659 }
1660
1661 if (state->drap && cfa->base == CFI_BP_INDIRECT &&
1662 op->dest.type == OP_DEST_REG &&
1663 op->dest.reg == state->drap_reg &&
1664 state->drap_offset == -state->stack_size) {
1665
1666 /* drap: pop %drap */
1667 cfa->base = state->drap_reg;
1668 cfa->offset = 0;
1669 state->drap_offset = -1;
1670
1671 } else if (regs[op->dest.reg].offset == -state->stack_size) {
1672
1673 /* pop %reg */
1674 restore_reg(state, op->dest.reg);
1675 }
1676
1677 state->stack_size -= 8;
1678 if (cfa->base == CFI_SP)
1679 cfa->offset -= 8;
1680
1681 break;
1682
1683 case OP_SRC_REG_INDIRECT:
1684 if (state->drap && op->src.reg == CFI_BP &&
1685 op->src.offset == state->drap_offset) {
1686
1687 /* drap: mov disp(%rbp), %drap */
1688 cfa->base = state->drap_reg;
1689 cfa->offset = 0;
1690 state->drap_offset = -1;
1691 }
1692
1693 if (state->drap && op->src.reg == CFI_BP &&
1694 op->src.offset == regs[op->dest.reg].offset) {
1695
1696 /* drap: mov disp(%rbp), %reg */
1697 restore_reg(state, op->dest.reg);
1698
1699 } else if (op->src.reg == cfa->base &&
1700 op->src.offset == regs[op->dest.reg].offset + cfa->offset) {
1701
1702 /* mov disp(%rbp), %reg */
1703 /* mov disp(%rsp), %reg */
1704 restore_reg(state, op->dest.reg);
1705 }
1706
1707 break;
1708
1709 default:
1710 WARN_FUNC("unknown stack-related instruction",
1711 insn->sec, insn->offset);
1712 return -1;
1713 }
1714
1715 break;
1716
1717 case OP_DEST_PUSH:
1718 case OP_DEST_PUSHF:
1719 state->stack_size += 8;
1720 if (cfa->base == CFI_SP)
1721 cfa->offset += 8;
1722
1723 if (op->src.type != OP_SRC_REG)
1724 break;
1725
1726 if (state->drap) {
1727 if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1728
1729 /* drap: push %drap */
1730 cfa->base = CFI_BP_INDIRECT;
1731 cfa->offset = -state->stack_size;
1732
1733 /* save drap so we know when to restore it */
1734 state->drap_offset = -state->stack_size;
1735
1736 } else if (op->src.reg == CFI_BP && cfa->base == state->drap_reg) {
1737
1738 /* drap: push %rbp */
1739 state->stack_size = 0;
1740
1741 } else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1742
1743 /* drap: push %reg */
1744 save_reg(state, op->src.reg, CFI_BP, -state->stack_size);
1745 }
1746
1747 } else {
1748
1749 /* push %reg */
1750 save_reg(state, op->src.reg, CFI_CFA, -state->stack_size);
1751 }
1752
1753 /* detect when asm code uses rbp as a scratch register */
1754 if (!no_fp && insn->func && op->src.reg == CFI_BP &&
1755 cfa->base != CFI_BP)
1756 state->bp_scratch = true;
1757 break;
1758
1759 case OP_DEST_REG_INDIRECT:
1760
1761 if (state->drap) {
1762 if (op->src.reg == cfa->base && op->src.reg == state->drap_reg) {
1763
1764 /* drap: mov %drap, disp(%rbp) */
1765 cfa->base = CFI_BP_INDIRECT;
1766 cfa->offset = op->dest.offset;
1767
1768 /* save drap offset so we know when to restore it */
1769 state->drap_offset = op->dest.offset;
1770 }
1771
1772 else if (regs[op->src.reg].base == CFI_UNDEFINED) {
1773
1774 /* drap: mov reg, disp(%rbp) */
1775 save_reg(state, op->src.reg, CFI_BP, op->dest.offset);
1776 }
1777
1778 } else if (op->dest.reg == cfa->base) {
1779
1780 /* mov reg, disp(%rbp) */
1781 /* mov reg, disp(%rsp) */
1782 save_reg(state, op->src.reg, CFI_CFA,
1783 op->dest.offset - state->cfa.offset);
1784 }
1785
1786 break;
1787
1788 case OP_DEST_LEAVE:
1789 if ((!state->drap && cfa->base != CFI_BP) ||
1790 (state->drap && cfa->base != state->drap_reg)) {
1791 WARN_FUNC("leave instruction with modified stack frame",
1792 insn->sec, insn->offset);
1793 return -1;
1794 }
1795
1796 /* leave (mov %rbp, %rsp; pop %rbp) */
1797
1798 state->stack_size = -state->regs[CFI_BP].offset - 8;
1799 restore_reg(state, CFI_BP);
1800
1801 if (!state->drap) {
1802 cfa->base = CFI_SP;
1803 cfa->offset -= 8;
1804 }
1805
1806 break;
1807
1808 case OP_DEST_MEM:
1809 if (op->src.type != OP_SRC_POP && op->src.type != OP_SRC_POPF) {
1810 WARN_FUNC("unknown stack-related memory operation",
1811 insn->sec, insn->offset);
1812 return -1;
1813 }
1814
1815 /* pop mem */
1816 state->stack_size -= 8;
1817 if (cfa->base == CFI_SP)
1818 cfa->offset -= 8;
1819
1820 break;
1821
1822 default:
1823 WARN_FUNC("unknown stack-related instruction",
1824 insn->sec, insn->offset);
1825 return -1;
1826 }
1827
1828 return 0;
1829 }
1830
1831 static bool insn_state_match(struct instruction *insn, struct insn_state *state)
1832 {
1833 struct insn_state *state1 = &insn->state, *state2 = state;
1834 int i;
1835
1836 if (memcmp(&state1->cfa, &state2->cfa, sizeof(state1->cfa))) {
1837 WARN_FUNC("stack state mismatch: cfa1=%d%+d cfa2=%d%+d",
1838 insn->sec, insn->offset,
1839 state1->cfa.base, state1->cfa.offset,
1840 state2->cfa.base, state2->cfa.offset);
1841
1842 } else if (memcmp(&state1->regs, &state2->regs, sizeof(state1->regs))) {
1843 for (i = 0; i < CFI_NUM_REGS; i++) {
1844 if (!memcmp(&state1->regs[i], &state2->regs[i],
1845 sizeof(struct cfi_reg)))
1846 continue;
1847
1848 WARN_FUNC("stack state mismatch: reg1[%d]=%d%+d reg2[%d]=%d%+d",
1849 insn->sec, insn->offset,
1850 i, state1->regs[i].base, state1->regs[i].offset,
1851 i, state2->regs[i].base, state2->regs[i].offset);
1852 break;
1853 }
1854
1855 } else if (state1->type != state2->type) {
1856 WARN_FUNC("stack state mismatch: type1=%d type2=%d",
1857 insn->sec, insn->offset, state1->type, state2->type);
1858
1859 } else if (state1->drap != state2->drap ||
1860 (state1->drap && state1->drap_reg != state2->drap_reg) ||
1861 (state1->drap && state1->drap_offset != state2->drap_offset)) {
1862 WARN_FUNC("stack state mismatch: drap1=%d(%d,%d) drap2=%d(%d,%d)",
1863 insn->sec, insn->offset,
1864 state1->drap, state1->drap_reg, state1->drap_offset,
1865 state2->drap, state2->drap_reg, state2->drap_offset);
1866
1867 } else
1868 return true;
1869
1870 return false;
1871 }
1872
1873 static inline bool func_uaccess_safe(struct symbol *func)
1874 {
1875 if (func)
1876 return func->alias->uaccess_safe;
1877
1878 return false;
1879 }
1880
1881 static inline const char *insn_dest_name(struct instruction *insn)
1882 {
1883 if (insn->call_dest)
1884 return insn->call_dest->name;
1885
1886 return "{dynamic}";
1887 }
1888
1889 static int validate_call(struct instruction *insn, struct insn_state *state)
1890 {
1891 if (state->uaccess && !func_uaccess_safe(insn->call_dest)) {
1892 WARN_FUNC("call to %s() with UACCESS enabled",
1893 insn->sec, insn->offset, insn_dest_name(insn));
1894 return 1;
1895 }
1896
1897 if (state->df) {
1898 WARN_FUNC("call to %s() with DF set",
1899 insn->sec, insn->offset, insn_dest_name(insn));
1900 return 1;
1901 }
1902
1903 return 0;
1904 }
1905
1906 static int validate_sibling_call(struct instruction *insn, struct insn_state *state)
1907 {
1908 if (has_modified_stack_frame(state)) {
1909 WARN_FUNC("sibling call from callable instruction with modified stack frame",
1910 insn->sec, insn->offset);
1911 return 1;
1912 }
1913
1914 return validate_call(insn, state);
1915 }
1916
1917 /*
1918 * Follow the branch starting at the given instruction, and recursively follow
1919 * any other branches (jumps). Meanwhile, track the frame pointer state at
1920 * each instruction and validate all the rules described in
1921 * tools/objtool/Documentation/stack-validation.txt.
1922 */
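/*
 * Note that @state is passed by value: every recursive call (for jump
 * destinations and alternatives) works on its own copy, so exploring one
 * branch cannot corrupt the stack state seen along another.
 */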
1923 static int validate_branch(struct objtool_file *file, struct instruction *first,
1924 struct insn_state state)
1925 {
1926 struct alternative *alt;
1927 struct instruction *insn, *next_insn;
1928 struct section *sec;
1929 struct symbol *func = NULL;
1930 int ret;
1931
1932 insn = first;
1933 sec = insn->sec;
1934
1935 if (insn->alt_group && list_empty(&insn->alts)) {
1936 WARN_FUNC("don't know how to handle branch to middle of alternative instruction group",
1937 sec, insn->offset);
1938 return 1;
1939 }
1940
1941 while (1) {
1942 next_insn = next_insn_same_sec(file, insn);
1943
1944 if (file->c_file && func && insn->func && func != insn->func->pfunc) {
1945 WARN("%s() falls through to next function %s()",
1946 func->name, insn->func->name);
1947 return 1;
1948 }
1949
1950 if (insn->func)
1951 func = insn->func->pfunc;
1952
1953 if (func && insn->ignore) {
1954 WARN_FUNC("BUG: why am I validating an ignored function?",
1955 sec, insn->offset);
1956 return 1;
1957 }
1958
1959 if (insn->visited) {
1960 if (!insn->hint && !insn_state_match(insn, &state))
1961 return 1;
1962
1963 /* If we were here with AC=0, but now have AC=1, go again */
1964 if (insn->state.uaccess || !state.uaccess)
1965 return 0;
1966 }
1967
1968 if (insn->hint) {
1969 if (insn->restore) {
1970 struct instruction *save_insn, *i;
1971
1972 i = insn;
1973 save_insn = NULL;
1974 func_for_each_insn_continue_reverse(file, insn->func, i) {
1975 if (i->save) {
1976 save_insn = i;
1977 break;
1978 }
1979 }
1980
1981 if (!save_insn) {
1982 WARN_FUNC("no corresponding CFI save for CFI restore",
1983 sec, insn->offset);
1984 return 1;
1985 }
1986
1987 if (!save_insn->visited) {
1988 /*
1989 * Oops, no state to copy yet.
1990 * Hopefully we can reach this
1991 * instruction from another branch
1992 * after the save insn has been
1993 * visited.
1994 */
1995 if (insn == first)
1996 return 0;
1997
1998 WARN_FUNC("objtool isn't smart enough to handle this CFI save/restore combo",
1999 sec, insn->offset);
2000 return 1;
2001 }
2002
2003 insn->state = save_insn->state;
2004 }
2005
2006 state = insn->state;
2007
2008 } else
2009 insn->state = state;
2010
2011 insn->visited = true;
2012
2013 if (!insn->ignore_alts) {
2014 bool skip_orig = false;
2015
2016 list_for_each_entry(alt, &insn->alts, list) {
2017 if (alt->skip_orig)
2018 skip_orig = true;
2019
2020 ret = validate_branch(file, alt->insn, state);
2021 if (ret) {
2022 if (backtrace)
2023 BT_FUNC("(alt)", insn);
2024 return ret;
2025 }
2026 }
2027
2028 if (skip_orig)
2029 return 0;
2030 }
2031
2032 switch (insn->type) {
2033
2034 case INSN_RETURN:
2035 if (state.uaccess && !func_uaccess_safe(func)) {
2036 WARN_FUNC("return with UACCESS enabled", sec, insn->offset);
2037 return 1;
2038 }
2039
2040 if (!state.uaccess && func_uaccess_safe(func)) {
2041 WARN_FUNC("return with UACCESS disabled from a UACCESS-safe function", sec, insn->offset);
2042 return 1;
2043 }
2044
2045 if (state.df) {
2046 WARN_FUNC("return with DF set", sec, insn->offset);
2047 return 1;
2048 }
2049
2050 if (func && has_modified_stack_frame(&state)) {
2051 WARN_FUNC("return with modified stack frame",
2052 sec, insn->offset);
2053 return 1;
2054 }
2055
2056 if (state.bp_scratch) {
2057 WARN("%s uses BP as a scratch register",
2058 insn->func->name);
2059 return 1;
2060 }
2061
2062 return 0;
2063
2064 case INSN_CALL:
2065 case INSN_CALL_DYNAMIC:
2066 ret = validate_call(insn, &state);
2067 if (ret)
2068 return ret;
2069
2070 if (insn->type == INSN_CALL) {
2071 if (is_fentry_call(insn))
2072 break;
2073
2074 ret = dead_end_function(file, insn->call_dest);
2075 if (ret == 1)
2076 return 0;
2077 if (ret == -1)
2078 return 1;
2079 }
2080
2081 if (!no_fp && func && !has_valid_stack_frame(&state)) {
2082 WARN_FUNC("call without frame pointer save/setup",
2083 sec, insn->offset);
2084 return 1;
2085 }
2086 break;
2087
2088 case INSN_JUMP_CONDITIONAL:
2089 case INSN_JUMP_UNCONDITIONAL:
2090 if (func && !insn->jump_dest) {
2091 ret = validate_sibling_call(insn, &state);
2092 if (ret)
2093 return ret;
2094
2095 } else if (insn->jump_dest &&
2096 (!func || !insn->jump_dest->func ||
2097 insn->jump_dest->func->pfunc == func)) {
2098 ret = validate_branch(file, insn->jump_dest,
2099 state);
2100 if (ret) {
2101 if (backtrace)
2102 BT_FUNC("(branch)", insn);
2103 return ret;
2104 }
2105 }
2106
2107 if (insn->type == INSN_JUMP_UNCONDITIONAL)
2108 return 0;
2109
2110 break;
2111
2112 case INSN_JUMP_DYNAMIC:
2113 if (func && list_empty(&insn->alts)) {
2114 ret = validate_sibling_call(insn, &state);
2115 if (ret)
2116 return ret;
2117 }
2118
2119 return 0;
2120
2121 case INSN_CONTEXT_SWITCH:
2122 if (func && (!next_insn || !next_insn->hint)) {
2123 WARN_FUNC("unsupported instruction in callable function",
2124 sec, insn->offset);
2125 return 1;
2126 }
2127 return 0;
2128
2129 case INSN_STACK:
2130 if (update_insn_state(insn, &state))
2131 return 1;
2132
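/*
 * PUSHF saves the current AC (uaccess) flag: shift it onto the
 * uaccess_stack bit stack so a later POPF can restore it.
 */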
2133 if (insn->stack_op.dest.type == OP_DEST_PUSHF) {
2134 if (!state.uaccess_stack) {
2135 state.uaccess_stack = 1;
2136 } else if (state.uaccess_stack >> 31) {
2137 WARN_FUNC("PUSHF stack exhausted", sec, insn->offset);
2138 return 1;
2139 }
2140 state.uaccess_stack <<= 1;
2141 state.uaccess_stack |= state.uaccess;
2142 }
2143
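/*
 * POPF restores the saved AC flag: pop one bit off uaccess_stack.  A
 * remaining value of 1 is just the sentinel set by the first PUSHF,
 * meaning the stack is now empty.
 */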
2144 if (insn->stack_op.src.type == OP_SRC_POPF) {
2145 if (state.uaccess_stack) {
2146 state.uaccess = state.uaccess_stack & 1;
2147 state.uaccess_stack >>= 1;
2148 if (state.uaccess_stack == 1)
2149 state.uaccess_stack = 0;
2150 }
2151 }
2152
2153 break;
2154
2155 case INSN_STAC:
2156 if (state.uaccess) {
2157 WARN_FUNC("recursive UACCESS enable", sec, insn->offset);
2158 return 1;
2159 }
2160
2161 state.uaccess = true;
2162 break;
2163
2164 case INSN_CLAC:
2165 if (!state.uaccess && insn->func) {
2166 WARN_FUNC("redundant UACCESS disable", sec, insn->offset);
2167 return 1;
2168 }
2169
2170 if (func_uaccess_safe(func) && !state.uaccess_stack) {
2171 WARN_FUNC("UACCESS-safe disables UACCESS", sec, insn->offset);
2172 return 1;
2173 }
2174
2175 state.uaccess = false;
2176 break;
2177
2178 case INSN_STD:
2179 if (state.df)
2180 WARN_FUNC("recursive STD", sec, insn->offset);
2181
2182 state.df = true;
2183 break;
2184
2185 case INSN_CLD:
2186 if (!state.df && insn->func)
2187 WARN_FUNC("redundant CLD", sec, insn->offset);
2188
2189 state.df = false;
2190 break;
2191
2192 default:
2193 break;
2194 }
2195
2196 if (insn->dead_end)
2197 return 0;
2198
2199 if (!next_insn) {
2200 if (state.cfa.base == CFI_UNDEFINED)
2201 return 0;
2202 WARN("%s: unexpected end of section", sec->name);
2203 return 1;
2204 }
2205
2206 insn = next_insn;
2207 }
2208
2209 return 0;
2210 }
2211
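/*
 * Some code (e.g. entry assembly) can't be reached by following normal
 * control flow from a function entry point.  UNWIND_HINT annotations
 * supply a starting CFI state; validate a branch from every hint that
 * hasn't already been visited.
 */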
2212 static int validate_unwind_hints(struct objtool_file *file)
2213 {
2214 struct instruction *insn;
2215 int ret, warnings = 0;
2216 struct insn_state state;
2217
2218 if (!file->hints)
2219 return 0;
2220
2221 clear_insn_state(&state);
2222
2223 for_each_insn(file, insn) {
2224 if (insn->hint && !insn->visited) {
2225 ret = validate_branch(file, insn, state);
2226 if (ret && backtrace)
2227 BT_FUNC("<=== (hint)", insn);
2228 warnings += ret;
2229 }
2230 }
2231
2232 return warnings;
2233 }
2234
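/*
 * With retpolines enabled, flag any indirect jump or call that hasn't
 * been annotated as retpoline-safe.
 */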
2235 static int validate_retpoline(struct objtool_file *file)
2236 {
2237 struct instruction *insn;
2238 int warnings = 0;
2239
2240 for_each_insn(file, insn) {
2241 if (insn->type != INSN_JUMP_DYNAMIC &&
2242 insn->type != INSN_CALL_DYNAMIC)
2243 continue;
2244
2245 if (insn->retpoline_safe)
2246 continue;
2247
2248 /*
2249 * .init.text code is run before userspace and thus doesn't
2250 * strictly need retpolines, except for modules, which are
2251 * loaded late; they very much do need retpolines in their
2252 * .init.text.
2253 */
2254 if (!strcmp(insn->sec->name, ".init.text") && !module)
2255 continue;
2256
2257 WARN_FUNC("indirect %s found in RETPOLINE build",
2258 insn->sec, insn->offset,
2259 insn->type == INSN_JUMP_DYNAMIC ? "jump" : "call");
2260
2261 warnings++;
2262 }
2263
2264 return warnings;
2265 }
2266
2267 static bool is_kasan_insn(struct instruction *insn)
2268 {
2269 return (insn->type == INSN_CALL &&
2270 !strcmp(insn->call_dest->name, "__asan_handle_no_return"));
2271 }
2272
2273 static bool is_ubsan_insn(struct instruction *insn)
2274 {
2275 return (insn->type == INSN_CALL &&
2276 !strcmp(insn->call_dest->name,
2277 "__ubsan_handle_builtin_unreachable"));
2278 }
2279
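/*
 * Decide whether an instruction that was never visited by validate_branch()
 * is expected dead code (exception fixups, alternative replacements,
 * KASAN/UBSAN stubs) rather than something worth warning about.
 */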
2280 static bool ignore_unreachable_insn(struct instruction *insn)
2281 {
2282 int i;
2283
2284 if (insn->ignore || insn->type == INSN_NOP)
2285 return true;
2286
2287 /*
2288 * Ignore any unused exceptions. This can happen when a whitelisted
2289 * function has an exception table entry.
2290 *
2291 * Also ignore alternative replacement instructions. This can happen
2292 * when a whitelisted function uses one of the ALTERNATIVE macros.
2293 */
2294 if (!strcmp(insn->sec->name, ".fixup") ||
2295 !strcmp(insn->sec->name, ".altinstr_replacement") ||
2296 !strcmp(insn->sec->name, ".altinstr_aux"))
2297 return true;
2298
2299 /*
2300 * Check if this (or a subsequent) instruction is related to
2301 * CONFIG_UBSAN or CONFIG_KASAN.
2302 *
2303 * End the search at 5 instructions to avoid going into the weeds.
2304 */
2305 if (!insn->func)
2306 return false;
2307 for (i = 0; i < 5; i++) {
2308
2309 if (is_kasan_insn(insn) || is_ubsan_insn(insn))
2310 return true;
2311
2312 if (insn->type == INSN_JUMP_UNCONDITIONAL) {
2313 if (insn->jump_dest &&
2314 insn->jump_dest->func == insn->func) {
2315 insn = insn->jump_dest;
2316 continue;
2317 }
2318
2319 break;
2320 }
2321
2322 if (insn->offset + insn->len >= insn->func->offset + insn->func->len)
2323 break;
2324
2325 insn = list_next_entry(insn, list);
2326 }
2327
2328 return false;
2329 }
2330
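/*
 * Walk every STT_FUNC symbol from its entry point, checking frame pointer
 * usage and tracking the CFI/UACCESS state along every branch.
 */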
2331 static int validate_functions(struct objtool_file *file)
2332 {
2333 struct section *sec;
2334 struct symbol *func;
2335 struct instruction *insn;
2336 struct insn_state state;
2337 int ret, warnings = 0;
2338
2339 clear_insn_state(&state);
2340
2341 state.cfa = initial_func_cfi.cfa;
2342 memcpy(&state.regs, &initial_func_cfi.regs,
2343 CFI_NUM_REGS * sizeof(struct cfi_reg));
2344 state.stack_size = initial_func_cfi.cfa.offset;
2345
2346 for_each_sec(file, sec) {
2347 list_for_each_entry(func, &sec->symbol_list, list) {
2348 if (func->type != STT_FUNC || func->pfunc != func)
2349 continue;
2350
2351 insn = find_insn(file, sec, func->offset);
2352 if (!insn || insn->ignore)
2353 continue;
2354
2355 state.uaccess = func->alias->uaccess_safe;
2356
2357 ret = validate_branch(file, insn, state);
2358 if (ret && backtrace)
2359 BT_FUNC("<=== (func)", insn);
2360 warnings += ret;
2361 }
2362 }
2363
2364 return warnings;
2365 }
2366
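/*
 * Anything still unvisited after all functions and hints have been followed
 * is either known-ignorable dead code or a genuine unreachable instruction.
 */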
2367 static int validate_reachable_instructions(struct objtool_file *file)
2368 {
2369 struct instruction *insn;
2370
2371 if (file->ignore_unreachables)
2372 return 0;
2373
2374 for_each_insn(file, insn) {
2375 if (insn->visited || ignore_unreachable_insn(insn))
2376 continue;
2377
2378 WARN_FUNC("unreachable instruction", insn->sec, insn->offset);
2379 return 1;
2380 }
2381
2382 return 0;
2383 }
2384
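/* Free the decoded instructions and their alternatives, then close the ELF. */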
2385 static void cleanup(struct objtool_file *file)
2386 {
2387 struct instruction *insn, *tmpinsn;
2388 struct alternative *alt, *tmpalt;
2389
2390 list_for_each_entry_safe(insn, tmpinsn, &file->insn_list, list) {
2391 list_for_each_entry_safe(alt, tmpalt, &insn->alts, list) {
2392 list_del(&alt->list);
2393 free(alt);
2394 }
2395 list_del(&insn->list);
2396 hash_del(&insn->hash);
2397 free(insn);
2398 }
2399 elf_close(file->elf);
2400 }
2401
2402 static struct objtool_file file;
2403
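/*
 * Main entry point: decode the object file, run the validation passes and,
 * if ORC generation was requested, write the unwind tables back out.
 */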
2404 int check(const char *_objname, bool orc)
2405 {
2406 int ret, warnings = 0;
2407
2408 objname = _objname;
2409
2410 file.elf = elf_open(objname, orc ? O_RDWR : O_RDONLY);
2411 if (!file.elf)
2412 return 1;
2413
2414 INIT_LIST_HEAD(&file.insn_list);
2415 hash_init(file.insn_hash);
2416 file.c_file = find_section_by_name(file.elf, ".comment");
2417 file.ignore_unreachables = no_unreachable;
2418 file.hints = false;
2419
2420 arch_initial_func_cfi_state(&initial_func_cfi);
2421
2422 ret = decode_sections(&file);
2423 if (ret < 0)
2424 goto out;
2425 warnings += ret;
2426
2427 if (list_empty(&file.insn_list))
2428 goto out;
2429
2430 if (retpoline) {
2431 ret = validate_retpoline(&file);
2432 if (ret < 0)
2433 return ret;
2434 warnings += ret;
2435 }
2436
2437 ret = validate_functions(&file);
2438 if (ret < 0)
2439 goto out;
2440 warnings += ret;
2441
2442 ret = validate_unwind_hints(&file);
2443 if (ret < 0)
2444 goto out;
2445 warnings += ret;
2446
2447 if (!warnings) {
2448 ret = validate_reachable_instructions(&file);
2449 if (ret < 0)
2450 goto out;
2451 warnings += ret;
2452 }
2453
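/* Generate the ORC unwind data and write it back into the object file. */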
2454 if (orc) {
2455 ret = create_orc(&file);
2456 if (ret < 0)
2457 goto out;
2458
2459 ret = create_orc_sections(&file);
2460 if (ret < 0)
2461 goto out;
2462
2463 ret = elf_write(file.elf);
2464 if (ret < 0)
2465 goto out;
2466 }
2467
2468 out:
2469 cleanup(&file);
2470
2471 /* ignore warnings for now until we get all the code cleaned up */
2472 if (ret || warnings)
2473 return 0;
2474 return 0;
2475 }