/*
 * Imported from the Linux kernel tree (mirror_ubuntu-jammy-kernel on
 * git.proxmox.com): tools/objtool/arch/x86/decode.c, as of commit
 * "x86,objtool: Create .return_sites".
 */
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
4 */
5
6 #include <stdio.h>
7 #include <stdlib.h>
8
9 #define unlikely(cond) (cond)
10 #include <asm/insn.h>
11 #include "../../../arch/x86/lib/inat.c"
12 #include "../../../arch/x86/lib/insn.c"
13
14 #define CONFIG_64BIT 1
15 #include <asm/nops.h>
16
17 #include <asm/orc_types.h>
18 #include <objtool/check.h>
19 #include <objtool/elf.h>
20 #include <objtool/arch.h>
21 #include <objtool/warn.h>
22 #include <objtool/endianness.h>
23 #include <arch/elf.h>
24
25 static int is_x86_64(const struct elf *elf)
26 {
27 switch (elf->ehdr.e_machine) {
28 case EM_X86_64:
29 return 1;
30 case EM_386:
31 return 0;
32 default:
33 WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
34 return -1;
35 }
36 }
37
38 bool arch_callee_saved_reg(unsigned char reg)
39 {
40 switch (reg) {
41 case CFI_BP:
42 case CFI_BX:
43 case CFI_R12:
44 case CFI_R13:
45 case CFI_R14:
46 case CFI_R15:
47 return true;
48
49 case CFI_AX:
50 case CFI_CX:
51 case CFI_DX:
52 case CFI_SI:
53 case CFI_DI:
54 case CFI_SP:
55 case CFI_R8:
56 case CFI_R9:
57 case CFI_R10:
58 case CFI_R11:
59 case CFI_RA:
60 default:
61 return false;
62 }
63 }
64
/*
 * A PC-relative relocation applies at the start of a 4-byte field; the
 * referenced destination is therefore the addend advanced past that
 * field, i.e. addend + 4.
 */
unsigned long arch_dest_reloc_offset(int addend)
{
	unsigned long dest = (unsigned long)addend;

	return dest + 4;
}
69
70 unsigned long arch_jump_destination(struct instruction *insn)
71 {
72 return insn->offset + insn->len + insn->immediate;
73 }
74
/*
 * Allocate a zeroed stack_op, append it to ops_list, and execute the
 * block attached to the macro invocation exactly once to fill it in:
 *
 *	ADD_OP(op) { op->src... = ...; }
 *
 * On allocation failure this returns -1 from the *enclosing* function.
 * The else-for construction makes the trailing block run once (op is
 * reset to NULL after the first iteration).
 */
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (list_add_tail(&op->list, ops_list); op; op = NULL)

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 *  00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 *  01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 *  10 |  [r/m + d32]   |[S+D]|  [r/m + d32]  |
 *  11 |                 r/ m                 |
 */

/* Mod == 3 selects a register operand; anything else is a memory operand. */
#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

/* Mod 00 + r/m 101 is RIP-relative; r/m 100 in a memory mode means a SIB byte follows. */
#define is_RIP()	((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
#define have_SIB()	((modrm_rm & 7) == CFI_SP && mod_is_mem())

/*
 * Does the r/m operand name @reg?  With a SIB byte, only the trivial
 * form (base == reg, no index — index encoding 100 means "none") counts.
 */
#define rm_is(reg)	(have_SIB() ? \
			 sib_base == (reg) && sib_index == CFI_SP : \
			 modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
104
/*
 * Decode one x86 instruction and classify it for objtool.
 *
 * @elf:       object being analyzed; selects 32- vs 64-bit decode mode
 * @sec:       section containing the instruction
 * @offset:    offset of the instruction within @sec
 * @maxlen:    bytes available at @offset
 * @len:       out: decoded instruction length in bytes
 * @type:      out: INSN_* classification (INSN_OTHER if uninteresting)
 * @immediate: out: immediate operand value, 0 if none
 * @ops_list:  out: stack_op entries describing the instruction's effect
 *             on the stack/frame state (appended via ADD_OP)
 *
 * Returns 0 on success, -1 on decode failure (or out-of-memory inside
 * ADD_OP, which returns -1 directly from within the switch below).
 */
int arch_decode_instruction(const struct elf *elf, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate,
			    struct list_head *ops_list)
{
	struct insn insn;
	int x86_64, ret;
	unsigned char op1, op2,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&insn, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	/* VEX-encoded instructions are left as INSN_OTHER; not decoded further. */
	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];

	/* REX bits extend the ModRM/SIB register fields to 16 registers. */
	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm  = X86_MODRM_RM(modrm)  + 8*rex_b;
	}

	if (insn.sib.nbytes) {
		sib = insn.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base  = X86_SIB_BASE(sib)  + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = insn.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			imm = -imm;
			/* fallthrough */
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = insn.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = insn.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip RIP relative displacement */
		if (is_RIP())
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = insn.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:

		/* two-byte opcodes: classify by the second opcode byte */
		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xcc:
		/* int3 */
		*type = INSN_TRAP;
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */

	case 0xca: /* retf */
	case 0xcb: /* retf */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe8:
		*type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		*type = INSN_CLD;
		break;

	case 0xfd:
		*type = INSN_STD;
		break;

	case 0xff:
		/* group 5: classification depends on the ModRM reg field */
		if (modrm_reg == 2 || modrm_reg == 3)

			*type = INSN_CALL_DYNAMIC;

		else if (modrm_reg == 4)

			*type = INSN_JUMP_DYNAMIC;

		else if (modrm_reg == 5)

			/* jmpf */
			*type = INSN_CONTEXT_SWITCH;

		else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}
630
631 void arch_initial_func_cfi_state(struct cfi_init_state *state)
632 {
633 int i;
634
635 for (i = 0; i < CFI_NUM_REGS; i++) {
636 state->regs[i].base = CFI_UNDEFINED;
637 state->regs[i].offset = 0;
638 }
639
640 /* initial CFA (call frame address) */
641 state->cfa.base = CFI_SP;
642 state->cfa.offset = 8;
643
644 /* initial RA (return address) */
645 state->regs[CFI_RA].base = CFI_CFA;
646 state->regs[CFI_RA].offset = -8;
647 }
648
649 const char *arch_nop_insn(int len)
650 {
651 static const char nops[5][5] = {
652 { BYTES_NOP1 },
653 { BYTES_NOP2 },
654 { BYTES_NOP3 },
655 { BYTES_NOP4 },
656 { BYTES_NOP5 },
657 };
658
659 if (len < 1 || len > 5) {
660 WARN("invalid NOP size: %d\n", len);
661 return NULL;
662 }
663
664 return nops[len-1];
665 }
666
667 #define BYTE_RET 0xC3
668
669 const char *arch_ret_insn(int len)
670 {
671 static const char ret[5][5] = {
672 { BYTE_RET },
673 { BYTE_RET, 0xcc },
674 { BYTE_RET, 0xcc, BYTES_NOP1 },
675 { BYTE_RET, 0xcc, BYTES_NOP2 },
676 { BYTE_RET, 0xcc, BYTES_NOP3 },
677 };
678
679 if (len < 1 || len > 5) {
680 WARN("invalid RET size: %d\n", len);
681 return NULL;
682 }
683
684 return ret[len-1];
685 }
686
687 int arch_decode_hint_reg(u8 sp_reg, int *base)
688 {
689 switch (sp_reg) {
690 case ORC_REG_UNDEFINED:
691 *base = CFI_UNDEFINED;
692 break;
693 case ORC_REG_SP:
694 *base = CFI_SP;
695 break;
696 case ORC_REG_BP:
697 *base = CFI_BP;
698 break;
699 case ORC_REG_SP_INDIRECT:
700 *base = CFI_SP_INDIRECT;
701 break;
702 case ORC_REG_R10:
703 *base = CFI_R10;
704 break;
705 case ORC_REG_R13:
706 *base = CFI_R13;
707 break;
708 case ORC_REG_DI:
709 *base = CFI_DI;
710 break;
711 case ORC_REG_DX:
712 *base = CFI_DX;
713 break;
714 default:
715 return -1;
716 }
717
718 return 0;
719 }
720
721 bool arch_is_retpoline(struct symbol *sym)
722 {
723 return !strncmp(sym->name, "__x86_indirect_", 15);
724 }
725
726 bool arch_is_rethunk(struct symbol *sym)
727 {
728 return !strcmp(sym->name, "__x86_return_thunk");
729 }