/*
 * arch/x86/kernel/ftrace_64.S
 * (retrieved from git.proxmox.com, mirror_ubuntu-artful-kernel.git,
 *  merge of branch 'pm-cpufreq')
 */
1 /*
2 * Copyright (C) 2014 Steven Rostedt, Red Hat Inc
3 */
4
5 #include <linux/linkage.h>
6 #include <asm/ptrace.h>
7 #include <asm/ftrace.h>
8 #include <asm/export.h>
9
10
11 .code64
12 .section .entry.text, "ax"
13
14 #ifdef CC_USING_FENTRY
15 # define function_hook __fentry__
16 EXPORT_SYMBOL(__fentry__)
17 #else
18 # define function_hook mcount
19 EXPORT_SYMBOL(mcount)
20 #endif
21
22 /* All cases save the original rbp (8 bytes) */
23 #ifdef CONFIG_FRAME_POINTER
24 # ifdef CC_USING_FENTRY
25 /* Save parent and function stack frames (rip and rbp) */
26 # define MCOUNT_FRAME_SIZE (8+16*2)
27 # else
28 /* Save just function stack frame (rip and rbp) */
29 # define MCOUNT_FRAME_SIZE (8+16)
30 # endif
31 #else
32 /* No need to save a stack frame */
33 # define MCOUNT_FRAME_SIZE 8
34 #endif /* CONFIG_FRAME_POINTER */
35
36 /* Size of stack used to save mcount regs in save_mcount_regs */
37 #define MCOUNT_REG_SIZE (SS+8 + MCOUNT_FRAME_SIZE)
38
39 /*
40 * gcc -pg option adds a call to 'mcount' in most functions.
41 * When -mfentry is used, the call is to 'fentry' and not 'mcount'
42 * and is done before the function's stack frame is set up.
43 * They both require a set of regs to be saved before calling
44 * any C code and restored before returning back to the function.
45 *
46 * On boot up, all these calls are converted into nops. When tracing
47 * is enabled, the call can jump to either ftrace_caller or
48 * ftrace_regs_caller. Callbacks (tracing functions) that require
49 * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
50 * it. For this reason, the size of the pt_regs structure will be
51 * allocated on the stack and the required mcount registers will
52 * be saved in the locations that pt_regs has them in.
53 */
54
55 /*
56 * @added: the amount of stack added before calling this
57 *
58 * After this is called, the following registers contain:
59 *
60 * %rdi - holds the address that called the trampoline
61 * %rsi - holds the parent function (traced function's return address)
62 * %rdx - holds the original %rbp
63 */
/*
 * Allocates a pt_regs-sized area (MCOUNT_REG_SIZE bytes, which already
 * includes the MCOUNT_FRAME_SIZE frame-pointer scratch pushed below) and
 * stores the mcount-ABI registers (rax, rcx, rdx, rsi, rdi, r8, r9) plus
 * rbp and rip into their pt_regs slots.  The callee-saved registers and
 * the segment/flags/rsp slots are NOT filled in here; ftrace_regs_caller
 * completes them when a full pt_regs is required.
 */
64 .macro save_mcount_regs added=0
65
66 /* Always save the original rbp */
67 pushq %rbp
68
69 #ifdef CONFIG_FRAME_POINTER
70 /*
71 * Stack traces will stop at the ftrace trampoline if the frame pointer
72 * is not set up properly. If fentry is used, we need to save a frame
73 * pointer for the parent as well as the function traced, because the
74 * fentry is called before the stack frame is set up, where as mcount
75 * is called afterward.
76 */
77 #ifdef CC_USING_FENTRY
78 /* Save the parent pointer (skip orig rbp and our return address) */
79 pushq \added+8*2(%rsp)
80 pushq %rbp
81 movq %rsp, %rbp
82 /* Save the return address (now skip orig rbp, rbp and parent) */
83 pushq \added+8*3(%rsp)
84 #else
85 /* Can't assume that rip is before this (unless added was zero) */
86 pushq \added+8(%rsp)
87 #endif
/* Finish the innermost fake frame: saved rip (just pushed) + this rbp */
88 pushq %rbp
89 movq %rsp, %rbp
90 #endif /* CONFIG_FRAME_POINTER */
91
92 /*
93 * We add enough stack to save all regs.
94 * (MCOUNT_FRAME_SIZE bytes were already pushed above, so only the
95 * remainder of MCOUNT_REG_SIZE is subtracted here.)
96 */
95 subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
96 movq %rax, RAX(%rsp)
97 movq %rcx, RCX(%rsp)
98 movq %rdx, RDX(%rsp)
99 movq %rsi, RSI(%rsp)
100 movq %rdi, RDI(%rsp)
101 movq %r8, R8(%rsp)
102 movq %r9, R9(%rsp)
103 /*
104 * Save the original RBP. Even though the mcount ABI does not
105 * require this, it helps out callers.
106 */
/* MCOUNT_REG_SIZE-8(%rsp) is the "orig rbp" slot pushed first above */
107 movq MCOUNT_REG_SIZE-8(%rsp), %rdx
108 movq %rdx, RBP(%rsp)
109
110 /* Copy the parent address into %rsi (second parameter) */
111 #ifdef CC_USING_FENTRY
/* With fentry the parent rip sits just above our own return address */
112 movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi
113 #else
114 /* %rdx contains original %rbp; parent rip is its saved return addr */
115 movq 8(%rdx), %rsi
116 #endif
117
118 /* Move RIP to its proper location */
119 movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
120 movq %rdi, RIP(%rsp)
121
122 /*
123 * Now %rdi (the first parameter) has the return address of
124 * where ftrace_call returns. But the callbacks expect the
125 * address of the call itself.
126 */
127 subq $MCOUNT_INSN_SIZE, %rdi
128 .endm
129
/*
 * Counterpart of save_mcount_regs: reloads the mcount-ABI registers
 * (and rbp) from their pt_regs slots, then pops the entire
 * MCOUNT_REG_SIZE area, leaving the stack as it was before
 * save_mcount_regs ran (any "added" bytes remain for the caller to pop).
 */
130 .macro restore_mcount_regs
131 movq R9(%rsp), %r9
132 movq R8(%rsp), %r8
133 movq RDI(%rsp), %rdi
134 movq RSI(%rsp), %rsi
135 movq RDX(%rsp), %rdx
136 movq RCX(%rsp), %rcx
137 movq RAX(%rsp), %rax
138
139 /* ftrace_regs_caller can modify %rbp */
140 movq RBP(%rsp), %rbp
141
/* Drop the whole pt_regs area incl. the frame-pointer scratch pushes */
142 addq $MCOUNT_REG_SIZE, %rsp
143
144 .endm
145
146 #ifdef CONFIG_DYNAMIC_FTRACE
147
/*
 * With CONFIG_DYNAMIC_FTRACE the compiler-inserted mcount/fentry call
 * sites are rewritten at runtime (nop when idle, direct call to
 * ftrace_caller/ftrace_regs_caller when tracing — see the header
 * comment above), so this default hook only needs to return.
 */
148 ENTRY(function_hook)
149 retq
150 END(function_hook)
151
/*
 * Lightweight tracing trampoline (no full pt_regs): saves the mcount
 * regs, loads (ip, parent_ip, ftrace_ops, NULL regs) into the four
 * parameter registers, and calls through the patchable ftrace_call
 * site.  The GLOBAL() labels below are runtime patch anchors used by
 * the dynamic-ftrace code; everything up to ftrace_epilogue may be
 * copied into per-ops trampolines, so the layout is ABI here.
 */
152 ENTRY(ftrace_caller)
153 /* save_mcount_regs fills in first two parameters */
154 save_mcount_regs
155
156 GLOBAL(ftrace_caller_op_ptr)
157 /* Load the ftrace_ops into the 3rd parameter */
158 movq function_trace_op(%rip), %rdx
159
160 /* regs go into 4th parameter (but make it NULL) */
161 movq $0, %rcx
162
/* Patch site: this call is rewritten to invoke the active tracer */
163 GLOBAL(ftrace_call)
164 call ftrace_stub
165
166 restore_mcount_regs
167
168 /*
169 * The copied trampoline must call ftrace_epilogue as it
170 * still may need to call the function graph tracer.
171 *
172 * The code up to this label is copied into trampolines so
173 * think twice before adding any new code or changing the
174 * layout here.
175 */
176 GLOBAL(ftrace_epilogue)
177
178 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
/* Patch site: becomes a jmp to ftrace_graph_caller when graph tracing */
179 GLOBAL(ftrace_graph_call)
180 jmp ftrace_stub
181 #endif
182
183 /* This is weak to keep gas from relaxing the jumps */
184 WEAK(ftrace_stub)
185 retq
186 END(ftrace_caller)
187
/*
 * Full-pt_regs trampoline, used by callbacks that set
 * FTRACE_OPS_FL_SAVE_REGS (e.g. kprobes).  On top of what
 * save_mcount_regs stores it fills in the callee-saved registers,
 * flags, segment and stack slots, and hands the callback a pointer to
 * the complete pt_regs in the 4th parameter.  The callback may modify
 * the regs; flags and RIP are copied back out afterwards so the
 * changes take effect on return.
 */
188 ENTRY(ftrace_regs_caller)
189 /* Save the current flags before any operations that can change them */
190 pushfq
191
192 /* added 8 bytes to save flags */
193 save_mcount_regs 8
194 /* save_mcount_regs fills in first two parameters */
195
196 GLOBAL(ftrace_regs_caller_op_ptr)
197 /* Load the ftrace_ops into the 3rd parameter */
198 movq function_trace_op(%rip), %rdx
199
200 /* Save the rest of pt_regs */
201 movq %r15, R15(%rsp)
202 movq %r14, R14(%rsp)
203 movq %r13, R13(%rsp)
204 movq %r12, R12(%rsp)
205 movq %r11, R11(%rsp)
206 movq %r10, R10(%rsp)
207 movq %rbx, RBX(%rsp)
208 /* Copy saved flags */
/* MCOUNT_REG_SIZE(%rsp) is the slot pushed by pushfq above */
209 movq MCOUNT_REG_SIZE(%rsp), %rcx
210 movq %rcx, EFLAGS(%rsp)
211 /* Kernel segments */
212 movq $__KERNEL_DS, %rcx
213 movq %rcx, SS(%rsp)
214 movq $__KERNEL_CS, %rcx
215 movq %rcx, CS(%rsp)
216 /* Stack - skipping return address and flags */
217 leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
218 movq %rcx, RSP(%rsp)
219
220 /* regs go into 4th parameter */
221 leaq (%rsp), %rcx
222
/* Patch site: this call is rewritten to invoke the active tracer */
223 GLOBAL(ftrace_regs_call)
224 call ftrace_stub
225
226 /* Copy flags back to SS, to restore them */
/* (reuses the pushfq slot so the popfq below sees updated flags) */
227 movq EFLAGS(%rsp), %rax
228 movq %rax, MCOUNT_REG_SIZE(%rsp)
229
230 /* Handlers can change the RIP */
/* Write it over our return address so retq resumes wherever RIP says */
231 movq RIP(%rsp), %rax
232 movq %rax, MCOUNT_REG_SIZE+8(%rsp)
233
234 /* restore the rest of pt_regs */
/*
 * NOTE(review): %r11 is saved into pt_regs above but deliberately not
 * reloaded here (matches upstream); it is treated as scratch — confirm
 * if a callback is ever expected to hand back a modified r11.
 */
235 movq R15(%rsp), %r15
236 movq R14(%rsp), %r14
237 movq R13(%rsp), %r13
238 movq R12(%rsp), %r12
239 movq R10(%rsp), %r10
240 movq RBX(%rsp), %rbx
241
242 restore_mcount_regs
243
244 /* Restore flags */
245 popfq
246
247 /*
248 * As this jmp to ftrace_epilogue can be a short jump
249 * it must not be copied into the trampoline.
250 * The trampoline will add the code to jump
251 * to the return.
252 */
253 GLOBAL(ftrace_regs_caller_end)
254
255 jmp ftrace_epilogue
256
257 END(ftrace_regs_caller)
258
259
260 #else /* ! CONFIG_DYNAMIC_FTRACE */
261
/*
 * Static (!CONFIG_DYNAMIC_FTRACE) hook: every traced function calls
 * here unconditionally, so dispatch is done by comparing the function
 * pointers against their stub defaults at runtime instead of by code
 * patching.  Order matters: the function tracer is checked first, then
 * the graph tracer, and the common "nothing enabled" case falls
 * through to ftrace_stub's retq.
 */
262 ENTRY(function_hook)
/* Function tracer installed? (ftrace_trace_function != ftrace_stub) */
263 cmpq $ftrace_stub, ftrace_trace_function
264 jnz trace
265
266 fgraph_trace:
267 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
/* Graph tracer installed? Either hook differing from its stub counts */
268 cmpq $ftrace_stub, ftrace_graph_return
269 jnz ftrace_graph_caller
270
271 cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
272 jnz ftrace_graph_caller
273 #endif
274
275 GLOBAL(ftrace_stub)
276 retq
277
278 trace:
279 /* save_mcount_regs fills in first two parameters */
280 save_mcount_regs
281
282 /*
283 * When DYNAMIC_FTRACE is not defined, ARCH_SUPPORTS_FTRACE_OPS is not
284 * set (see include/asm/ftrace.h and include/linux/ftrace.h). Only the
285 * ip and parent ip are used and the list function is called when
286 * function tracing is enabled.
287 */
288 call *ftrace_trace_function
289
290 restore_mcount_regs
291
/* Still give the graph tracer a chance after the function tracer ran */
292 jmp fgraph_trace
293 END(function_hook)
294 #endif /* CONFIG_DYNAMIC_FTRACE */
295
296 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * Entry hook for the function-graph tracer: builds the three arguments
 * for prepare_ftrace_return (self ip in %rdi via save_mcount_regs,
 * address of the parent return-address slot in %rsi, frame pointer in
 * %rdx) so the return address can be hijacked to return_to_handler.
 */
297 ENTRY(ftrace_graph_caller)
298 /* Saves rbp into %rdx and fills first parameter */
299 save_mcount_regs
300
301 #ifdef CC_USING_FENTRY
/* With fentry the parent ret-addr slot lies just above our own */
302 leaq MCOUNT_REG_SIZE+8(%rsp), %rsi
303 movq $0, %rdx /* No framepointers needed */
304 #else
305 /* Save address of the return address of traced function */
306 leaq 8(%rdx), %rsi
307 /* ftrace does sanity checks against frame pointers */
308 movq (%rdx), %rdx
309 #endif
310 call prepare_ftrace_return
311
312 restore_mcount_regs
313
314 retq
315 END(ftrace_graph_caller)
316
/*
 * Executed in place of a traced function's return (its return address
 * was replaced by prepare_ftrace_return).  Preserves the function's
 * return value pair (%rax/%rdx) across the call to
 * ftrace_return_to_handler, which hands back the original return
 * address; we then tail-jump there.
 */
317 GLOBAL(return_to_handler)
/* 24 bytes: two 8-byte save slots plus padding — presumably to keep
 * %rsp 16-byte aligned for the C call below; TODO confirm entry state */
318 subq $24, %rsp
319
320 /* Save the return values */
321 movq %rax, (%rsp)
322 movq %rdx, 8(%rsp)
/* Pass the frame pointer for ftrace's frame-pointer sanity checking */
323 movq %rbp, %rdi
324
325 call ftrace_return_to_handler
326
/* %rax now holds the original return address; move it out of the way */
327 movq %rax, %rdi
328 movq 8(%rsp), %rdx
329 movq (%rsp), %rax
330 addq $24, %rsp
/* Tail-jump back to the real caller with %rax/%rdx intact */
331 jmp *%rdi
332 #endif