/*
 *  linux/arch/x86_64/mcount_64.S
 *
 *  Copyright (C) 2014  Steven Rostedt, Red Hat Inc
 */

#include <linux/linkage.h>
#include <asm/ptrace.h>
#include <asm/ftrace.h>


	.code64
	.section .entry.text, "ax"


#ifdef CONFIG_FUNCTION_TRACER

#ifdef CC_USING_FENTRY
# define function_hook	__fentry__
#else
# define function_hook	mcount
#endif

/* All cases save the original rbp (8 bytes) */
#ifdef CONFIG_FRAME_POINTER
# ifdef CC_USING_FENTRY
/* Save parent and function stack frames (rip and rbp) */
#  define MCOUNT_FRAME_SIZE	(8+16*2)
# else
/* Save just function stack frame (rip and rbp) */
#  define MCOUNT_FRAME_SIZE	(8+16)
# endif
#else
/* No need to save a stack frame */
# define MCOUNT_FRAME_SIZE	8
#endif /* CONFIG_FRAME_POINTER */

/* Size of stack used to save mcount regs in save_mcount_regs */
#define MCOUNT_REG_SIZE		(SS+8 + MCOUNT_FRAME_SIZE)
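/*
 * Note: SS here, and the RIP/RAX/... offsets used throughout this
 * file, are the pt_regs slot offsets from <asm/ptrace-abi.h>. ss is
 * the last slot of pt_regs, so SS+8 equals sizeof(struct pt_regs).
 */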

/*
 * gcc -pg option adds a call to 'mcount' in most functions.
 * When -mfentry is used, the call is to '__fentry__' and not 'mcount'
 * and is done before the function's stack frame is set up.
 * They both require a set of regs to be saved before calling
 * any C code and restored before returning back to the function.
 *
 * On boot up, all these calls are converted into nops. When tracing
 * is enabled, the call can jump to either ftrace_caller or
 * ftrace_regs_caller. Callbacks (tracing functions) that require
 * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
 * them. For this reason, a pt_regs sized block will be allocated
 * on the stack and the required mcount registers will be saved
 * in the locations that pt_regs has them in.
 */

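/*
 * For illustration only: with plain -pg, gcc emits the call after the
 * function prologue, roughly
 *
 *	pushq %rbp
 *	movq  %rsp, %rbp
 *	call  mcount
 *
 * whereas with -pg -mfentry the very first instruction of the
 * function is
 *
 *	call  __fentry__
 *
 * In both cases the 5-byte call is what gets patched to a nop (and
 * back) at the call sites.
 */
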
/*
 * @added: the amount of stack added before calling this
 *
 * After this is called, the following registers contain:
 *
 *  %rdi - holds the address that called the trampoline
 *  %rsi - holds the parent function (traced function's return address)
 *  %rdx - holds the original %rbp
 */
.macro save_mcount_regs added=0

	/* Always save the original rbp */
	pushq %rbp

#ifdef CONFIG_FRAME_POINTER
	/*
	 * Stack traces will stop at the ftrace trampoline if the frame pointer
	 * is not set up properly. If fentry is used, we need to save a frame
	 * pointer for the parent as well as the function traced, because the
	 * fentry is called before the stack frame is set up, whereas mcount
	 * is called afterward.
	 */
#ifdef CC_USING_FENTRY
	/* Save the parent pointer (skip orig rbp and our return address) */
	pushq \added+8*2(%rsp)
	pushq %rbp
	movq %rsp, %rbp
	/* Save the return address (now skip orig rbp, rbp and parent) */
	pushq \added+8*3(%rsp)
#else
	/* Can't assume that rip is before this (unless added was zero) */
	pushq \added+8(%rsp)
#endif
	pushq %rbp
	movq %rsp, %rbp
#endif /* CONFIG_FRAME_POINTER */

	/*
	 * We add enough stack to save all regs.
	 */
	subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
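	/*
	 * Sketch of the stack from here on (offsets from the new %rsp;
	 * the frame-pointer entries exist only with CONFIG_FRAME_POINTER):
	 *
	 *	MCOUNT_REG_SIZE+\added(%rsp)	RIP of the mcount/fentry call
	 *	MCOUNT_REG_SIZE-8(%rsp)		original %rbp saved above
	 *	0(%rsp)				pt_regs sized save area
	 */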
	movq %rax, RAX(%rsp)
	movq %rcx, RCX(%rsp)
	movq %rdx, RDX(%rsp)
	movq %rsi, RSI(%rsp)
	movq %rdi, RDI(%rsp)
	movq %r8, R8(%rsp)
	movq %r9, R9(%rsp)
	/*
	 * Save the original RBP. Even though the mcount ABI does not
	 * require this, it helps out callers.
	 */
	movq MCOUNT_REG_SIZE-8(%rsp), %rdx
	movq %rdx, RBP(%rsp)

	/* Copy the parent address into %rsi (second parameter) */
#ifdef CC_USING_FENTRY
	movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi
#else
	/* %rdx contains original %rbp */
	movq 8(%rdx), %rsi
#endif

	/* Move RIP to its proper location */
	movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
	movq %rdi, RIP(%rsp)

	/*
	 * Now %rdi (the first parameter) has the return address of
	 * where ftrace_call returns. But the callbacks expect the
	 * address of the call itself.
	 */
	subq $MCOUNT_INSN_SIZE, %rdi
	.endm

.macro restore_mcount_regs
	movq R9(%rsp), %r9
	movq R8(%rsp), %r8
	movq RDI(%rsp), %rdi
	movq RSI(%rsp), %rsi
	movq RDX(%rsp), %rdx
	movq RCX(%rsp), %rcx
	movq RAX(%rsp), %rax

	/* ftrace_regs_caller can modify %rbp */
	movq RBP(%rsp), %rbp

	addq $MCOUNT_REG_SIZE, %rsp

	.endm

#ifdef CONFIG_DYNAMIC_FTRACE

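/*
 * All mcount/fentry call sites are converted to nops on boot (see the
 * comment above), so this default hook is only ever reached before
 * that patching happens and simply returns.
 */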
ENTRY(function_hook)
	retq
END(function_hook)

ENTRY(ftrace_caller)
	/* save_mcount_regs fills in first two parameters */
	save_mcount_regs

GLOBAL(ftrace_caller_op_ptr)
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* regs go into 4th parameter (but make it NULL) */
	movq $0, %rcx

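	/*
	 * The registers now match the ftrace_func_t callback prototype
	 * (see include/linux/ftrace.h):
	 *
	 *	func(ip,   parent_ip, struct ftrace_ops *op, struct pt_regs *regs)
	 *	     %rdi  %rsi       %rdx                   %rcx
	 */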
GLOBAL(ftrace_call)
	call ftrace_stub

	restore_mcount_regs

	/*
	 * The copied trampoline must call ftrace_epilogue as it
	 * still may need to call the function graph tracer.
	 *
	 * The code up to this label is copied into trampolines so
	 * think twice before adding any new code or changing the
	 * layout here.
	 */
GLOBAL(ftrace_epilogue)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
GLOBAL(ftrace_graph_call)
	jmp ftrace_stub
#endif

/* This is weak to keep gas from relaxing the jumps */
WEAK(ftrace_stub)
	retq
END(ftrace_caller)

ENTRY(ftrace_regs_caller)
	/* Save the current flags before any operations that can change them */
	pushfq

	/* added 8 bytes to save flags */
	save_mcount_regs 8
	/* save_mcount_regs fills in first two parameters */

GLOBAL(ftrace_regs_caller_op_ptr)
	/* Load the ftrace_ops into the 3rd parameter */
	movq function_trace_op(%rip), %rdx

	/* Save the rest of pt_regs */
	movq %r15, R15(%rsp)
	movq %r14, R14(%rsp)
	movq %r13, R13(%rsp)
	movq %r12, R12(%rsp)
	movq %r11, R11(%rsp)
	movq %r10, R10(%rsp)
	movq %rbx, RBX(%rsp)
	/* Copy saved flags */
	movq MCOUNT_REG_SIZE(%rsp), %rcx
	movq %rcx, EFLAGS(%rsp)
	/* Kernel segments */
	movq $__KERNEL_DS, %rcx
	movq %rcx, SS(%rsp)
	movq $__KERNEL_CS, %rcx
	movq %rcx, CS(%rsp)
	/* Stack - skipping return address and flags */
	leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
	movq %rcx, RSP(%rsp)
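	/*
	 * EFLAGS/CS/SS/RSP are filled in too, so the pt_regs handed to
	 * the callbacks (e.g. kprobes via KPROBES_ON_FTRACE) looks like
	 * a complete interrupt frame.
	 */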

	/* regs go into 4th parameter */
	leaq (%rsp), %rcx

GLOBAL(ftrace_regs_call)
	call ftrace_stub

	/* Copy flags back to SS, to restore them */
	movq EFLAGS(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE(%rsp)

	/* Handlers can change the RIP */
	movq RIP(%rsp), %rax
	movq %rax, MCOUNT_REG_SIZE+8(%rsp)
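	/*
	 * MCOUNT_REG_SIZE+8(%rsp) is the return-address slot, so if a
	 * handler changed regs->ip (as live patching does), the final
	 * return lands there instead of back in the traced function.
	 */
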
	/* restore the rest of pt_regs */
	movq R15(%rsp), %r15
	movq R14(%rsp), %r14
	movq R13(%rsp), %r13
	movq R12(%rsp), %r12
	movq R10(%rsp), %r10
	movq RBX(%rsp), %rbx

	restore_mcount_regs

	/* Restore flags */
	popfq

	/*
	 * As this jmp to ftrace_epilogue can be a short jump
	 * it must not be copied into the trampoline.
	 * The trampoline will add the code to jump
	 * to the return.
	 */
GLOBAL(ftrace_regs_caller_end)

	jmp ftrace_epilogue

END(ftrace_regs_caller)


#else /* ! CONFIG_DYNAMIC_FTRACE */

ENTRY(function_hook)
	cmpq $ftrace_stub, ftrace_trace_function
	jnz trace

fgraph_trace:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	cmpq $ftrace_stub, ftrace_graph_return
	jnz ftrace_graph_caller

	cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
	jnz ftrace_graph_caller
#endif

GLOBAL(ftrace_stub)
	retq

trace:
	/* save_mcount_regs fills in first two parameters */
	save_mcount_regs

	/*
	 * When DYNAMIC_FTRACE is not defined, ARCH_SUPPORTS_FTRACE_OPS is not
	 * set (see include/asm/ftrace.h and include/linux/ftrace.h). Only the
	 * ip and parent ip are used and the list function is called when
	 * function tracing is enabled.
	 */
	call *ftrace_trace_function

	restore_mcount_regs

	jmp fgraph_trace
END(function_hook)
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
	/* Saves rbp into %rdx and fills first parameter */
	save_mcount_regs

#ifdef CC_USING_FENTRY
	leaq MCOUNT_REG_SIZE+8(%rsp), %rsi
	movq $0, %rdx	/* No framepointers needed */
#else
	/* Save address of the return address of traced function */
	leaq 8(%rdx), %rsi
	/* ftrace does sanity checks against frame pointers */
	movq (%rdx), %rdx
#endif
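	/*
	 * At this point: %rdi holds the address of the fentry/mcount
	 * call site, %rsi points at the return-address slot to be
	 * hijacked, and %rdx holds the frame pointer used for the
	 * sanity checks (or 0).
	 */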
	call prepare_ftrace_return

	restore_mcount_regs

	retq
END(ftrace_graph_caller)

GLOBAL(return_to_handler)
	subq $24, %rsp

	/* Save the return values */
	movq %rax, (%rsp)
	movq %rdx, 8(%rsp)
	movq %rbp, %rdi
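	/*
	 * %rax/%rdx may carry a 128-bit return value of the traced
	 * function, which is why both are preserved. %rbp is passed so
	 * ftrace_return_to_handler() can sanity-check the frame
	 * pointer; it hands back the original return address in %rax.
	 */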

	call ftrace_return_to_handler

	movq %rax, %rdi
	movq 8(%rsp), %rdx
	movq (%rsp), %rax
	addq $24, %rsp
	jmp *%rdi
#endif