Commit f0461d01 authored by Ingo Molnar

Merge branches 'tracing/ftrace' and 'tracing/function-graph-tracer' into tracing/core

parents 66eafebc 48d68b20
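This merge enables the function graph tracer on x86-64: the mcount entry hook now diverts each traced function's return address to return_to_handler, so the tracer can report function exits (and thus call durations) as well as entries. Below is a minimal userspace sketch of that shadow-return-stack idea; all names and types are illustrative assumptions, not kernel definitions.

/*
 * Userspace model of the graph tracer's shadow return stack.
 * It imitates, not reproduces, the kernel's
 * prepare_ftrace_return()/return_to_handler pair.
 */
#include <stdio.h>

#define STACK_MAX 64

struct ret_entry {
	const char *func;	/* traced function name */
	long entry_time;	/* stand-in for a timestamp */
};

static struct ret_entry ret_stack[STACK_MAX];
static int curr_ret_stack = -1;
static long fake_clock;

/* Entry hook: push where we came from, like prepare_ftrace_return(). */
static void trace_entry(const char *func)
{
	if (curr_ret_stack + 1 < STACK_MAX) {
		ret_stack[++curr_ret_stack] =
			(struct ret_entry){ func, ++fake_clock };
		printf("enter %s\n", func);
	}
}

/* Exit hook: pop and report, like ftrace_return_to_handler(). */
static void trace_exit(void)
{
	struct ret_entry *e = &ret_stack[curr_ret_stack--];

	printf("leave %s (cost %ld)\n", e->func, ++fake_clock - e->entry_time);
}

static void leaf(void)   { trace_entry("leaf");   trace_exit(); }
static void parent(void) { trace_entry("parent"); leaf(); trace_exit(); }

int main(void)
{
	parent();	/* prints nested enter/leave pairs */
	return 0;
}

Running it prints matched enter/leave pairs with a cost per call, which is essentially the shape of the graph tracer's output.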
@@ -29,7 +29,7 @@ config X86
 	select HAVE_FTRACE_MCOUNT_RECORD
 	select HAVE_DYNAMIC_FTRACE
 	select HAVE_FUNCTION_TRACER
-	select HAVE_FUNCTION_GRAPH_TRACER if X86_32
+	select HAVE_FUNCTION_GRAPH_TRACER
 	select HAVE_FUNCTION_TRACE_MCOUNT_TEST
 	select HAVE_KVM if ((X86_32 && !X86_VOYAGER && !X86_VISWS && !X86_NUMAQ) || X86_64)
 	select HAVE_ARCH_KGDB if !X86_VOYAGER
...
@@ -17,6 +17,7 @@ endif
 ifdef CONFIG_FUNCTION_GRAPH_TRACER
 # Don't trace __switch_to() but let it for function tracer
 CFLAGS_REMOVE_process_32.o = -pg
+CFLAGS_REMOVE_process_64.o = -pg
 endif
 #
...
@@ -98,6 +98,12 @@ ftrace_call:
 	movq (%rsp), %rax
 	addq $0x38, %rsp

+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+.globl ftrace_graph_call
+ftrace_graph_call:
+	jmp ftrace_stub
+#endif
+
 .globl ftrace_stub
 ftrace_stub:
 	retq
@@ -110,6 +116,12 @@ ENTRY(mcount)
 	cmpq $ftrace_stub, ftrace_trace_function
 	jnz trace
+
+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+	cmpq $ftrace_stub, ftrace_graph_return
+	jnz ftrace_graph_caller
+#endif
+
 .globl ftrace_stub
 ftrace_stub:
 	retq
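With dynamic ftrace, the ftrace_graph_call site above starts out as a jmp to ftrace_stub and is live-patched once a graph handler is registered; in the static case, mcount itself compares ftrace_graph_return against ftrace_stub and branches to ftrace_graph_caller. A compact C model of that dispatch follows; the signatures are simplified assumptions, not the kernel's prototypes.

#include <stdio.h>

typedef void (*ftrace_func_t)(unsigned long ip, unsigned long parent_ip);

static void ftrace_stub(unsigned long ip, unsigned long parent_ip) { }

/* Both handlers default to the stub, exactly as the asm assumes. */
static ftrace_func_t ftrace_trace_function = ftrace_stub;
static ftrace_func_t ftrace_graph_return = ftrace_stub;

static void graph_entry(unsigned long ip, unsigned long parent_ip)
{
	printf("graph path taken for ip=%#lx\n", ip);
}

/* Mirrors the cmpq/jnz pairs in mcount above. */
static void mcount_model(unsigned long ip, unsigned long parent_ip)
{
	if (ftrace_trace_function != ftrace_stub) {	/* jnz trace */
		ftrace_trace_function(ip, parent_ip);
		return;
	}
	if (ftrace_graph_return != ftrace_stub)	/* jnz ftrace_graph_caller */
		graph_entry(ip, parent_ip);
}

int main(void)
{
	mcount_model(0x1000, 0x2000);		/* both stubs: no output */
	ftrace_graph_return = graph_entry;	/* register a graph handler */
	mcount_model(0x1000, 0x2000);		/* now takes the graph path */
	return 0;
}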
@@ -145,6 +157,68 @@ END(mcount)
 #endif /* CONFIG_DYNAMIC_FTRACE */
 #endif /* CONFIG_FUNCTION_TRACER */

+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+ENTRY(ftrace_graph_caller)
+	cmpl $0, function_trace_stop
+	jne ftrace_stub
+
+	subq $0x38, %rsp
+	movq %rax, (%rsp)
+	movq %rcx, 8(%rsp)
+	movq %rdx, 16(%rsp)
+	movq %rsi, 24(%rsp)
+	movq %rdi, 32(%rsp)
+	movq %r8, 40(%rsp)
+	movq %r9, 48(%rsp)
+
+	leaq 8(%rbp), %rdi
+	movq 0x38(%rsp), %rsi
+
+	call prepare_ftrace_return
+
+	movq 48(%rsp), %r9
+	movq 40(%rsp), %r8
+	movq 32(%rsp), %rdi
+	movq 24(%rsp), %rsi
+	movq 16(%rsp), %rdx
+	movq 8(%rsp), %rcx
+	movq (%rsp), %rax
+	addq $0x38, %rsp
+	retq
+END(ftrace_graph_caller)
+
+.globl return_to_handler
+return_to_handler:
+	subq $80, %rsp
+
+	movq %rax, (%rsp)
+	movq %rcx, 8(%rsp)
+	movq %rdx, 16(%rsp)
+	movq %rsi, 24(%rsp)
+	movq %rdi, 32(%rsp)
+	movq %r8, 40(%rsp)
+	movq %r9, 48(%rsp)
+	movq %r10, 56(%rsp)
+	movq %r11, 64(%rsp)
+
+	call ftrace_return_to_handler
+
+	movq %rax, 72(%rsp)
+	movq 64(%rsp), %r11
+	movq 56(%rsp), %r10
+	movq 48(%rsp), %r9
+	movq 40(%rsp), %r8
+	movq 32(%rsp), %rdi
+	movq 24(%rsp), %rsi
+	movq 16(%rsp), %rdx
+	movq 8(%rsp), %rcx
+	movq (%rsp), %rax
+	addq $72, %rsp
+	retq
+#endif
+
 #ifndef CONFIG_PREEMPT
 #define retint_kernel retint_restore_args
 #endif
...
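ftrace_graph_caller saves the caller-clobbered argument registers, points %rdi at the return-address slot (8(%rbp)) and %rsi at the saved self address, and lets prepare_ftrace_return() do the swap. return_to_handler additionally saves %r10/%r11 and relies on one trick: ftrace_return_to_handler() returns the original return address in %rax, which is stored at 72(%rsp) so that after addq $72, %rsp the final retq pops it as its jump target. A self-contained sketch of that contract, with illustrative names rather than the kernel's:

/* Toy model of the return_to_handler contract. */
static unsigned long ret_stack[64];	/* shadow return stack */
static int curr_ret_stack = -1;

/* prepare_ftrace_return() pushes the displaced return address. */
static void push_return_addr(unsigned long ret)
{
	ret_stack[++curr_ret_stack] = ret;
}

/*
 * ftrace_return_to_handler() reports the exit, pops the shadow stack
 * and hands back the original return address; the asm above stores it
 * at 72(%rsp) so the final retq pops it as its jump target.
 */
static unsigned long return_to_handler_model(void)
{
	return ret_stack[curr_ret_stack--];
}

int main(void)
{
	push_return_addr(0xdeadbeef);	/* function entry */
	return return_to_handler_model() == 0xdeadbeef ? 0 : 1;
}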
@@ -467,8 +467,13 @@ void prepare_ftrace_return(unsigned long *parent, unsigned long self_addr)
 	 * ignore such a protection.
 	 */
 	asm volatile(
+#ifdef CONFIG_X86_64
+		"1: movq (%[parent_old]), %[old]\n"
+		"2: movq %[return_hooker], (%[parent_replaced])\n"
+#else
 		"1: movl (%[parent_old]), %[old]\n"
 		"2: movl %[return_hooker], (%[parent_replaced])\n"
+#endif
 		"   movl $0, %[faulted]\n"

 		".section .fixup, \"ax\"\n"
@@ -476,8 +481,13 @@ void prepare_ftrace_return(unsigned long *parent, unsigned long self_addr)
 		".previous\n"

 		".section __ex_table, \"a\"\n"
+#ifdef CONFIG_X86_64
+		"	.quad 1b, 3b\n"
+		"	.quad 2b, 3b\n"
+#else
 		"	.long 1b, 3b\n"
 		"	.long 2b, 3b\n"
+#endif
 		".previous\n"

 		: [parent_replaced] "=r" (parent), [old] "=r" (old),
@@ -509,5 +519,4 @@ void prepare_ftrace_return(unsigned long *parent, unsigned long self_addr)
 	ftrace_graph_entry(&trace);
 }
 #endif /* CONFIG_FUNCTION_GRAPH_TRACER */
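The new #ifdef pairs exist because the displaced return address is pointer-sized: 4 bytes on x86-32 (movl stores, .long exception-table entries) but 8 bytes on x86-64 (movq, .quad). The %[faulted] flag stays movl in both builds because it is a 32-bit int. A one-line illustration:

#include <stdio.h>

int main(void)
{
	/* 4 on x86-32 (movl/.long), 8 on x86-64 (movq/.quad) */
	printf("return-address width: %zu bytes\n", sizeof(void *));
	return 0;
}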
@@ -1671,8 +1671,10 @@ static int alloc_retstack_tasklist(struct ftrace_ret_stack **ret_stack_list)
 		}

 		if (t->ret_stack == NULL) {
-			t->ret_stack = ret_stack_list[start++];
 			t->curr_ret_stack = -1;
+			/* Make sure IRQs see the -1 first: */
+			barrier();
+			t->ret_stack = ret_stack_list[start++];
 			atomic_set(&t->trace_overrun, 0);
 		}
 	} while_each_thread(g, t);
...
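The reordering plus barrier() closes a race with interrupts: an IRQ arriving between the two stores could see t->ret_stack non-NULL while t->curr_ret_stack still held stale data. Writing the index first, with a compiler barrier between the stores, guarantees that any IRQ observing the stack pointer also observes the -1; against a same-CPU interrupt a compiler barrier is sufficient, since no hardware reordering is visible there. A minimal model (struct and function names are illustrative):

/* The usual GCC compiler barrier, as in the kernel. */
#define barrier() __asm__ __volatile__("" ::: "memory")

struct task_model {
	int curr_ret_stack;
	void *ret_stack;
};

void publish_ret_stack(struct task_model *t, void *stack)
{
	t->curr_ret_stack = -1;	/* initialize the index first */
	barrier();		/* forbid reordering of the two stores */
	t->ret_stack = stack;	/* IRQs test this pointer before the index */
}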