Commit f0461d01 authored Dec 03, 2008 by Ingo Molnar

Merge branches 'tracing/ftrace' and 'tracing/function-graph-tracer' into tracing/core

Parents: 66eafebc 48d68b20
Showing 5 changed files with 89 additions and 3 deletions:

  arch/x86/Kconfig            +1  -1
  arch/x86/kernel/Makefile    +1  -0
  arch/x86/kernel/entry_64.S  +74 -0
  arch/x86/kernel/ftrace.c    +10 -1
  kernel/trace/ftrace.c       +3  -1
arch/x86/Kconfig

@@ -29,7 +29,7 @@ config X86
 	select HAVE_FTRACE_MCOUNT_RECORD
 	select HAVE_DYNAMIC_FTRACE
 	select HAVE_FUNCTION_TRACER
-	select HAVE_FUNCTION_GRAPH_TRACER if X86_32
+	select HAVE_FUNCTION_GRAPH_TRACER
 	select HAVE_FUNCTION_TRACE_MCOUNT_TEST
 	select HAVE_KVM if ((X86_32 && !X86_VOYAGER && !X86_VISWS && !X86_NUMAQ) || X86_64)
 	select HAVE_ARCH_KGDB if !X86_VOYAGER
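The hunk above drops the `if X86_32` guard: with the x86-64 trampolines added to entry_64.S below, HAVE_FUNCTION_GRAPH_TRACER can now be selected on both 32-bit and 64-bit builds.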
arch/x86/kernel/Makefile

@@ -17,6 +17,7 @@ endif
 ifdef CONFIG_FUNCTION_GRAPH_TRACER
 # Don't trace __switch_to() but let it for function tracer
 CFLAGS_REMOVE_process_32.o = -pg
+CFLAGS_REMOVE_process_64.o = -pg
 endif
 #
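For context on the flag being stripped: building with `-pg` makes gcc emit a profiling call at the entry of every function, which is the hook ftrace patches; removing it for process_64.o keeps `__switch_to()` itself out of the trace, mirroring what was already done for the 32-bit file. A rough user-space illustration of the instrumentation (hypothetical stub name, not kernel code):

#include <stdio.h>

/* Stand-in for the mcount hook; with "gcc -pg" the compiler emits a
 * call like this at the entry of every function it compiles. */
static void fake_mcount(void) { puts("function entered"); }

/* A function built with -pg behaves roughly as if written as: */
static int add(int a, int b)
{
	fake_mcount();	/* inserted by the compiler, not the author */
	return a + b;
}

int main(void)
{
	return add(2, 3) == 5 ? 0 : 1;
}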
arch/x86/kernel/entry_64.S

@@ -98,6 +98,12 @@ ftrace_call:
 	movq (%rsp), %rax
 	addq $0x38, %rsp

+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+.globl ftrace_graph_call
+ftrace_graph_call:
+	jmp ftrace_stub
+#endif
+
 .globl ftrace_stub
 ftrace_stub:
 	retq

@@ -110,6 +116,12 @@ ENTRY(mcount)
 	cmpq $ftrace_stub, ftrace_trace_function
 	jnz trace

+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+	cmpq $ftrace_stub, ftrace_graph_return
+	jnz ftrace_graph_caller
+#endif
+
 .globl ftrace_stub
 ftrace_stub:
 	retq

@@ -145,6 +157,68 @@ END(mcount)
 #endif /* CONFIG_DYNAMIC_FTRACE */
 #endif /* CONFIG_FUNCTION_TRACER */

+#ifdef CONFIG_FUNCTION_GRAPH_TRACER
+ENTRY(ftrace_graph_caller)
+	cmpl $0, function_trace_stop
+	jne ftrace_stub
+
+	subq $0x38, %rsp
+	movq %rax, (%rsp)
+	movq %rcx, 8(%rsp)
+	movq %rdx, 16(%rsp)
+	movq %rsi, 24(%rsp)
+	movq %rdi, 32(%rsp)
+	movq %r8, 40(%rsp)
+	movq %r9, 48(%rsp)
+
+	leaq 8(%rbp), %rdi
+	movq 0x38(%rsp), %rsi
+
+	call prepare_ftrace_return
+
+	movq 48(%rsp), %r9
+	movq 40(%rsp), %r8
+	movq 32(%rsp), %rdi
+	movq 24(%rsp), %rsi
+	movq 16(%rsp), %rdx
+	movq 8(%rsp), %rcx
+	movq (%rsp), %rax
+	addq $0x38, %rsp
+	retq
+END(ftrace_graph_caller)
+
+.globl return_to_handler
+return_to_handler:
+	subq $80, %rsp
+
+	movq %rax, (%rsp)
+	movq %rcx, 8(%rsp)
+	movq %rdx, 16(%rsp)
+	movq %rsi, 24(%rsp)
+	movq %rdi, 32(%rsp)
+	movq %r8, 40(%rsp)
+	movq %r9, 48(%rsp)
+	movq %r10, 56(%rsp)
+	movq %r11, 64(%rsp)
+
+	call ftrace_return_to_handler
+
+	movq %rax, 72(%rsp)
+	movq 64(%rsp), %r11
+	movq 56(%rsp), %r10
+	movq 48(%rsp), %r9
+	movq 40(%rsp), %r8
+	movq 32(%rsp), %rdi
+	movq 24(%rsp), %rsi
+	movq 16(%rsp), %rdx
+	movq 8(%rsp), %rcx
+	movq (%rsp), %rax
+	addq $72, %rsp
+	retq
+#endif
+
 #ifndef CONFIG_PREEMPT
 #define retint_kernel retint_restore_args
 #endif
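Taken together: ftrace_graph_caller saves %rax and the six argument registers (mcount runs before the traced function has consumed its arguments), hands prepare_ftrace_return a pointer to the parent's return-address slot at 8(%rbp) plus the call site, and restores everything. return_to_handler is the address that gets patched into that slot; its retq jumps to the real return address that ftrace_return_to_handler leaves on top of the stack. A user-space model of the bookkeeping those two C helpers do (hypothetical names, a flat array standing in for the per-task ret_stack):

#include <stdio.h>

/* Shadow stack standing in for t->ret_stack: one slot per live,
 * traced call frame. */
struct ret_entry { void *real_ret; int depth; };
static struct ret_entry shadow[50];
static int curr = -1;			/* mirrors t->curr_ret_stack */

static char return_hooker;		/* stand-in for return_to_handler */

/* Model of prepare_ftrace_return(): push the real return address and
 * report the trampoline address to install in its place. */
static void *hook_return(void *real_ret)
{
	if (curr + 1 >= 50)
		return real_ret;	/* overrun: leave this frame alone */
	curr++;
	shadow[curr] = (struct ret_entry){ real_ret, curr };
	return &return_hooker;
}

/* Model of ftrace_return_to_handler(): emit the exit event and hand
 * the real return address back to the trampoline. */
static void *pop_return(void)
{
	struct ret_entry e = shadow[curr--];
	printf("exit at depth %d\n", e.depth);
	return e.real_ret;
}

int main(void)
{
	void *slot = (void *)0;			/* pretend return-address slot */
	slot = hook_return(slot);		/* "function entry" */
	printf("patched slot -> %p\n", slot);	/* now points at the trampoline */
	printf("real return  -> %p\n", pop_return());	/* "function exit" */
	return 0;
}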
arch/x86/kernel/ftrace.c

@@ -467,8 +467,13 @@ void prepare_ftrace_return(unsigned long *parent, unsigned long self_addr)
 	 * ignore such a protection.
 	 */
 	asm volatile(
+#ifdef CONFIG_X86_64
+		"1: movq (%[parent_old]), %[old]\n"
+		"2: movq %[return_hooker], (%[parent_replaced])\n"
+#else
 		"1: movl (%[parent_old]), %[old]\n"
 		"2: movl %[return_hooker], (%[parent_replaced])\n"
+#endif
 		"   movl $0, %[faulted]\n"

 		".section .fixup, \"ax\"\n"

@@ -476,8 +481,13 @@ void prepare_ftrace_return(unsigned long *parent, unsigned long self_addr)
 		".previous\n"

 		".section __ex_table, \"a\"\n"
+#ifdef CONFIG_X86_64
+		"   .quad 1b, 3b\n"
+		"   .quad 2b, 3b\n"
+#else
 		"   .long 1b, 3b\n"
 		"   .long 2b, 3b\n"
+#endif
 		".previous\n"

 		: [parent_replaced] "=r" (parent), [old] "=r" (old),

@@ -509,5 +519,4 @@ void prepare_ftrace_return(unsigned long *parent, unsigned long self_addr)
 	ftrace_graph_entry(&trace);
 }
 #endif /* CONFIG_FUNCTION_GRAPH_TRACER */
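The pattern around the `1:`/`2:` labels is the kernel's may-fault memory access: `.section .fixup` holds recovery code (a `3:` label in the elided lines, which sets `faulted`), and `__ex_table` maps each possibly-faulting instruction to that recovery point so the page-fault handler can resume there instead of oopsing. The hunks above only widen the access (`movq`) and the table entries (`.quad`) to 64 bits. A loose user-space analogue of "try the load, recover on fault", using signals rather than an exception table (illustrative only, and relying on the bad dereference actually faulting):

#include <setjmp.h>
#include <signal.h>
#include <stdio.h>

static sigjmp_buf fixup;		/* our ".fixup" landing pad */

static void on_segv(int sig)
{
	(void)sig;
	siglongjmp(fixup, 1);		/* jump to the recovery path */
}

/* Analogue of the asm block: read *addr, setting *faulted instead of
 * crashing if the access blows up. */
static long load_maybe_faulting(const long *addr, int *faulted)
{
	long old = 0;

	*faulted = 0;
	signal(SIGSEGV, on_segv);
	if (sigsetjmp(fixup, 1) == 0)
		old = *addr;		/* the "1:" instruction */
	else
		*faulted = 1;		/* the fixup path: report failure */
	return old;
}

int main(void)
{
	long ok = 42;
	int faulted;

	printf("%ld (faulted=%d)\n", load_maybe_faulting(&ok, &faulted), faulted);
	printf("%ld (faulted=%d)\n",
	       load_maybe_faulting((const long *)8, &faulted), faulted);
	return 0;
}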
kernel/trace/ftrace.c

@@ -1671,8 +1671,10 @@ static int alloc_retstack_tasklist(struct ftrace_ret_stack **ret_stack_list)
 		}

 		if (t->ret_stack == NULL) {
-			t->ret_stack = ret_stack_list[start++];
 			t->curr_ret_stack = -1;
+			/* Make sure IRQs see the -1 first: */
+			barrier();
+			t->ret_stack = ret_stack_list[start++];
 			atomic_set(&t->trace_overrun, 0);
 		}
 	} while_each_thread(g, t);
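The reordering matters because an interrupt can hit between the two stores: the tracer's entry path treats a non-NULL `t->ret_stack` as "shadow stack is live" and then indexes it with `t->curr_ret_stack`, so the index must be initialized before the pointer is published. `barrier()` is the kernel's compiler fence; a minimal sketch of the idiom with stand-in types (not the real task_struct):

/* The kernel's compiler barrier (gcc syntax): the compiler may not
 * reorder memory accesses across this statement. */
#define barrier() __asm__ __volatile__("" ::: "memory")

/* Cut-down stand-ins for the two fields the hunk reorders. */
struct task_stub {
	int   curr_ret_stack;	/* index into the shadow stack, -1 = empty */
	void *ret_stack;	/* non-NULL publishes the stack to IRQs */
};

static void publish_ret_stack(struct task_stub *t, void *stack)
{
	t->curr_ret_stack = -1;	/* initialize the index first... */
	barrier();		/* ...and forbid the compiler from swapping */
	t->ret_stack = stack;	/* ...the stores; IRQs key off this pointer */
}

int main(void)
{
	struct task_stub t = { 0, 0 };
	static long slot[64];

	publish_ret_stack(&t, slot);
	return t.curr_ret_stack == -1 ? 0 : 1;
}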