Commit 63b75495 authored by David S. Miller

sparc64: Add HAVE_FUNCTION_TRACE_MCOUNT_TEST and tidy up.

Check function_trace_stop at ftrace_caller.

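As context for that test: when an architecture does not select
HAVE_FUNCTION_TRACE_MCOUNT_TEST, the generic ftrace code of this era honors
function_trace_stop by wrapping the tracer in a C helper, roughly along these
lines (a simplified sketch, not the verbatim kernel/trace/ftrace.c source):

```c
/* Simplified sketch of the generic C fallback that the assembly test in
 * ftrace_caller makes unnecessary: the stop flag is checked before the
 * real tracer (__ftrace_trace_function) is invoked.
 */
extern int function_trace_stop;		/* global "stop tracing" flag */
extern void (*__ftrace_trace_function)(unsigned long ip,
				       unsigned long parent_ip);

static void ftrace_test_stop_func(unsigned long ip, unsigned long parent_ip)
{
	if (!function_trace_stop)
		__ftrace_trace_function(ip, parent_ip);
}
```

Doing the test in the arch's own mcount/ftrace_caller avoids that extra
C-level indirection on every traced function.
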
Toss mcount_call and the dummy call of ftrace_stub; both are unnecessary.

Document problems we'll have if the final kernel image link
ever turns on relaxation.

Properly size 'ftrace_call' so it looks right when inspecting
instructions under gdb et al.

Signed-off-by: David S. Miller <davem@davemloft.net>
parent 0c25e9e6
@@ -37,6 +37,7 @@ config SPARC64
 	def_bool 64BIT
 	select ARCH_SUPPORTS_MSI
 	select HAVE_FUNCTION_TRACER
+	select HAVE_FUNCTION_TRACE_MCOUNT_TEST
 	select HAVE_KRETPROBES
 	select HAVE_KPROBES
 	select HAVE_LMB
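
The new select line is what flips the generic side over: with the symbol set,
ftrace installs the tracer directly instead of routing through the stop-flag
wrapper. A rough sketch of that selection logic (names approximate the
kernel/trace/ftrace.c of this era; the function name here is illustrative):

```c
/* Illustrative sketch of how the generic code chooses which function
 * pointer to install, keyed off CONFIG_HAVE_FUNCTION_TRACE_MCOUNT_TEST.
 */
typedef void (*ftrace_func_t)(unsigned long ip, unsigned long parent_ip);

extern ftrace_func_t ftrace_trace_function;	/* called from mcount */
extern ftrace_func_t __ftrace_trace_function;	/* the real tracer */
extern void ftrace_test_stop_func(unsigned long ip, unsigned long parent_ip);

static void install_trace_function(ftrace_func_t func)
{
#ifdef CONFIG_HAVE_FUNCTION_TRACE_MCOUNT_TEST
	/* The arch's mcount/ftrace_caller tests function_trace_stop itself. */
	ftrace_trace_function = func;
#else
	/* Otherwise go through the C wrapper that performs the test. */
	__ftrace_trace_function = func;
	ftrace_trace_function = ftrace_test_stop_func;
#endif
}
```
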
@@ -96,12 +96,11 @@ mcount:
 #endif
 #ifdef CONFIG_FUNCTION_TRACER
 #ifdef CONFIG_DYNAMIC_FTRACE
-	mov		%o7, %o0
-	.globl		mcount_call
-mcount_call:
-	call		ftrace_stub
-	 mov		%o0, %o7
+	/* Do nothing, the retl/nop below is all we need.  */
 #else
+	sethi		%hi(function_trace_stop), %g1
+	lduw		[%g1 + %lo(function_trace_stop)], %g2
+	brnz,pn		%g2, 1f
 	sethi		%hi(ftrace_trace_function), %g1
 	sethi		%hi(ftrace_stub), %g2
 	ldx		[%g1 + %lo(ftrace_trace_function)], %g1
@@ -131,14 +130,23 @@ ftrace_stub:
 	.globl		ftrace_caller
 	.type		ftrace_caller,#function
 ftrace_caller:
+	sethi		%hi(function_trace_stop), %g1
 	mov		%i7, %o1
+	lduw		[%g1 + %lo(function_trace_stop)], %g2
+	brnz,pn		%g2, ftrace_stub
 	mov		%o7, %o0
 	.globl		ftrace_call
 ftrace_call:
+	/* If the final kernel link ever turns on relaxation, we'll need
+	 * to do something about this tail call.  Otherwise the linker
+	 * will rewrite the call into a branch and nop out the move
+	 * instruction.
+	 */
 	call		ftrace_stub
 	 mov		%o0, %o7
 	retl
 	 nop
+	.size		ftrace_call,.-ftrace_call
 	.size		ftrace_caller,.-ftrace_caller
 #endif
 #endif
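
One more reason ftrace_call has to stay a real, properly sized call site: under
CONFIG_DYNAMIC_FTRACE the arch code rewrites the instruction at that label at
runtime, which is also why a relaxing link that turned the call into a branch
would be a problem. A hedged sketch of that patching path (approximating the
arch/sparc/kernel/ftrace.c of this era; ftrace_call_replace() and
ftrace_modify_code() stand in for the era's helpers that encode and swap the
instruction):

```c
/* Approximate shape of the sparc64 dynamic-ftrace update hook: build a
 * new "call <func>" instruction and patch it over the one currently
 * sitting at the ftrace_call label (initially "call ftrace_stub").
 */
#include <linux/types.h>	/* u32 */

typedef void (*ftrace_func_t)(unsigned long ip, unsigned long parent_ip);

extern void ftrace_call(void);	/* the global label in mcount.S */
extern u32 ftrace_call_replace(unsigned long ip, unsigned long addr);
extern int ftrace_modify_code(unsigned long ip, u32 old, u32 new);

int ftrace_update_ftrace_func(ftrace_func_t func)
{
	unsigned long ip = (unsigned long)(&ftrace_call);
	u32 old, new;

	old = *(u32 *)&ftrace_call;
	new = ftrace_call_replace(ip, (unsigned long)func);
	return ftrace_modify_code(ip, old, new);
}
```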