From 63b754957371c23b7515399a977a2e1d361a036c Mon Sep 17 00:00:00 2001
From: "David S. Miller"
Date: Mon, 12 Apr 2010 22:35:24 -0700
Subject: [PATCH] sparc64: Add HAVE_FUNCTION_TRACE_MCOUNT_TEST and tidy up.

Check function_trace_stop at ftrace_caller.

Toss mcount_call and dummy call of ftrace_stub, unnecessary.

Document problems we'll have if the final kernel image link
ever turns on relaxation.

Properly size 'ftrace_call' so it looks right when inspecting
instructions under gdb et al.

Signed-off-by: David S. Miller
---
 arch/sparc/Kconfig      |  1 +
 arch/sparc/lib/mcount.S | 22 +++++++++++++++-------
 2 files changed, 16 insertions(+), 7 deletions(-)

diff --git a/arch/sparc/Kconfig b/arch/sparc/Kconfig
index 6db51367405..035304c30ab 100644
--- a/arch/sparc/Kconfig
+++ b/arch/sparc/Kconfig
@@ -37,6 +37,7 @@ config SPARC64
 	def_bool 64BIT
 	select ARCH_SUPPORTS_MSI
 	select HAVE_FUNCTION_TRACER
+	select HAVE_FUNCTION_TRACE_MCOUNT_TEST
 	select HAVE_KRETPROBES
 	select HAVE_KPROBES
 	select HAVE_LMB
diff --git a/arch/sparc/lib/mcount.S b/arch/sparc/lib/mcount.S
index 24b8b12deed..7047997be0e 100644
--- a/arch/sparc/lib/mcount.S
+++ b/arch/sparc/lib/mcount.S
@@ -96,13 +96,12 @@ mcount:
 #endif
 #ifdef CONFIG_FUNCTION_TRACER
 #ifdef CONFIG_DYNAMIC_FTRACE
-	mov		%o7, %o0
-	.globl		mcount_call
-mcount_call:
-	call		ftrace_stub
-	 mov		%o0, %o7
+	/* Do nothing, the retl/nop below is all we need. */
 #else
-	sethi		%hi(ftrace_trace_function), %g1
+	sethi		%hi(function_trace_stop), %g1
+	lduw		[%g1 + %lo(function_trace_stop)], %g2
+	brnz,pn		%g2, 1f
+	 sethi		%hi(ftrace_trace_function), %g1
 	sethi		%hi(ftrace_stub), %g2
 	ldx		[%g1 + %lo(ftrace_trace_function)], %g1
 	or		%g2, %lo(ftrace_stub), %g2
@@ -131,14 +130,23 @@ ftrace_stub:
 	.globl		ftrace_caller
 	.type		ftrace_caller,#function
 ftrace_caller:
+	sethi		%hi(function_trace_stop), %g1
 	mov		%i7, %o1
-	mov		%o7, %o0
+	lduw		[%g1 + %lo(function_trace_stop)], %g2
+	brnz,pn		%g2, ftrace_stub
+	 mov		%o7, %o0
 	.globl		ftrace_call
 ftrace_call:
+	/* If the final kernel link ever turns on relaxation, we'll need
+	 * to do something about this tail call. Otherwise the linker
+	 * will rewrite the call into a branch and nop out the move
+	 * instruction.
+	 */
 	call		ftrace_stub
 	 mov		%o0, %o7
 	retl
 	 nop
+	.size		ftrace_call,.-ftrace_call
 	.size		ftrace_caller,.-ftrace_caller
 #endif
 #endif
-- 
2.41.0