--- /dev/null
+#include <linux/spinlock.h>
+#include <linux/hardirq.h>
+#include <linux/ftrace.h>
+#include <linux/percpu.h>
+#include <linux/init.h>
+#include <linux/list.h>
+
+static const u32 ftrace_nop = 0x01000000;
+
+/*
+ * Return non-zero if the instruction word at @ip has already been
+ * converted to the ftrace nop (0x01000000, i.e. "sethi 0, %g0").
+ */
+notrace int ftrace_ip_converted(unsigned long ip)
+{
+       u32 insn = *(u32 *) ip;
+
+       return (insn == ftrace_nop);
+}
+
+/* Return a pointer to the 4-byte nop used to disable an mcount call site. */
+notrace unsigned char *ftrace_nop_replace(void)
+{
+       return (char *)&ftrace_nop;
+}
+
+/*
+ * Encode a SPARC "call" instruction transferring from @ip to @addr:
+ * opcode 0x40000000 ("call disp30") OR'd with the PC-relative byte
+ * offset shifted right by 2 to form the 30-bit word displacement.
+ *
+ * NOTE(review): the offset is computed in 32 bits -- assumes @ip and
+ * @addr lie within 32-bit signed displacement range of each other;
+ * confirm this holds for the kernel text mapping.  The encoded word
+ * lives in a single static buffer, so the caller must consume it
+ * before the next invocation -- presumably the ftrace core
+ * serializes patching; verify against callers.
+ */
+notrace unsigned char *ftrace_call_replace(unsigned long ip, unsigned long addr)
+{
+       static u32 call;
+       s32 off;
+
+       off = ((s32)addr - (s32)ip);
+       call = 0x40000000 | ((u32)off >> 2);
+
+       return (unsigned char *) &call;
+}
+
+/*
+ * Atomically replace the instruction at @ip, but only if it still
+ * holds @old_code: "cas" compares memory against @old and swaps in
+ * @new, then "flush" keeps the instruction cache coherent with the
+ * rewritten text.  cas deposits the previous memory contents into
+ * the register tied to @replaced (shared with the "0" input).
+ *
+ * Returns 0 on success, 1 if the access faulted (taken through the
+ * .fixup/__ex_table entry below), or 2 if the word at @ip matched
+ * neither the old nor the new instruction (someone else changed it).
+ */
+notrace int
+ftrace_modify_code(unsigned long ip, unsigned char *old_code,
+                  unsigned char *new_code)
+{
+       u32 old = *(u32 *)old_code;
+       u32 new = *(u32 *)new_code;
+       u32 replaced;
+       int faulted;
+
+       __asm__ __volatile__(
+       "1:     cas     [%[ip]], %[old], %[new]\n"
+       "       flush   %[ip]\n"
+       "       mov     0, %[faulted]\n"
+       "2:\n"
+       "       .section .fixup,#alloc,#execinstr\n"
+       "       .align  4\n"
+       "3:     sethi   %%hi(2b), %[faulted]\n"
+       "       jmpl    %[faulted] + %%lo(2b), %%g0\n"
+       "        mov    1, %[faulted]\n"
+       "       .previous\n"
+       "       .section __ex_table,\"a\"\n"
+       "       .align  4\n"
+       "       .word   1b, 3b\n"
+       "       .previous\n"
+       : "=r" (replaced), [faulted] "=r" (faulted)
+       : [new] "0" (new), [old] "r" (old), [ip] "r" (ip)
+       : "memory");
+
+       /* Neither the expected old insn nor our new one is present. */
+       if (replaced != old && replaced != new)
+               faulted = 2;
+
+       return faulted;
+}
+
+/*
+ * Redirect the patchable call at ftrace_call (inside ftrace_caller)
+ * to @func.  Returns the ftrace_modify_code() result code.
+ */
+notrace int ftrace_update_ftrace_func(ftrace_func_t func)
+{
+       unsigned long ip = (unsigned long)(&ftrace_call);
+       unsigned char old[4], *new;
+
+       memcpy(old, &ftrace_call, 4);
+       new = ftrace_call_replace(ip, (unsigned long)func);
+       return ftrace_modify_code(ip, old, new);
+}
+
+/*
+ * Patch the mcount_call stub so mcount calls the ip-recorder
+ * function whose address is at *@data.  The ftrace_modify_code()
+ * result code is written back through @data for the caller.
+ */
+notrace int ftrace_mcount_set(unsigned long *data)
+{
+       unsigned long ip = (long)(&mcount_call);
+       unsigned long *addr = data;
+       unsigned char old[4], *new;
+
+       /*
+        * Replace the mcount stub with a pointer to the
+        * ip recorder function.
+        */
+       memcpy(old, &mcount_call, 4);
+       new = ftrace_call_replace(ip, *addr);
+       *addr = ftrace_modify_code(ip, old, new);
+
+       return 0;
+}
+
+
+/* Boot-time arch hook: install the ip-recorder function passed in @data. */
+int __init ftrace_dyn_arch_init(void *data)
+{
+       ftrace_mcount_set(data);
+       return 0;
+}
 
        .skip           OVSTACKSIZE
 #endif
        .text
-       .align 32
-       .globl mcount, _mcount
-mcount:
+       .align          32
+       .globl          _mcount
+       .type           _mcount,#function
+       .globl          mcount
+       .type           mcount,#function
 _mcount:
+mcount:
 #ifdef CONFIG_STACK_DEBUG
        /*
         * Check whether %sp is dangerously low.
         or             %g3, %lo(panicstring), %o0
        call            prom_halt
         nop
+1:
+#endif
+#ifdef CONFIG_FTRACE
+#ifdef CONFIG_DYNAMIC_FTRACE
+       /*
+        * Dynamic ftrace: this call site starts out targeting
+        * ftrace_stub and is live-patched via ftrace_modify_code().
+        * %o0 = our return address; the delay slot restores %o7 so
+        * the callee returns straight to the traced function.
+        */
+       mov             %o7, %o0
+       .globl          mcount_call
+mcount_call:
+       call            ftrace_stub
+        mov            %o0, %o7
+#else
+       /*
+        * Static ftrace: if ftrace_trace_function still points at
+        * ftrace_stub, tracing is disabled -- skip the indirect call.
+        * Otherwise jump to the tracer with %o0 = call ip and
+        * %o1 = parent ip; it returns via the unchanged %o7.
+        */
+       sethi           %hi(ftrace_trace_function), %g1
+       sethi           %hi(ftrace_stub), %g2
+       ldx             [%g1 + %lo(ftrace_trace_function)], %g1
+       or              %g2, %lo(ftrace_stub), %g2
+       cmp             %g1, %g2
+       be,pn           %icc, 1f
+        mov            %i7, %o1
+       jmpl            %g1, %g0
+        mov            %o7, %o0
+       /* not reached */
+1:
 #endif
-1:     retl
+#endif
+       retl
        nop
+       .size           _mcount,.-_mcount
+       .size           mcount,.-mcount
+
+#ifdef CONFIG_FTRACE
+       /*
+        * Do-nothing tracer target; also serves as the "tracing off"
+        * sentinel value compared against ftrace_trace_function above.
+        */
+       .globl          ftrace_stub
+       .type           ftrace_stub,#function
+ftrace_stub:
+       retl
+        nop
+       .size           ftrace_stub,.-ftrace_stub
+#ifdef CONFIG_DYNAMIC_FTRACE
+       /*
+        * Trampoline installed in place of the mcount_call nop: loads
+        * %o0 = call ip, %o1 = parent ip, then the call at ftrace_call
+        * is redirected to the tracer by ftrace_update_ftrace_func().
+        * The delay slot restores %o7 before returning to the caller.
+        */
+       .globl          ftrace_caller
+       .type           ftrace_caller,#function
+ftrace_caller:
+       mov             %i7, %o1
+       mov             %o7, %o0
+       .globl          ftrace_call
+ftrace_call:
+       call            ftrace_stub
+        mov            %o0, %o7
+       retl
+        nop
+       .size           ftrace_caller,.-ftrace_caller
+#endif
+#endif