#ifndef _ASM_X86_FTRACE_H
#define _ASM_X86_FTRACE_H
#ifdef __ASSEMBLY__
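/*
 * MCOUNT_SAVE_FRAME reserves 0x38 (56) bytes of stack and saves %rax plus
 * the six x86-64 argument registers (%rdi, %rsi, %rdx, %rcx, %r8, %r9).
 * These are caller-clobbered in the ABI, so the mcount stub must preserve
 * them around its call into the C tracing code or the traced function
 * would see corrupted arguments.
 */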
	.macro MCOUNT_SAVE_FRAME
	/* taken from glibc */
	subq $0x38, %rsp
	movq %rax, (%rsp)
	movq %rcx, 8(%rsp)
	movq %rdx, 16(%rsp)
	movq %rsi, 24(%rsp)
	movq %rdi, 32(%rsp)
	movq %r8, 40(%rsp)
	movq %r9, 48(%rsp)
	.endm
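/*
 * MCOUNT_RESTORE_FRAME undoes MCOUNT_SAVE_FRAME: it reloads the registers
 * in reverse order and releases the 0x38 bytes of stack.
 */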
	.macro MCOUNT_RESTORE_FRAME
	movq 48(%rsp), %r9
	movq 40(%rsp), %r8
	movq 32(%rsp), %rdi
	movq 24(%rsp), %rsi
	movq 16(%rsp), %rdx
	movq 8(%rsp), %rcx
	movq (%rsp), %rax
	addq $0x38, %rsp
	.endm
#endif
#ifdef CONFIG_FUNCTION_TRACER
#define MCOUNT_ADDR ((long)(mcount))
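/*
 * A call to mcount is a 5 byte near call: the 0xe8 opcode followed by a
 * 4 byte relative offset (see ftrace_call_adjust() below).
 */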
#define MCOUNT_INSN_SIZE 5 /* sizeof mcount call */
#ifndef __ASSEMBLY__
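/*
 * mcount is the profiling entry point the compiler emits a call to at the
 * start of every function built with -pg; on x86 it is implemented in
 * assembly and hands control to the tracer.
 */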
extern void mcount(void);
static inline unsigned long ftrace_call_adjust(unsigned long addr)
{
	/*
	 * The call to mcount is "e8 <4 byte offset>".
	 * addr points to the 4 byte offset, but the caller of this
	 * function wants a pointer to the e8 opcode itself, so simply
	 * subtract one.
	 */
	return addr - 1;
}
#ifdef CONFIG_DYNAMIC_FTRACE
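/*
 * Per-call-site data embedded in struct dyn_ftrace; x86 needs no extra
 * architecture-specific state.
 */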
struct dyn_arch_ftrace {
	/* No extra data needed for x86 */
};
#endif /* CONFIG_DYNAMIC_FTRACE */
#endif /* __ASSEMBLY__ */
#endif /* CONFIG_FUNCTION_TRACER */
#endif /* _ASM_X86_FTRACE_H */