@@ -177,6 +177,8 @@ config ARM64
select HAVE_DYNAMIC_FTRACE
select HAVE_DYNAMIC_FTRACE_WITH_REGS \
if $(cc-option,-fpatchable-function-entry=2)
+ select HAVE_DYNAMIC_FTRACE_WITH_DIRECT_CALLS \
+ if DYNAMIC_FTRACE_WITH_REGS
select FTRACE_MCOUNT_USE_PATCHABLE_FUNCTION_ENTRY \
if DYNAMIC_FTRACE_WITH_REGS
select HAVE_EFFICIENT_UNALIGNED_ACCESS
@@ -78,6 +78,16 @@ static inline unsigned long ftrace_call_adjust(unsigned long addr)
return addr;
}

+static inline void arch_ftrace_set_direct_caller(struct pt_regs *regs,
+ unsigned long addr)
+{
+ /*
+ * Place custom trampoline address in regs->orig_x0 to let ftrace
+ * trampoline jump to it.
+ */
+ regs->orig_x0 = addr;
+}
+
#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
struct dyn_ftrace;
int ftrace_init_nop(struct module *mod, struct dyn_ftrace *rec);
@@ -80,6 +80,7 @@ int main(void)
DEFINE(S_SDEI_TTBR1, offsetof(struct pt_regs, sdei_ttbr1));
DEFINE(S_PMR_SAVE, offsetof(struct pt_regs, pmr_save));
DEFINE(S_STACKFRAME, offsetof(struct pt_regs, stackframe));
+ DEFINE(S_ORIG_X0, offsetof(struct pt_regs, orig_x0));
DEFINE(PT_REGS_SIZE, sizeof(struct pt_regs));
BLANK();
#ifdef CONFIG_COMPAT
@@ -60,6 +60,9 @@
str x29, [sp, #S_FP]
.endif

+ /* Set orig_x0 to zero */
+ str xzr, [sp, #S_ORIG_X0]
+
/* Save the callsite's SP and LR */
add x10, sp, #(PT_REGS_SIZE + 16)
stp x9, x10, [sp, #S_LR]
@@ -119,12 +122,21 @@ ftrace_common_return:
/* Restore the callsite's FP, LR, PC */
ldr x29, [sp, #S_FP]
ldr x30, [sp, #S_LR]
- ldr x9, [sp, #S_PC]
-
+ ldr x10, [sp, #S_PC]
+
+ ldr x11, [sp, #S_ORIG_X0]
+ cbz x11, 1f
+ /* Set x9 to parent ip before jump to custom trampoline */
+ mov x9, x30
+ /* Set lr to self ip */
+ ldr x30, [sp, #S_PC]
+ /* Set x10 (used for return address) to custom trampoline */
+ mov x10, x11
+1:
/* Restore the callsite's SP */
add sp, sp, #PT_REGS_SIZE + 16

- ret x9
+ ret x10
SYM_CODE_END(ftrace_common)

#ifdef CONFIG_FUNCTION_GRAPH_TRACER