31
31
32
32
#include "core.h"
33
33
34
/*
 * See register_usage_flags. If the probed instruction doesn't use PC,
 * we can copy it into template and have it executed directly without
 * simulation or emulation.
 */
#define ARM_REG_PC	15
/*
 * NOTE: no space between the macro name and the parameter list — with a
 * space this would be an object-like macro and "(m)" would become part of
 * the expansion instead of a parameter, breaking every call site.
 */
#define can_kprobe_direct_exec(m)	(!test_bit(ARM_REG_PC, &(m)))
34
42
/*
35
43
* NOTE: the first sub and add instruction will be modified according
36
44
* to the stack cost of the instruction.
71
79
" orrne r2, #1\n"
72
80
" strne r2, [sp, #60] @ set bit0 of PC for thumb\n"
73
81
" msr cpsr_cxsf, r1\n"
82
+ ".global optprobe_template_restore_begin\n"
83
+ "optprobe_template_restore_begin:\n"
74
84
" ldmia sp, {r0 - r15}\n"
85
+ ".global optprobe_template_restore_orig_insn\n"
86
+ "optprobe_template_restore_orig_insn:\n"
87
+ " nop\n"
88
+ ".global optprobe_template_restore_end\n"
89
+ "optprobe_template_restore_end:\n"
90
+ " nop\n"
75
91
".global optprobe_template_val\n"
76
92
"optprobe_template_val:\n"
77
93
"1: .long 0\n"
@@ -91,6 +107,12 @@ asm (
91
107
((unsigned long *)&optprobe_template_add_sp - (unsigned long *)&optprobe_template_entry)
92
108
#define TMPL_SUB_SP \
93
109
((unsigned long *)&optprobe_template_sub_sp - (unsigned long *)&optprobe_template_entry)
110
/*
 * Word offsets of the restore-sequence labels, measured from
 * optprobe_template_entry, used to patch the copied template.
 * NOTE(review): the template symbols are declared elsewhere in this file;
 * offsets assume 4-byte (unsigned long) template slots — ARM32 only.
 */
#define TMPL_RESTORE_BEGIN \
	((unsigned long *)&optprobe_template_restore_begin - (unsigned long *)&optprobe_template_entry)
#define TMPL_RESTORE_ORIGN_INSN \
	((unsigned long *)&optprobe_template_restore_orig_insn - (unsigned long *)&optprobe_template_entry)
#define TMPL_RESTORE_END \
	((unsigned long *)&optprobe_template_restore_end - (unsigned long *)&optprobe_template_entry)
94
116
95
117
/*
96
118
* ARM can always optimize an instruction when using ARM ISA, except
@@ -160,8 +182,12 @@ optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs)
160
182
__this_cpu_write (current_kprobe , NULL );
161
183
}
162
184
163
- /* In each case, we must singlestep the replaced instruction. */
164
- op -> kp .ainsn .insn_singlestep (p -> opcode , & p -> ainsn , regs );
185
+ /*
186
+ * We singlestep the replaced instruction only when it can't be
187
+ * executed directly during restore.
188
+ */
189
+ if (!p -> ainsn .kprobe_direct_exec )
190
+ op -> kp .ainsn .insn_singlestep (p -> opcode , & p -> ainsn , regs );
165
191
166
192
local_irq_restore (flags );
167
193
}
@@ -243,6 +269,28 @@ int arch_prepare_optimized_kprobe(struct optimized_kprobe *op, struct kprobe *or
243
269
val = (unsigned long )optimized_callback ;
244
270
code [TMPL_CALL_IDX ] = val ;
245
271
272
+ /* If possible, copy insn and have it executed during restore */
273
+ orig -> ainsn .kprobe_direct_exec = false;
274
+ if (can_kprobe_direct_exec (orig -> ainsn .register_usage_flags )) {
275
+ kprobe_opcode_t final_branch = arm_gen_branch (
276
+ (unsigned long )(& code [TMPL_RESTORE_END ]),
277
+ (unsigned long )(op -> kp .addr ) + 4 );
278
+ if (final_branch != 0 ) {
279
+ /*
280
+ * Replace original 'ldmia sp, {r0 - r15}' with
281
+ * 'ldmia sp, {r0 - r14}', restore all registers except pc.
282
+ */
283
+ code [TMPL_RESTORE_BEGIN ] = __opcode_to_mem_arm (0xe89d7fff );
284
+
285
+ /* The original probed instruction */
286
+ code [TMPL_RESTORE_ORIGN_INSN ] = __opcode_to_mem_arm (orig -> opcode );
287
+
288
+ /* Jump back to next instruction */
289
+ code [TMPL_RESTORE_END ] = __opcode_to_mem_arm (final_branch );
290
+ orig -> ainsn .kprobe_direct_exec = true;
291
+ }
292
+ }
293
+
246
294
flush_icache_range ((unsigned long )code ,
247
295
(unsigned long )(& code [TMPL_END_IDX ]));
248
296
0 commit comments