// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * test_kprobes.c - simple sanity test for k*probes
 *
 * Copyright IBM Corp. 2008
 */

#include <linux/kernel.h>
#include <linux/kprobes.h>
#include <linux/random.h>
#include <kunit/test.h>

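/*
 * The probed functions divide their input by div_factor; rand1 is drawn
 * above div_factor in kprobes_test_init() so the quotient is nonzero.
 */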
#define div_factor 3

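/*
 * The test targets are called through these function pointers so that the
 * call sites remain genuine indirect calls the compiler cannot inline or
 * constant-fold away from under the probes.
 */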
static u32 rand1, preh_val, posth_val;
static u32 (*target)(u32 value);
static u32 (*recursed_target)(u32 value);
static u32 (*target2)(u32 value);
static struct kunit *current_test;

static unsigned long (*internal_target)(void);
static unsigned long (*stacktrace_target)(void);
static unsigned long (*stacktrace_driver)(void);
static unsigned long target_return_address[2];

static noinline u32 kprobe_target(u32 value)
{
	return (value / div_factor);
}

static noinline u32 kprobe_recursed_target(u32 value)
{
	return (value / div_factor);
}

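/*
 * Shared pre/post handlers.  Both call recursed_target(), so when the probe
 * is installed on kprobe_recursed_target() itself (see test_kprobe_missed()),
 * the nested hits are rejected as reentrant and counted in nmissed instead
 * of being handled recursively.
 */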
static int kp_pre_handler(struct kprobe *p, struct pt_regs *regs)
{
	KUNIT_EXPECT_FALSE(current_test, preemptible());

	preh_val = recursed_target(rand1);
	return 0;
}

static void kp_post_handler(struct kprobe *p, struct pt_regs *regs,
		unsigned long flags)
{
	u32 expval = recursed_target(rand1);

	KUNIT_EXPECT_FALSE(current_test, preemptible());
	KUNIT_EXPECT_EQ(current_test, preh_val, expval);

	posth_val = preh_val + div_factor;
}

static struct kprobe kp = {
	.symbol_name = "kprobe_target",
	.pre_handler = kp_pre_handler,
	.post_handler = kp_post_handler
};

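/* Register a single kprobe, trigger it once, and check that both handlers ran. */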
static void test_kprobe(struct kunit *test)
{
	current_test = test;
	KUNIT_EXPECT_EQ(test, 0, register_kprobe(&kp));
	target(rand1);
	unregister_kprobe(&kp);
	KUNIT_EXPECT_NE(test, 0, preh_val);
	KUNIT_EXPECT_NE(test, 0, posth_val);
}

static noinline u32 kprobe_target2(u32 value)
{
	return (value / div_factor) + 1;
}

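/*
 * Call chain for the stacktrace tests: stacktrace_driver() ->
 * stacktrace_target() -> internal_target().  Each target records its own
 * return address so the kretprobe handlers can verify that the unwinder
 * recovers it through the kretprobe trampoline.
 */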
static noinline unsigned long kprobe_stacktrace_internal_target(void)
{
	if (!target_return_address[0])
		target_return_address[0] = (unsigned long)__builtin_return_address(0);
	return target_return_address[0];
}

static noinline unsigned long kprobe_stacktrace_target(void)
{
	if (!target_return_address[1])
		target_return_address[1] = (unsigned long)__builtin_return_address(0);

	if (internal_target)
		internal_target();

	return target_return_address[1];
}

static noinline unsigned long kprobe_stacktrace_driver(void)
{
	if (stacktrace_target)
		stacktrace_target();

	/* This keeps the function from being inlined. */
	return (unsigned long)__builtin_return_address(0);
}

static int kp_pre_handler2(struct kprobe *p, struct pt_regs *regs)
{
	preh_val = (rand1 / div_factor) + 1;
	return 0;
}

static void kp_post_handler2(struct kprobe *p, struct pt_regs *regs,
		unsigned long flags)
{
	KUNIT_EXPECT_EQ(current_test, preh_val, (rand1 / div_factor) + 1);
	posth_val = preh_val + div_factor;
}

static struct kprobe kp2 = {
	.symbol_name = "kprobe_target2",
	.pre_handler = kp_pre_handler2,
	.post_handler = kp_post_handler2
};

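/* Register two kprobes in one batch and check that both handler pairs fire. */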
static void test_kprobes(struct kunit *test)
{
	struct kprobe *kps[2] = {&kp, &kp2};

	current_test = test;

	/* addr and flags should be cleared before reusing the kprobe. */
	kp.addr = NULL;
	kp.flags = 0;

	KUNIT_EXPECT_EQ(test, 0, register_kprobes(kps, 2));
	preh_val = 0;
	posth_val = 0;
	target(rand1);

	KUNIT_EXPECT_NE(test, 0, preh_val);
	KUNIT_EXPECT_NE(test, 0, posth_val);

	preh_val = 0;
	posth_val = 0;
	target2(rand1);

	KUNIT_EXPECT_NE(test, 0, preh_val);
	KUNIT_EXPECT_NE(test, 0, posth_val);
	unregister_kprobes(kps, 2);
}

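/*
 * kp_missed installs the shared handlers on kprobe_recursed_target(), which
 * those handlers themselves call.  Each nested hit is rejected as reentrant
 * and accounted in kp_missed.nmissed: one from the pre-handler and one from
 * the post-handler, hence the expected count of 2.
 */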
static struct kprobe kp_missed = {
	.symbol_name = "kprobe_recursed_target",
	.pre_handler = kp_pre_handler,
	.post_handler = kp_post_handler,
};

static void test_kprobe_missed(struct kunit *test)
{
	current_test = test;
	preh_val = 0;
	posth_val = 0;

	KUNIT_EXPECT_EQ(test, 0, register_kprobe(&kp_missed));

	recursed_target(rand1);

	KUNIT_EXPECT_EQ(test, 2, kp_missed.nmissed);
	KUNIT_EXPECT_NE(test, 0, preh_val);
	KUNIT_EXPECT_NE(test, 0, posth_val);

	unregister_kprobe(&kp_missed);
}

#ifdef CONFIG_KRETPROBES
static u32 krph_val;

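/*
 * The entry handler records the value the probed function will return; the
 * return handler compares it against regs_return_value() and then sets
 * krph_val to rand1 to signal that it ran.
 */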
static int entry_handler(struct kretprobe_instance *ri, struct pt_regs *regs)
{
	KUNIT_EXPECT_FALSE(current_test, preemptible());
	krph_val = (rand1 / div_factor);
	return 0;
}

static int return_handler(struct kretprobe_instance *ri, struct pt_regs *regs)
{
	unsigned long ret = regs_return_value(regs);

	KUNIT_EXPECT_FALSE(current_test, preemptible());
	KUNIT_EXPECT_EQ(current_test, ret, rand1 / div_factor);
	KUNIT_EXPECT_NE(current_test, krph_val, 0);
	krph_val = rand1;
	return 0;
}

static struct kretprobe rp = {
	.handler = return_handler,
	.entry_handler = entry_handler,
	.kp.symbol_name = "kprobe_target"
};

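/* Trigger the kretprobe once and check that the return handler completed. */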
static void test_kretprobe(struct kunit *test)
{
	current_test = test;
	KUNIT_EXPECT_EQ(test, 0, register_kretprobe(&rp));
	target(rand1);
	unregister_kretprobe(&rp);
	KUNIT_EXPECT_EQ(test, krph_val, rand1);
}

static int return_handler2(struct kretprobe_instance *ri, struct pt_regs *regs)
{
	unsigned long ret = regs_return_value(regs);

	KUNIT_EXPECT_EQ(current_test, ret, (rand1 / div_factor) + 1);
	KUNIT_EXPECT_NE(current_test, krph_val, 0);
	krph_val = rand1;
	return 0;
}

static struct kretprobe rp2 = {
	.handler = return_handler2,
	.entry_handler = entry_handler,
	.kp.symbol_name = "kprobe_target2"
};

static void test_kretprobes(struct kunit *test)
{
	struct kretprobe *rps[2] = {&rp, &rp2};

	current_test = test;
	/* addr and flags should be cleared before reusing the kprobe. */
	rp.kp.addr = NULL;
	rp.kp.flags = 0;
	KUNIT_EXPECT_EQ(test, 0, register_kretprobes(rps, 2));

	krph_val = 0;
	target(rand1);
	KUNIT_EXPECT_EQ(test, krph_val, rand1);

	krph_val = 0;
	target2(rand1);
	KUNIT_EXPECT_EQ(test, krph_val, rand1);
	unregister_kretprobes(rps, 2);
}

#ifdef CONFIG_ARCH_CORRECT_STACKTRACE_ON_KRETPROBE
#define STACK_BUF_SIZE 16
static unsigned long stack_buf[STACK_BUF_SIZE];

static int stacktrace_return_handler(struct kretprobe_instance *ri, struct pt_regs *regs)
{
	unsigned long retval = regs_return_value(regs);
	int i, ret;

	KUNIT_EXPECT_FALSE(current_test, preemptible());
	KUNIT_EXPECT_EQ(current_test, retval, target_return_address[1]);

	/*
	 * Test stacktrace inside the kretprobe handler.  This will involve
	 * the kretprobe trampoline, but the trace must still include the
	 * correct return address of the target function.
	 */
	ret = stack_trace_save(stack_buf, STACK_BUF_SIZE, 0);
	KUNIT_EXPECT_NE(current_test, ret, 0);

	for (i = 0; i < ret; i++) {
		if (stack_buf[i] == target_return_address[1])
			break;
	}
	KUNIT_EXPECT_NE(current_test, i, ret);

#if !IS_MODULE(CONFIG_KPROBES_SANITY_TEST)
	/*
	 * Test stacktrace from pt_regs at the return address. Thus the stack
	 * trace must start from the target return address.
	 */
	ret = stack_trace_save_regs(regs, stack_buf, STACK_BUF_SIZE, 0);
	KUNIT_EXPECT_NE(current_test, ret, 0);
	KUNIT_EXPECT_EQ(current_test, stack_buf[0], target_return_address[1]);
#endif

	return 0;
}

static struct kretprobe rp3 = {
	.handler = stacktrace_return_handler,
	.kp.symbol_name = "kprobe_stacktrace_target"
};

static void test_stacktrace_on_kretprobe(struct kunit *test)
{
	unsigned long myretaddr = (unsigned long)__builtin_return_address(0);

	current_test = test;
	rp3.kp.addr = NULL;
	rp3.kp.flags = 0;

	/*
	 * Run stacktrace_driver() once to record the correct return address
	 * in stacktrace_target(), and verify that stacktrace_driver() is not
	 * inlined by checking that its return address differs from this
	 * function's return address.
	 */
	KUNIT_ASSERT_NE(test, myretaddr, stacktrace_driver());

	KUNIT_ASSERT_EQ(test, 0, register_kretprobe(&rp3));
	KUNIT_ASSERT_NE(test, myretaddr, stacktrace_driver());
	unregister_kretprobe(&rp3);
}

static int stacktrace_internal_return_handler(struct kretprobe_instance *ri, struct pt_regs *regs)
{
	unsigned long retval = regs_return_value(regs);
	int i, ret;

	KUNIT_EXPECT_FALSE(current_test, preemptible());
	KUNIT_EXPECT_EQ(current_test, retval, target_return_address[0]);

	/*
	 * Test stacktrace inside the kretprobe handler for the nested case.
	 * The unwinder will find the kretprobe_trampoline address in place of
	 * the return address, and the kretprobe code must resolve it back to
	 * the real return address.
	 */
	ret = stack_trace_save(stack_buf, STACK_BUF_SIZE, 0);
	KUNIT_EXPECT_NE(current_test, ret, 0);

	for (i = 0; i < ret - 1; i++) {
		if (stack_buf[i] == target_return_address[0]) {
			KUNIT_EXPECT_EQ(current_test, stack_buf[i + 1], target_return_address[1]);
			break;
		}
	}
	KUNIT_EXPECT_NE(current_test, i, ret);

#if !IS_MODULE(CONFIG_KPROBES_SANITY_TEST)
	/* Ditto for the regs version. */
	ret = stack_trace_save_regs(regs, stack_buf, STACK_BUF_SIZE, 0);
	KUNIT_EXPECT_NE(current_test, ret, 0);
	KUNIT_EXPECT_EQ(current_test, stack_buf[0], target_return_address[0]);
	KUNIT_EXPECT_EQ(current_test, stack_buf[1], target_return_address[1]);
#endif

	return 0;
}

static struct kretprobe rp4 = {
	.handler = stacktrace_internal_return_handler,
	.kp.symbol_name = "kprobe_stacktrace_internal_target"
};

static void test_stacktrace_on_nested_kretprobe(struct kunit *test)
{
	unsigned long myretaddr = (unsigned long)__builtin_return_address(0);
	struct kretprobe *rps[2] = {&rp3, &rp4};

	current_test = test;
	rp3.kp.addr = NULL;
	rp3.kp.flags = 0;

	//KUNIT_ASSERT_NE(test, myretaddr, stacktrace_driver());

	KUNIT_ASSERT_EQ(test, 0, register_kretprobes(rps, 2));
	KUNIT_ASSERT_NE(test, myretaddr, stacktrace_driver());
	unregister_kretprobes(rps, 2);
}
#endif /* CONFIG_ARCH_CORRECT_STACKTRACE_ON_KRETPROBE */

#endif /* CONFIG_KRETPROBES */

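/*
 * Suite init runs before each test case: it binds the targets through the
 * function pointers above and picks a fresh random input above div_factor.
 */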
static int kprobes_test_init(struct kunit *test)
{
	target = kprobe_target;
	target2 = kprobe_target2;
	recursed_target = kprobe_recursed_target;
	stacktrace_target = kprobe_stacktrace_target;
	internal_target = kprobe_stacktrace_internal_target;
	stacktrace_driver = kprobe_stacktrace_driver;
	rand1 = get_random_u32_above(div_factor);
	return 0;
}

static struct kunit_case kprobes_testcases[] = {
	KUNIT_CASE(test_kprobe),
	KUNIT_CASE(test_kprobes),
	KUNIT_CASE(test_kprobe_missed),
#ifdef CONFIG_KRETPROBES
	KUNIT_CASE(test_kretprobe),
	KUNIT_CASE(test_kretprobes),
#ifdef CONFIG_ARCH_CORRECT_STACKTRACE_ON_KRETPROBE
	KUNIT_CASE(test_stacktrace_on_kretprobe),
	KUNIT_CASE(test_stacktrace_on_nested_kretprobe),
#endif
#endif
	{}
};

static struct kunit_suite kprobes_test_suite = {
	.name = "kprobes_test",
	.init = kprobes_test_init,
	.test_cases = kprobes_testcases,
};

kunit_test_suites(&kprobes_test_suite);

MODULE_DESCRIPTION("simple sanity test for k*probes");
MODULE_LICENSE("GPL");