1 /*
2 * arch/arm/kernel/kprobes-test.c
3 *
4 * Copyright (C) 2011 Jon Medhurst <tixy@yxit.co.uk>.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
9 */
10
11 /*
12 * TESTING METHODOLOGY
13 * -------------------
14 *
15 * The methodology used to test an ARM instruction 'test_insn' is to use
16 * inline assembler like:
17 *
18 * test_before: nop
19 * test_case: test_insn
20 * test_after: nop
21 *
22 * When the test case is run a kprobe is placed on each nop. The
23 * post-handler of the test_before probe is used to modify the saved CPU
24 * register context to that which we require for the test case. The
25 * pre-handler of the test_after probe saves a copy of the CPU
26 * register context. In this way we can execute test_insn with a specific
27 * register context and see the results afterwards.
28 *
29 * To actually test the kprobes instruction emulation we perform the above
30 * step a second time but with an additional kprobe on the test_case
31 * instruction itself. If the emulation is accurate then the results seen
32 * by the test_after probe will be identical to the first run which didn't
33 * have a probe on test_case.
34 *
35 * Each test case is run several times with a variety of flag values in
36 * CPSR and, for Thumb code, different ITState.
37 *
38 * For instructions which can modify PC, a second test_after probe is used
39 * like this:
40 *
41 * test_before: nop
42 * test_case: test_insn
43 * test_after: nop
44 * b test_done
45 * test_after2: nop
46 * test_done:
47 *
48 * The test case is constructed such that test_insn branches to
49 * test_after2, or, if testing a conditional instruction, it may just
50 * continue to test_after. The probes inserted at both locations let us
51 * determine which happened. A similar approach is used for testing
52 * backwards branches...
53 *
54 * b test_before
55 * b test_done @ helps to cope with off by 1 branches
56 * test_after2: nop
57 * b test_done
58 * test_before: nop
59 * test_case: test_insn
60 * test_after: nop
61 * test_done:
62 *
63 * The macros used to generate the assembler instructions described above
64 * are TEST_INSTRUCTION, TEST_BRANCH_F (branch forwards) and TEST_BRANCH_B
65 * (branch backwards). In these, the local labels numbered 50, 1, 2 and
66 * 99 represent: test_before, test_case, test_after2 and test_done.
67 *
68 * FRAMEWORK
69 * ---------
70 *
71 * Each test case is wrapped between the pair of macros TESTCASE_START and
72 * TESTCASE_END. As well as performing the inline assembler boilerplate,
73 * these call out to the kprobes_test_case_start() and
74 * kprobes_test_case_end() functions which drive the execution of the test
75 * case. The specific arguments to use for each test case are stored as
76 * inline data constructed using the various TEST_ARG_* macros. Putting
77 * this all together, a simple test case may look like:
78 *
79 * TESTCASE_START("Testing mov r0, r7")
80 * TEST_ARG_REG(7, 0x12345678) // Set r7=0x12345678
81 * TEST_ARG_END("")
82 * TEST_INSTRUCTION("mov r0, r7")
83 * TESTCASE_END
84 *
85 * Note, in practice the single convenience macro TEST_R would be used for this
86 * instead.
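 *
 * As a sketch only (the exact TEST_R argument list is defined in
 * kprobes-test.h, so treat this as illustrative rather than definitive),
 * the convenience form might be written as:
 *
 *	TEST_R("mov	r0, r",7,0x12345678,"")
 *
 * (The expansion shown next is for the explicit TESTCASE_START form
 * above, not for TEST_R.)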
87 *
88 * The above would expand to assembler looking something like:
89 *
90 * @ TESTCASE_START
91 * bl __kprobes_test_case_start
92 * @ start of inline data...
93 * .ascii "Testing mov r0, r7" @ text title for test case
94 * .byte 0
95 * .align 2
96 *
97 * @ TEST_ARG_REG
98 * .byte ARG_TYPE_REG
99 * .byte 7
100 * .short 0
101 * .word 0x12345678
102 *
103 * @ TEST_ARG_END
104 * .byte ARG_TYPE_END
105 * .byte TEST_ISA @ flags, including ISA being tested
106 * .short 50f-0f @ offset of 'test_before'
107 * .short 2f-0f @ offset of 'test_after2' (if relevant)
108 * .short 99f-0f @ offset of 'test_done'
109 * @ start of test case code...
110 * 0:
111 * .code TEST_ISA @ switch to ISA being tested
112 *
113 * @ TEST_INSTRUCTION
114 * 50: nop @ location for 'test_before' probe
115 * 1: mov r0, r7 @ the test case instruction 'test_insn'
116 * nop @ location for 'test_after' probe
117 *
118 * @ TESTCASE_END
119 * 2:
120 * 99: bl __kprobes_test_case_end_##TEST_ISA
121 * .code NORMAL_ISA
122 *
123 * When the above is executed the following happens...
124 *
125 * __kprobes_test_case_start() is an assembler wrapper which sets up space
126 * for a stack buffer and calls the C function kprobes_test_case_start().
127 * This C function will do some initial processing of the inline data and
128 * setup some global state. It then inserts the test_before and test_after
129 * kprobes and returns a value which causes the assembler wrapper to jump
130 * to the start of the test case code, (local label '0').
131 *
132 * When the test case code executes, the test_before probe will be hit and
133 * test_before_post_handler will call setup_test_context(). This fills the
134 * stack buffer and CPU registers with a test pattern and then processes
135 * the test case arguments. In our example there is one TEST_ARG_REG which
136 * indicates that R7 should be loaded with the value 0x12345678.
137 *
138 * When the test_before probe ends, the test case continues and executes
139 * the "mov r0, r7" instruction. It then hits the test_after probe and the
140 * pre-handler for this (test_after_pre_handler) will save a copy of the
141 * CPU register context. This should now have R0 holding the same value as
142 * R7.
143 *
144 * Finally we get to the call to __kprobes_test_case_end_{32,16}. This is
145 * an assembler wrapper which switches back to the ISA used by the test
146 * code and calls the C function kprobes_test_case_end().
147 *
148 * For each run through the test case, test_case_run_count is incremented
149 * by one. For even runs, kprobes_test_case_end() saves a copy of the
150 * register and stack buffer contents from the test case just run. It then
151 * inserts a kprobe on the test case instruction 'test_insn' and returns a
152 * value to cause the test case code to be re-run.
153 *
154 * For odd numbered runs, kprobes_test_case_end() compares the register and
155 * stack buffer contents to those that were saved on the previous even
156 * numbered run (the one without the kprobe on test_insn). These should be
157 * the same if the kprobe instruction simulation routine is correct.
158 *
159 * The pair of test case runs is repeated with different combinations of
160 * flag values in CPSR and, for Thumb, different ITState. This is
161 * controlled by test_context_cpsr().
162 *
163 * BUILDING TEST CASES
164 * -------------------
165 *
167 * As an aid to building test cases, the stack buffer is initialised with
168 * some special values:
169 *
170 * [SP+13*4] Contains SP+120. This can be used to test instructions
171 * which load a value into SP.
172 *
173 * [SP+15*4] When testing branching instructions using TEST_BRANCH_{F,B},
174 * this holds the target address of the branch, 'test_after2'.
175 * This can be used to test instructions which load a PC value
176 * from memory.
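 *
 * As an illustration (a sketch in the raw inline-data form documented
 * above, not taken from a real test case), a test of an instruction
 * which loads SP from memory could pass an ARG_TYPE_PTR argument:
 *
 *	.byte	ARG_TYPE_PTR
 *	.byte	1		@ register r1...
 *	.short	0
 *	.word	13*4		@ ...points at the stack slot holding SP+120
 *
 * so that executing "ldr sp, [r1]" loads the prepared value SP+120.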
177 */
178
179 #include <linux/kernel.h>
180 #include <linux/module.h>
181 #include <linux/kprobes.h>
182
183 #include "kprobes.h"
184 #include "kprobes-test.h"
185
186
187 /*
188 * Test basic API
189 */
190
191 static bool test_regs_ok;
192 static int test_func_instance;
193 static int pre_handler_called;
194 static int post_handler_called;
195 static int jprobe_func_called;
196 static int kretprobe_handler_called;
197
198 #define FUNC_ARG1 0x12345678
199 #define FUNC_ARG2 0xabcdef
200
201
202 #ifndef CONFIG_THUMB2_KERNEL
203
204 long arm_func(long r0, long r1);
205
206 static void __used __naked __arm_kprobes_test_func(void)
207 {
208 __asm__ __volatile__ (
209 ".arm \n\t"
210 ".type arm_func, %%function \n\t"
211 "arm_func: \n\t"
212 "adds r0, r0, r1 \n\t"
213 "bx lr \n\t"
214 ".code "NORMAL_ISA /* Back to Thumb if necessary */
215 : : : "r0", "r1", "cc"
216 );
217 }
218
219 #else /* CONFIG_THUMB2_KERNEL */
220
221 long thumb16_func(long r0, long r1);
222 long thumb32even_func(long r0, long r1);
223 long thumb32odd_func(long r0, long r1);
224
225 static void __used __naked __thumb_kprobes_test_funcs(void)
226 {
227 __asm__ __volatile__ (
228 ".type thumb16_func, %%function \n\t"
229 "thumb16_func: \n\t"
230 "adds.n r0, r0, r1 \n\t"
231 "bx lr \n\t"
232
233 ".align \n\t"
234 ".type thumb32even_func, %%function \n\t"
235 "thumb32even_func: \n\t"
236 "adds.w r0, r0, r1 \n\t"
237 "bx lr \n\t"
238
239 ".align \n\t"
240 "nop.n \n\t"
241 ".type thumb32odd_func, %%function \n\t"
242 "thumb32odd_func: \n\t"
243 "adds.w r0, r0, r1 \n\t"
244 "bx lr \n\t"
245
246 : : : "r0", "r1", "cc"
247 );
248 }
249
250 #endif /* CONFIG_THUMB2_KERNEL */
251
252
253 static int call_test_func(long (*func)(long, long), bool check_test_regs)
254 {
255 long ret;
256
257 ++test_func_instance;
258 test_regs_ok = false;
259
260 ret = (*func)(FUNC_ARG1, FUNC_ARG2);
261 if (ret != FUNC_ARG1 + FUNC_ARG2) {
262 pr_err("FAIL: call_test_func: func returned %lx\n", ret);
263 return false;
264 }
265
266 if (check_test_regs && !test_regs_ok) {
267 pr_err("FAIL: test regs not OK\n");
268 return false;
269 }
270
271 return true;
272 }
273
274 static int __kprobes pre_handler(struct kprobe *p, struct pt_regs *regs)
275 {
276 pre_handler_called = test_func_instance;
277 if (regs->ARM_r0 == FUNC_ARG1 && regs->ARM_r1 == FUNC_ARG2)
278 test_regs_ok = true;
279 return 0;
280 }
281
282 static void __kprobes post_handler(struct kprobe *p, struct pt_regs *regs,
283 unsigned long flags)
284 {
285 post_handler_called = test_func_instance;
286 if (regs->ARM_r0 != FUNC_ARG1 + FUNC_ARG2 || regs->ARM_r1 != FUNC_ARG2)
287 test_regs_ok = false;
288 }
289
290 static struct kprobe the_kprobe = {
291 .addr = 0,
292 .pre_handler = pre_handler,
293 .post_handler = post_handler
294 };
295
296 static int test_kprobe(long (*func)(long, long))
297 {
298 int ret;
299
300 the_kprobe.addr = (kprobe_opcode_t *)func;
301 ret = register_kprobe(&the_kprobe);
302 if (ret < 0) {
303 pr_err("FAIL: register_kprobe failed with %d\n", ret);
304 return ret;
305 }
306
307 ret = call_test_func(func, true);
308
309 unregister_kprobe(&the_kprobe);
310 the_kprobe.flags = 0; /* Clear disable flag to allow reuse */
311
312 if (!ret)
313 return -EINVAL;
314 if (pre_handler_called != test_func_instance) {
315 pr_err("FAIL: kprobe pre_handler not called\n");
316 return -EINVAL;
317 }
318 if (post_handler_called != test_func_instance) {
319 pr_err("FAIL: kprobe post_handler not called\n");
320 return -EINVAL;
321 }
322 if (!call_test_func(func, false))
323 return -EINVAL;
324 if (pre_handler_called == test_func_instance ||
325 post_handler_called == test_func_instance) {
326 pr_err("FAIL: probe called after unregistering\n");
327 return -EINVAL;
328 }
329
330 return 0;
331 }
332
333 static void __kprobes jprobe_func(long r0, long r1)
334 {
335 jprobe_func_called = test_func_instance;
336 if (r0 == FUNC_ARG1 && r1 == FUNC_ARG2)
337 test_regs_ok = true;
338 jprobe_return();
339 }
340
341 static struct jprobe the_jprobe = {
342 .entry = jprobe_func,
343 };
344
345 static int test_jprobe(long (*func)(long, long))
346 {
347 int ret;
348
349 the_jprobe.kp.addr = (kprobe_opcode_t *)func;
350 ret = register_jprobe(&the_jprobe);
351 if (ret < 0) {
352 pr_err("FAIL: register_jprobe failed with %d\n", ret);
353 return ret;
354 }
355
356 ret = call_test_func(func, true);
357
358 unregister_jprobe(&the_jprobe);
359 the_jprobe.kp.flags = 0; /* Clear disable flag to allow reuse */
360
361 if (!ret)
362 return -EINVAL;
363 if (jprobe_func_called != test_func_instance) {
364 pr_err("FAIL: jprobe handler function not called\n");
365 return -EINVAL;
366 }
367 if (!call_test_func(func, false))
368 return -EINVAL;
369 if (jprobe_func_called == test_func_instance) {
370 pr_err("FAIL: probe called after unregistering\n");
371 return -EINVAL;
372 }
373
374 return 0;
375 }
376
377 static int __kprobes
378 kretprobe_handler(struct kretprobe_instance *ri, struct pt_regs *regs)
379 {
380 kretprobe_handler_called = test_func_instance;
381 if (regs_return_value(regs) == FUNC_ARG1 + FUNC_ARG2)
382 test_regs_ok = true;
383 return 0;
384 }
385
386 static struct kretprobe the_kretprobe = {
387 .handler = kretprobe_handler,
388 };
389
390 static int test_kretprobe(long (*func)(long, long))
391 {
392 int ret;
393
394 the_kretprobe.kp.addr = (kprobe_opcode_t *)func;
395 ret = register_kretprobe(&the_kretprobe);
396 if (ret < 0) {
397 pr_err("FAIL: register_kretprobe failed with %d\n", ret);
398 return ret;
399 }
400
401 ret = call_test_func(func, true);
402
403 unregister_kretprobe(&the_kretprobe);
404 the_kretprobe.kp.flags = 0; /* Clear disable flag to allow reuse */
405
406 if (!ret)
407 return -EINVAL;
408 if (kretprobe_handler_called != test_func_instance) {
409 pr_err("FAIL: kretprobe handler not called\n");
410 return -EINVAL;
411 }
412 if (!call_test_func(func, false))
413 return -EINVAL;
414 if (kretprobe_handler_called == test_func_instance) {
415 pr_err("FAIL: kretprobe called after unregistering\n");
416 return -EINVAL;
417 }
418
419 return 0;
420 }
421
422 static int run_api_tests(long (*func)(long, long))
423 {
424 int ret;
425
426 pr_info(" kprobe\n");
427 ret = test_kprobe(func);
428 if (ret < 0)
429 return ret;
430
431 pr_info(" jprobe\n");
432 ret = test_jprobe(func);
433 if (ret < 0)
434 return ret;
435
436 pr_info(" kretprobe\n");
437 ret = test_kretprobe(func);
438 if (ret < 0)
439 return ret;
440
441 return 0;
442 }
443
444
445 /*
446 * Framework for instruction set test cases
447 */
448
449 void __naked __kprobes_test_case_start(void)
450 {
451 __asm__ __volatile__ (
452 "stmdb sp!, {r4-r11} \n\t"
453 "sub sp, sp, #"__stringify(TEST_MEMORY_SIZE)"\n\t"
454 "bic r0, lr, #1 @ r0 = inline title string \n\t"
455 "mov r1, sp \n\t"
456 "bl kprobes_test_case_start \n\t"
457 "bx r0 \n\t"
458 );
459 }
460
461 #ifndef CONFIG_THUMB2_KERNEL
462
463 void __naked __kprobes_test_case_end_32(void)
464 {
465 __asm__ __volatile__ (
466 "mov r4, lr \n\t"
467 "bl kprobes_test_case_end \n\t"
468 "cmp r0, #0 \n\t"
469 "movne pc, r0 \n\t"
470 "mov r0, r4 \n\t"
471 "add sp, sp, #"__stringify(TEST_MEMORY_SIZE)"\n\t"
472 "ldmia sp!, {r4-r11} \n\t"
473 "mov pc, r0 \n\t"
474 );
475 }
476
477 #else /* CONFIG_THUMB2_KERNEL */
478
479 void __naked __kprobes_test_case_end_16(void)
480 {
481 __asm__ __volatile__ (
482 "mov r4, lr \n\t"
483 "bl kprobes_test_case_end \n\t"
484 "cmp r0, #0 \n\t"
485 "bxne r0 \n\t"
486 "mov r0, r4 \n\t"
487 "add sp, sp, #"__stringify(TEST_MEMORY_SIZE)"\n\t"
488 "ldmia sp!, {r4-r11} \n\t"
489 "bx r0 \n\t"
490 );
491 }
492
493 void __naked __kprobes_test_case_end_32(void)
494 {
495 __asm__ __volatile__ (
496 ".arm \n\t"
497 "orr lr, lr, #1 @ will return to Thumb code \n\t"
498 "ldr pc, 1f \n\t"
499 "1: \n\t"
500 ".word __kprobes_test_case_end_16 \n\t"
501 );
502 }
503
504 #endif
505
506
507 int kprobe_test_flags;
508 int kprobe_test_cc_position;
509
510 static int test_try_count;
511 static int test_pass_count;
512 static int test_fail_count;
513
514 static struct pt_regs initial_regs;
515 static struct pt_regs expected_regs;
516 static struct pt_regs result_regs;
517
518 static u32 expected_memory[TEST_MEMORY_SIZE/sizeof(u32)];
519
520 static const char *current_title;
521 static struct test_arg *current_args;
522 static u32 *current_stack;
523 static uintptr_t current_branch_target;
524
525 static uintptr_t current_code_start;
526 static kprobe_opcode_t current_instruction;
527
528
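/*
 * Values of test_case_run_count which flag that the outcome of a test case
 * was already decided by kprobes_test_case_start().
 */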
529 #define TEST_CASE_PASSED -1
530 #define TEST_CASE_FAILED -2
531
532 static int test_case_run_count;
533 static bool test_case_is_thumb;
534 static int test_instance;
535
536 /*
537 * We ignore the state of the imprecise abort disable flag (CPSR.A) because this
538 * can change randomly as the kernel doesn't take care to preserve or initialise
539 * this across context switches. Also, with Security Extensions, the flag may
540 * not be under control of the kernel; for this reason we ignore the state of
541 * the FIQ disable flag CPSR.F as well.
542 */
543 #define PSR_IGNORE_BITS (PSR_A_BIT | PSR_F_BIT)
544
545 static unsigned long test_check_cc(int cc, unsigned long cpsr)
546 {
547 unsigned long temp;
548
549 switch (cc) {
550 case 0x0: /* eq */
551 return cpsr & PSR_Z_BIT;
552
553 case 0x1: /* ne */
554 return (~cpsr) & PSR_Z_BIT;
555
556 case 0x2: /* cs */
557 return cpsr & PSR_C_BIT;
558
559 case 0x3: /* cc */
560 return (~cpsr) & PSR_C_BIT;
561
562 case 0x4: /* mi */
563 return cpsr & PSR_N_BIT;
564
565 case 0x5: /* pl */
566 return (~cpsr) & PSR_N_BIT;
567
568 case 0x6: /* vs */
569 return cpsr & PSR_V_BIT;
570
571 case 0x7: /* vc */
572 return (~cpsr) & PSR_V_BIT;
573
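	/*
	 * For the remaining conditions the flag bits are combined in place:
	 * shifting CPSR right by one aligns the Z bit (bit 30) with the C
	 * bit (bit 29), and shifting left by three aligns the V bit (bit 28)
	 * with the N bit (bit 31).
	 */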
574 case 0x8: /* hi */
575 cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
576 return cpsr & PSR_C_BIT;
577
578 case 0x9: /* ls */
579 cpsr &= ~(cpsr >> 1); /* PSR_C_BIT &= ~PSR_Z_BIT */
580 return (~cpsr) & PSR_C_BIT;
581
582 case 0xa: /* ge */
583 cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
584 return (~cpsr) & PSR_N_BIT;
585
586 case 0xb: /* lt */
587 cpsr ^= (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
588 return cpsr & PSR_N_BIT;
589
590 case 0xc: /* gt */
591 temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
592 temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */
593 return (~temp) & PSR_N_BIT;
594
595 case 0xd: /* le */
596 temp = cpsr ^ (cpsr << 3); /* PSR_N_BIT ^= PSR_V_BIT */
597 temp |= (cpsr << 1); /* PSR_N_BIT |= PSR_Z_BIT */
598 return temp & PSR_N_BIT;
599
600 case 0xe: /* al */
601 case 0xf: /* unconditional */
602 return true;
603 }
604 BUG();
605 return false;
606 }
607
608 static int is_last_scenario;
609 static int probe_should_run; /* 0 = no, 1 = yes, -1 = unknown */
610 static int memory_needs_checking;
611
612 static unsigned long test_context_cpsr(int scenario)
613 {
614 unsigned long cpsr;
615
616 probe_should_run = 1;
617
618 /* Default case is that we cycle through 16 combinations of flags */
619 cpsr = (scenario & 0xf) << 28; /* N,Z,C,V flags */
620 cpsr |= (scenario & 0xf) << 16; /* GE flags */
621 cpsr |= (scenario & 0x1) << 27; /* Toggle Q flag */
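	/*
	 * For example, scenario 5 (binary 0101) gives N=0 Z=1 C=0 V=1,
	 * GE flags 0101 and the Q flag set.
	 */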
622
623 if (!test_case_is_thumb) {
624 /* Testing ARM code */
625 probe_should_run = test_check_cc(current_instruction >> 28, cpsr) != 0;
626 if (scenario == 15)
627 is_last_scenario = true;
628
629 } else if (kprobe_test_flags & TEST_FLAG_NO_ITBLOCK) {
630 /* Testing Thumb code without setting ITSTATE */
631 if (kprobe_test_cc_position) {
632 int cc = (current_instruction >> kprobe_test_cc_position) & 0xf;
633 probe_should_run = test_check_cc(cc, cpsr) != 0;
634 }
635
636 if (scenario == 15)
637 is_last_scenario = true;
638
639 } else if (kprobe_test_flags & TEST_FLAG_FULL_ITBLOCK) {
640 /* Testing Thumb code with all combinations of ITSTATE */
641 unsigned x = (scenario >> 4);
642 unsigned cond_base = x % 7; /* ITSTATE<7:5> */
643 unsigned mask = x / 7 + 2; /* ITSTATE<4:0>, bits reversed */
644
645 if (mask > 0x1f) {
646 /* Finish by testing state from instruction 'itt al' */
647 cond_base = 7;
648 mask = 0x4;
649 if ((scenario & 0xf) == 0xf)
650 is_last_scenario = true;
651 }
652
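		/*
		 * ITSTATE<7:2> lives in CPSR bits 15:10 and ITSTATE<1:0> in
		 * bits 26:25, hence the scattered shifts below.
		 */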
653 cpsr |= cond_base << 13; /* ITSTATE<7:5> */
654 cpsr |= (mask & 0x1) << 12; /* ITSTATE<4> */
655 cpsr |= (mask & 0x2) << 10; /* ITSTATE<3> */
656 cpsr |= (mask & 0x4) << 8; /* ITSTATE<2> */
657 cpsr |= (mask & 0x8) << 23; /* ITSTATE<1> */
658 cpsr |= (mask & 0x10) << 21; /* ITSTATE<0> */
659
660 probe_should_run = test_check_cc((cpsr >> 12) & 0xf, cpsr) != 0;
661
662 } else {
663 /* Testing Thumb code with several combinations of ITSTATE */
664 switch (scenario) {
665 case 16: /* Clear NZCV flags and 'it eq' state (false as Z=0) */
666 cpsr = 0x00000800;
667 probe_should_run = 0;
668 break;
669 case 17: /* Set NZCV flags and 'it vc' state (false as V=1) */
670 cpsr = 0xf0007800;
671 probe_should_run = 0;
672 break;
673 case 18: /* Clear NZCV flags and 'it ls' state (true as C=0) */
674 cpsr = 0x00009800;
675 break;
676 case 19: /* Set NZCV flags and 'it cs' state (true as C=1) */
677 cpsr = 0xf0002800;
678 is_last_scenario = true;
679 break;
680 }
681 }
682
683 return cpsr;
684 }
685
686 static void setup_test_context(struct pt_regs *regs)
687 {
688 int scenario = test_case_run_count>>1;
689 unsigned long val;
690 struct test_arg *args;
691 int i;
692
693 is_last_scenario = false;
694 memory_needs_checking = false;
695
696 /* Initialise test memory on stack */
697 val = (scenario & 1) ? VALM : ~VALM;
698 for (i = 0; i < TEST_MEMORY_SIZE / sizeof(current_stack[0]); ++i)
699 current_stack[i] = val + (i << 8);
700 /* Put target of branch on stack for tests which load PC from memory */
701 if (current_branch_target)
702 current_stack[15] = current_branch_target;
703 /* Put a value for SP on stack for tests which load SP from memory */
704 current_stack[13] = (u32)current_stack + 120;
705
706 /* Initialise register values to their default state */
707 val = (scenario & 2) ? VALR : ~VALR;
708 for (i = 0; i < 13; ++i)
709 regs->uregs[i] = val ^ (i << 8);
710 regs->ARM_lr = val ^ (14 << 8);
711 regs->ARM_cpsr &= ~(APSR_MASK | PSR_IT_MASK);
712 regs->ARM_cpsr |= test_context_cpsr(scenario);
713
714 /* Perform testcase specific register setup */
715 args = current_args;
716 for (; args[0].type != ARG_TYPE_END; ++args)
717 switch (args[0].type) {
718 case ARG_TYPE_REG: {
719 struct test_arg_regptr *arg =
720 (struct test_arg_regptr *)args;
721 regs->uregs[arg->reg] = arg->val;
722 break;
723 }
724 case ARG_TYPE_PTR: {
725 struct test_arg_regptr *arg =
726 (struct test_arg_regptr *)args;
727 regs->uregs[arg->reg] =
728 (unsigned long)current_stack + arg->val;
729 memory_needs_checking = true;
730 break;
731 }
732 case ARG_TYPE_MEM: {
733 struct test_arg_mem *arg = (struct test_arg_mem *)args;
734 current_stack[arg->index] = arg->val;
735 break;
736 }
737 default:
738 break;
739 }
740 }
741
742 struct test_probe {
743 struct kprobe kprobe;
744 bool registered;
745 int hit;
746 };
747
748 static void unregister_test_probe(struct test_probe *probe)
749 {
750 if (probe->registered) {
751 unregister_kprobe(&probe->kprobe);
752 probe->kprobe.flags = 0; /* Clear disable flag to allow reuse */
753 }
754 probe->registered = false;
755 }
756
757 static int register_test_probe(struct test_probe *probe)
758 {
759 int ret;
760
761 if (probe->registered)
762 BUG();
763
764 ret = register_kprobe(&probe->kprobe);
765 if (ret >= 0) {
766 probe->registered = true;
767 probe->hit = -1;
768 }
769 return ret;
770 }
771
772 static int __kprobes
773 test_before_pre_handler(struct kprobe *p, struct pt_regs *regs)
774 {
775 container_of(p, struct test_probe, kprobe)->hit = test_instance;
776 return 0;
777 }
778
779 static void __kprobes
780 test_before_post_handler(struct kprobe *p, struct pt_regs *regs,
781 unsigned long flags)
782 {
783 setup_test_context(regs);
784 initial_regs = *regs;
785 initial_regs.ARM_cpsr &= ~PSR_IGNORE_BITS;
786 }
787
788 static int __kprobes
789 test_case_pre_handler(struct kprobe *p, struct pt_regs *regs)
790 {
791 container_of(p, struct test_probe, kprobe)->hit = test_instance;
792 return 0;
793 }
794
795 static int __kprobes
796 test_after_pre_handler(struct kprobe *p, struct pt_regs *regs)
797 {
798 if (container_of(p, struct test_probe, kprobe)->hit == test_instance)
799 return 0; /* Already run for this test instance */
800
801 result_regs = *regs;
802 result_regs.ARM_cpsr &= ~PSR_IGNORE_BITS;
803
804 /* Undo any changes done to SP by the test case */
805 regs->ARM_sp = (unsigned long)current_stack;
806
807 container_of(p, struct test_probe, kprobe)->hit = test_instance;
808 return 0;
809 }
810
811 static struct test_probe test_before_probe = {
812 .kprobe.pre_handler = test_before_pre_handler,
813 .kprobe.post_handler = test_before_post_handler,
814 };
815
816 static struct test_probe test_case_probe = {
817 .kprobe.pre_handler = test_case_pre_handler,
818 };
819
820 static struct test_probe test_after_probe = {
821 .kprobe.pre_handler = test_after_pre_handler,
822 };
823
824 static struct test_probe test_after2_probe = {
825 .kprobe.pre_handler = test_after_pre_handler,
826 };
827
828 static void test_case_cleanup(void)
829 {
830 unregister_test_probe(&test_before_probe);
831 unregister_test_probe(&test_case_probe);
832 unregister_test_probe(&test_after_probe);
833 unregister_test_probe(&test_after2_probe);
834 }
835
836 static void print_registers(struct pt_regs *regs)
837 {
838 pr_err("r0 %08lx | r1 %08lx | r2 %08lx | r3 %08lx\n",
839 regs->ARM_r0, regs->ARM_r1, regs->ARM_r2, regs->ARM_r3);
840 pr_err("r4 %08lx | r5 %08lx | r6 %08lx | r7 %08lx\n",
841 regs->ARM_r4, regs->ARM_r5, regs->ARM_r6, regs->ARM_r7);
842 pr_err("r8 %08lx | r9 %08lx | r10 %08lx | r11 %08lx\n",
843 regs->ARM_r8, regs->ARM_r9, regs->ARM_r10, regs->ARM_fp);
844 pr_err("r12 %08lx | sp %08lx | lr %08lx | pc %08lx\n",
845 regs->ARM_ip, regs->ARM_sp, regs->ARM_lr, regs->ARM_pc);
846 pr_err("cpsr %08lx\n", regs->ARM_cpsr);
847 }
848
849 static void print_memory(u32 *mem, size_t size)
850 {
851 int i;
852 for (i = 0; i < size / sizeof(u32); i += 4)
853 pr_err("%08x %08x %08x %08x\n", mem[i], mem[i+1],
854 mem[i+2], mem[i+3]);
855 }
856
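/*
 * Size of the part of the stack test buffer lying at or above the test
 * case's final SP, i.e. the region compared between the run without and
 * the run with a probe on the test instruction.
 */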
857 static size_t expected_memory_size(u32 *sp)
858 {
859 size_t size = sizeof(expected_memory);
860 int offset = (uintptr_t)sp - (uintptr_t)current_stack;
861 if (offset > 0)
862 size -= offset;
863 return size;
864 }
865
866 static void test_case_failed(const char *message)
867 {
868 test_case_cleanup();
869
870 pr_err("FAIL: %s\n", message);
871 pr_err("FAIL: Test %s\n", current_title);
872 pr_err("FAIL: Scenario %d\n", test_case_run_count >> 1);
873 }
874
875 static unsigned long next_instruction(unsigned long pc)
876 {
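	/*
	 * Thumb code addresses have bit 0 set, so pc - 1 is the real
	 * instruction address; a narrow (16-bit) instruction advances pc
	 * by 2, anything else by 4.
	 */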
877 #ifdef CONFIG_THUMB2_KERNEL
878 if ((pc & 1) && !is_wide_instruction(*(u16 *)(pc - 1)))
879 return pc + 2;
880 else
881 #endif
882 return pc + 4;
883 }
884
885 static uintptr_t __used kprobes_test_case_start(const char *title, void *stack)
886 {
887 struct test_arg *args;
888 struct test_arg_end *end_arg;
889 unsigned long test_code;
890
891 args = (struct test_arg *)PTR_ALIGN(title + strlen(title) + 1, 4);
892
893 current_title = title;
894 current_args = args;
895 current_stack = stack;
896
897 ++test_try_count;
898
899 while (args->type != ARG_TYPE_END)
900 ++args;
901 end_arg = (struct test_arg_end *)args;
902
903 test_code = (unsigned long)(args + 1); /* Code starts after args */
904
905 test_case_is_thumb = end_arg->flags & ARG_FLAG_THUMB;
906 if (test_case_is_thumb)
907 test_code |= 1;
908
909 current_code_start = test_code;
910
911 current_branch_target = 0;
912 if (end_arg->branch_offset != end_arg->end_offset)
913 current_branch_target = test_code + end_arg->branch_offset;
914
915 test_code += end_arg->code_offset;
916 test_before_probe.kprobe.addr = (kprobe_opcode_t *)test_code;
917
918 test_code = next_instruction(test_code);
919 test_case_probe.kprobe.addr = (kprobe_opcode_t *)test_code;
920
921 if (test_case_is_thumb) {
922 u16 *p = (u16 *)(test_code & ~1);
923 current_instruction = p[0];
924 if (is_wide_instruction(current_instruction)) {
925 current_instruction <<= 16;
926 current_instruction |= p[1];
927 }
928 } else {
929 current_instruction = *(u32 *)test_code;
930 }
931
932 if (current_title[0] == '.')
933 verbose("%s\n", current_title);
934 else
935 verbose("%s\t@ %0*x\n", current_title,
936 test_case_is_thumb ? 4 : 8,
937 current_instruction);
938
939 test_code = next_instruction(test_code);
940 test_after_probe.kprobe.addr = (kprobe_opcode_t *)test_code;
941
942 if (kprobe_test_flags & TEST_FLAG_NARROW_INSTR) {
943 if (!test_case_is_thumb ||
944 is_wide_instruction(current_instruction)) {
945 test_case_failed("expected 16-bit instruction");
946 goto fail;
947 }
948 } else {
949 if (test_case_is_thumb &&
950 !is_wide_instruction(current_instruction)) {
951 test_case_failed("expected 32-bit instruction");
952 goto fail;
953 }
954 }
955
956 if (end_arg->flags & ARG_FLAG_UNSUPPORTED) {
957 if (register_test_probe(&test_case_probe) < 0)
958 goto pass;
959 test_case_failed("registered probe for unsupported instruction");
960 goto fail;
961 }
962
963 if (end_arg->flags & ARG_FLAG_SUPPORTED) {
964 if (register_test_probe(&test_case_probe) >= 0)
965 goto pass;
966 test_case_failed("couldn't register probe for supported instruction");
967 goto fail;
968 }
969
970 if (register_test_probe(&test_before_probe) < 0) {
971 test_case_failed("register test_before_probe failed");
972 goto fail;
973 }
974 if (register_test_probe(&test_after_probe) < 0) {
975 test_case_failed("register test_after_probe failed");
976 goto fail;
977 }
978 if (current_branch_target) {
979 test_after2_probe.kprobe.addr =
980 (kprobe_opcode_t *)current_branch_target;
981 if (register_test_probe(&test_after2_probe) < 0) {
982 test_case_failed("register test_after2_probe failed");
983 goto fail;
984 }
985 }
986
987 /* Start first run of test case */
988 test_case_run_count = 0;
989 ++test_instance;
990 return current_code_start;
991 pass:
992 test_case_run_count = TEST_CASE_PASSED;
993 return (uintptr_t)test_after_probe.kprobe.addr;
994 fail:
995 test_case_run_count = TEST_CASE_FAILED;
996 return (uintptr_t)test_after_probe.kprobe.addr;
997 }
998
999 static bool check_test_results(void)
1000 {
1001 size_t mem_size = 0;
1002 u32 *mem = 0;
1003
1004 if (memcmp(&expected_regs, &result_regs, sizeof(expected_regs))) {
1005 test_case_failed("registers differ");
1006 goto fail;
1007 }
1008
1009 if (memory_needs_checking) {
1010 mem = (u32 *)result_regs.ARM_sp;
1011 mem_size = expected_memory_size(mem);
1012 if (memcmp(expected_memory, mem, mem_size)) {
1013 test_case_failed("test memory differs");
1014 goto fail;
1015 }
1016 }
1017
1018 return true;
1019
1020 fail:
1021 pr_err("initial_regs:\n");
1022 print_registers(&initial_regs);
1023 pr_err("expected_regs:\n");
1024 print_registers(&expected_regs);
1025 pr_err("result_regs:\n");
1026 print_registers(&result_regs);
1027
1028 if (mem) {
1029 pr_err("current_stack=%p\n", current_stack);
1030 pr_err("expected_memory:\n");
1031 print_memory(expected_memory, mem_size);
1032 pr_err("result_memory:\n");
1033 print_memory(mem, mem_size);
1034 }
1035
1036 return false;
1037 }
1038
1039 static uintptr_t __used kprobes_test_case_end(void)
1040 {
1041 if (test_case_run_count < 0) {
1042 if (test_case_run_count == TEST_CASE_PASSED)
1043 /* kprobes_test_case_start did all the needed testing */
1044 goto pass;
1045 else
1046 /* kprobes_test_case_start failed */
1047 goto fail;
1048 }
1049
1050 if (test_before_probe.hit != test_instance) {
1051 test_case_failed("test_before_handler not run");
1052 goto fail;
1053 }
1054
1055 if (test_after_probe.hit != test_instance &&
1056 test_after2_probe.hit != test_instance) {
1057 test_case_failed("test_after_handler not run");
1058 goto fail;
1059 }
1060
1061 /*
1062 * Even numbered test runs ran without a probe on the test case so
1063 * we can gather reference results. The subsequent odd numbered run
1064 * will have the probe inserted.
1065 */
1066 if ((test_case_run_count & 1) == 0) {
1067 /* Save results from run without probe */
1068 u32 *mem = (u32 *)result_regs.ARM_sp;
1069 expected_regs = result_regs;
1070 memcpy(expected_memory, mem, expected_memory_size(mem));
1071
1072 /* Insert probe onto test case instruction */
1073 if (register_test_probe(&test_case_probe) < 0) {
1074 test_case_failed("register test_case_probe failed");
1075 goto fail;
1076 }
1077 } else {
1078 /* Check probe ran as expected */
1079 if (probe_should_run == 1) {
1080 if (test_case_probe.hit != test_instance) {
1081 test_case_failed("test_case_handler not run");
1082 goto fail;
1083 }
1084 } else if (probe_should_run == 0) {
1085 if (test_case_probe.hit == test_instance) {
1086 test_case_failed("test_case_handler ran");
1087 goto fail;
1088 }
1089 }
1090
1091 /* Remove probe for any subsequent reference run */
1092 unregister_test_probe(&test_case_probe);
1093
1094 if (!check_test_results())
1095 goto fail;
1096
1097 if (is_last_scenario)
1098 goto pass;
1099 }
1100
1101 /* Do next test run */
1102 ++test_case_run_count;
1103 ++test_instance;
1104 return current_code_start;
1105 fail:
1106 ++test_fail_count;
1107 goto end;
1108 pass:
1109 ++test_pass_count;
1110 end:
1111 test_case_cleanup();
1112 return 0;
1113 }
1114
1115
1116 /*
1117 * Top level test functions
1118 */
1119
1120 static int run_test_cases(void (*tests)(void))
1121 {
1122 pr_info(" Run test cases\n");
1123 tests();
1124
1125 return 0;
1126 }
1127
1128
1129 static int __init run_all_tests(void)
1130 {
1131 int ret = 0;
1132
1133 pr_info("Beginning kprobe tests...\n");
1134
1135 #ifndef CONFIG_THUMB2_KERNEL
1136
1137 pr_info("Probe ARM code\n");
1138 ret = run_api_tests(arm_func);
1139 if (ret)
1140 goto out;
1141
1142 #else /* CONFIG_THUMB2_KERNEL */
1143
1144 pr_info("Probe 16-bit Thumb code\n");
1145 ret = run_api_tests(thumb16_func);
1146 if (ret)
1147 goto out;
1148
1149 pr_info("Probe 32-bit Thumb code, even halfword\n");
1150 ret = run_api_tests(thumb32even_func);
1151 if (ret)
1152 goto out;
1153
1154 pr_info("Probe 32-bit Thumb code, odd halfword\n");
1155 ret = run_api_tests(thumb32odd_func);
1156 if (ret)
1157 goto out;
1158
1159 pr_info("16-bit Thumb instruction simulation\n");
1160 ret = run_test_cases(kprobe_thumb16_test_cases);
1161 if (ret)
1162 goto out;
1163
1164 pr_info("32-bit Thumb instruction simulation\n");
1165 ret = run_test_cases(kprobe_thumb32_test_cases);
1166 if (ret)
1167 goto out;
1168 #endif
1169
1170 pr_info("Total instruction simulation tests=%d, pass=%d fail=%d\n",
1171 test_try_count, test_pass_count, test_fail_count);
1172 if (test_fail_count) {
1173 ret = -EINVAL;
1174 goto out;
1175 }
1176
1177 out:
1178 if (ret == 0)
1179 pr_info("Finished kprobe tests OK\n");
1180 else
1181 pr_err("kprobe tests failed\n");
1182
1183 return ret;
1184 }
1185
1186
1187 /*
1188 * Module setup
1189 */
1190
1191 #ifdef MODULE
1192
1193 static void __exit kprobe_test_exit(void)
1194 {
1195 }
1196
1197 module_init(run_all_tests)
1198 module_exit(kprobe_test_exit)
1199 MODULE_LICENSE("GPL");
1200
1201 #else /* !MODULE */
1202
1203 late_initcall(run_all_tests);
1204
1205 #endif