]>
Commit | Line | Data |
---|---|---|
31f18b77 FG |
#!/bin/bash -ex
#
# Exercise health reporting for down OSDs aggregated by CRUSH subtree.
# Builds a 10-OSD map (2 hosts per rack, 2 racks per row, 2 rows) and
# relaxes the markdown limits so OSDs can be cycled down/up repeatedly.

set -u
# Re-assert -e/-x here: shebang flags are ignored when the script is
# invoked explicitly as "bash <script>", and we rely on -e to abort on
# the first failed grep/command.
set -ex

# number of osds = 10
crushtool -o crushmap --build --num_osds 10 host straw 2 rack straw 2 row straw 2 root straw 0
ceph osd setcrushmap -i crushmap
ceph osd tree
# Quote the target so the shell cannot glob-expand "osd.*" against a
# matching filename in the cwd; ceph expects the literal pattern.
ceph tell 'osd.*' injectargs --osd_max_markdown_count 1024 --osd_max_markdown_period 1
# Block until "ceph health" stops mentioning anything down,
# polling once per second.
wait_for_healthy() {
    while ceph health | grep down; do
        sleep 1
    done
}
17 | ||
# Two OSDs on the same host go down: health must aggregate the event
# as "1 host" / "2 osds" and the detail output must name both OSDs.
test_mark_two_osds_same_host_down() {
    ceph osd set noup
    ceph osd down osd.0 osd.1
    ceph health detail
    ceph health | grep "1 host"
    ceph health | grep "2 osds"
    # Escape the dot: unescaped "." is a regex wildcard, so "osd.1"
    # would also match unrelated tokens such as "osd11".
    ceph health detail | grep "osd\.0"
    ceph health detail | grep "osd\.1"
    ceph osd unset noup
    wait_for_healthy
}
29 | ||
# Two OSDs that exhaust a whole host, rack, and row go down: health
# must report the aggregation at every affected CRUSH level.
test_mark_two_osds_same_rack_down() {
    ceph osd set noup
    ceph osd down osd.8 osd.9
    ceph health detail
    ceph health | grep "1 host"
    ceph health | grep "1 rack"
    ceph health | grep "1 row"
    ceph health | grep "2 osds"
    # Escape the dot: unescaped "." is a regex wildcard and would let
    # the pattern match unintended osd ids.
    ceph health detail | grep "osd\.8"
    ceph health detail | grep "osd\.9"
    ceph osd unset noup
    wait_for_healthy
}
43 | ||
# Take down every OSD except the highest-numbered one and check that
# health rolls the outage up to rows, racks, and hosts correctly.
test_mark_all_but_last_osds_down() {
    ceph osd set noup
    # sed '$d' drops the final line of "ceph osd ls", i.e. keeps all
    # osd ids except the last one.
    ceph osd down $(ceph osd ls | sed '$d')
    ceph health detail
    ceph health | grep "1 row"
    ceph health | grep "2 racks"
    ceph health | grep "4 hosts"
    ceph health | grep "9 osds"
    ceph osd unset noup
    wait_for_healthy
}
55 | ||
224ce89b WB |
# Same single-host scenario as test_mark_two_osds_same_host_down, but
# with device classes assigned first to verify the aggregation is not
# confused by class-shadowed CRUSH trees.
test_mark_two_osds_same_host_down_with_classes() {
    ceph osd set noup
    ceph osd crush set-device-class ssd osd.0 osd.2 osd.4 osd.6 osd.8
    ceph osd crush set-device-class hdd osd.1 osd.3 osd.5 osd.7 osd.9
    ceph osd down osd.0 osd.1
    ceph health detail
    ceph health | grep "1 host"
    ceph health | grep "2 osds"
    # Escape the dot: unescaped "." is a regex wildcard, so "osd.1"
    # would also match unrelated tokens such as "osd11".
    ceph health detail | grep "osd\.0"
    ceph health detail | grep "osd\.1"
    ceph osd unset noup
    wait_for_healthy
}
69 | ||
31f18b77 FG |
# Run each scenario in turn; the -e flag from the shebang aborts the
# script on the first failing command, so reaching "exit 0" means pass.
test_mark_two_osds_same_host_down
test_mark_two_osds_same_rack_down
test_mark_all_but_last_osds_down
test_mark_two_osds_same_host_down_with_classes

exit 0