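# ceph-volume lvm functional test (xenial/filestore/dmcrypt): stop and destroy
# a partition-backed OSD (osd.2) and an LV-backed OSD (osd.0), then redeploy
# both on the same devices, reusing their original OSD IDs.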
- hosts: osds
  become: yes
  tasks:

    - name: stop ceph-osd@2 daemon
      service:
        name: ceph-osd@2
        state: stopped

    - name: stop ceph-osd@0 daemon
      service:
        name: ceph-osd@0
        state: stopped

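# "ceph osd destroy" marks each OSD as destroyed in the OSD map without
# releasing its ID, so the redeploy steps below can reuse IDs 2 and 0;
# --yes-i-really-mean-it is required because the operation is irreversible.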
- hosts: mons
  become: yes
  tasks:

    - name: destroy osd.2
      command: "ceph --cluster {{ cluster }} osd destroy osd.2 --yes-i-really-mean-it"

    - name: destroy osd.0
      command: "ceph --cluster {{ cluster }} osd destroy osd.0 --yes-i-really-mean-it"

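# Back on the OSD hosts, wipe the old devices. "lvm zap --destroy" wipes a
# device and also removes the underlying partition or LV entirely;
# CEPH_VOLUME_DEBUG=1 turns on ceph-volume's debug output for the test run.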
- hosts: osds
  become: yes
  tasks:

    # osd.2 device
    - name: zap /dev/sdd1
      command: "ceph-volume --cluster {{ cluster }} lvm zap /dev/sdd1 --destroy"
      environment:
        CEPH_VOLUME_DEBUG: 1

    - name: zap /dev/sdd2
      command: "ceph-volume --cluster {{ cluster }} lvm zap /dev/sdd2 --destroy"
      environment:
        CEPH_VOLUME_DEBUG: 1

    # the partitions have been completely removed, so re-create them
    - name: re-create partition /dev/sdd for lvm data usage
      parted:
        device: /dev/sdd
        number: 1
        part_start: 0%
        part_end: 50%
        unit: '%'
        label: gpt
        state: present

    - name: re-create partition /dev/sdd for lvm journal usage
      parted:
        device: /dev/sdd
        number: 2
        part_start: 50%
        part_end: 100%
        unit: '%'
        label: gpt
        state: present

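    # "lvm create" runs prepare and activate in a single step; --osd-id 2
    # reuses the ID freed by the destroy above, so the OSD returns as osd.2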
    - name: redeploy osd.2 using /dev/sdd1
      command: "ceph-volume --cluster {{ cluster }} lvm create --filestore --data /dev/sdd1 --journal /dev/sdd2 --osd-id 2"
      environment:
        CEPH_VOLUME_DEBUG: 1

    # osd.0 lv
    - name: zap test_group/data-lv1
      command: "ceph-volume --cluster {{ cluster }} lvm zap test_group/data-lv1"
      environment:
        CEPH_VOLUME_DEBUG: 1

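    # the data LV above is zapped without --destroy, so it survives for
    # reuse; the journal partition gets --destroy and is re-created below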
    - name: zap /dev/sdc1
      command: "ceph-volume --cluster {{ cluster }} lvm zap /dev/sdc1 --destroy"
      environment:
        CEPH_VOLUME_DEBUG: 1

    - name: re-create partition /dev/sdc1
      parted:
        device: /dev/sdc
        number: 1
        part_start: 0%
        part_end: 50%
        unit: '%'
        label: gpt
        state: present

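    # "lvm prepare" sets the OSD up but does not start it; --osd-id 0 reuses
    # the destroyed ID, and the activate task below brings the daemon up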
    - name: prepare osd.0 again using test_group/data-lv1
      command: "ceph-volume --cluster {{ cluster }} lvm prepare --filestore --data test_group/data-lv1 --journal /dev/sdc1 --osd-id 0"
      environment:
        CEPH_VOLUME_DEBUG: 1

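    # "lvm activate --all" activates every OSD that ceph-volume discovers on
    # the host, starting the freshly prepared osd.0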
    - name: activate all to start the previously prepared osd.0
      command: "ceph-volume lvm activate --filestore --all"
      environment:
        CEPH_VOLUME_DEBUG: 1

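    # final sanity check: report the LVs and devices that ceph-volume
    # associates with each OSD on this host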
    - name: list all OSDs
      command: "ceph-volume lvm list"
      environment:
        CEPH_VOLUME_DEBUG: 1