# tox.ini — functional LVM tests for ceph-volume.
# Read by tox (Python configparser dialect): '#' full-line comments only,
# factor-conditional values (env-name prefixes) select per-scenario settings.
# Each environment spins up Vagrant VMs, deploys a cluster with ceph-ansible,
# and validates it with testinfra, including a reboot/redeploy cycle.

[tox]
# scenarios = {objectstore} x {workflow}; all run on CentOS 8 boxes
envlist = centos8-{filestore,bluestore}-{create,prepare_activate,dmcrypt}
skipsdist = True

[testenv]
deps = mock
# external commands tox is allowed to invoke from commands= below
whitelist_externals =
    vagrant
    bash
    git
    cp
    sleep
passenv=*
setenv=
  ANSIBLE_CONFIG = {envdir}/tmp/ceph-ansible/ansible.cfg
  ANSIBLE_SSH_ARGS = -F {changedir}/vagrant_ssh_config -o ControlMaster=auto -o ControlPersist=600s -o PreferredAuthentications=publickey
  ANSIBLE_STDOUT_CALLBACK = debug
  VAGRANT_CWD = {changedir}
  CEPH_VOLUME_DEBUG = 1
  DEBIAN_FRONTEND=noninteractive
# per-scenario working directory: holds the Vagrantfile, hosts inventory,
# and the setup/test playbooks for that objectstore/workflow combination
changedir=
  # plain/unencrypted
  centos8-filestore-create: {toxinidir}/centos8/filestore/create
  centos8-bluestore-create: {toxinidir}/centos8/bluestore/create
  # dmcrypt
  centos8-filestore-dmcrypt: {toxinidir}/centos8/filestore/dmcrypt
  centos8-bluestore-dmcrypt: {toxinidir}/centos8/bluestore/dmcrypt
  # TODO: these are placeholders for now, eventually we want to
  # test the prepare/activate workflow of ceph-volume as well
  # NOTE: paths previously pointed at the removed xenial/ layout; they must
  # live under centos8/ to match the centos8-only envlist above.
  centos8-filestore-prepare_activate: {toxinidir}/centos8/filestore/prepare_activate
  centos8-bluestore-prepare_activate: {toxinidir}/centos8/bluestore/prepare_activate
commands=
  # fetch ceph-ansible; branch and clone URL are overridable via env vars
  git clone -b {env:CEPH_ANSIBLE_BRANCH:master} --single-branch {env:CEPH_ANSIBLE_CLONE:"https://github.com/ceph/ceph-ansible.git"} {envdir}/tmp/ceph-ansible
  pip install -r {envdir}/tmp/ceph-ansible/tests/requirements.txt

  # bring up the VMs and record how to ssh into them
  bash {toxinidir}/../scripts/vagrant_up.sh {env:VAGRANT_UP_FLAGS:"--no-provision"} {posargs:--provider=virtualbox}
  bash {toxinidir}/../scripts/generate_ssh_config.sh {changedir}

  # create logical volumes to test with on the vms
  ansible-playbook -vv -i {changedir}/hosts {envdir}/tmp/ceph-ansible/tests/functional/lvm_setup.yml

  # ad-hoc/local test setup for lvm
  ansible-playbook -vv -i {changedir}/hosts {changedir}/setup.yml

  cp {toxinidir}/../playbooks/deploy.yml {envdir}/tmp/ceph-ansible

  # use ceph-ansible to deploy a ceph cluster on the vms
  ansible-playbook -vv -i {changedir}/hosts {envdir}/tmp/ceph-ansible/deploy.yml --extra-vars "fetch_directory={changedir}/fetch ceph_dev_branch={env:CEPH_DEV_BRANCH:master} ceph_dev_sha1={env:CEPH_DEV_SHA1:latest} toxinidir={toxinidir}"

  # prepare nodes for testing with testinfra
  ansible-playbook -vv -i {changedir}/hosts {envdir}/tmp/ceph-ansible/tests/functional/setup.yml

  # test cluster state using testinfra
  py.test --reruns 5 --reruns-delay 10 -n 4 --sudo -v --connection=ansible --ssh-config={changedir}/vagrant_ssh_config --ansible-inventory={changedir}/hosts {toxinidir}/../tests

  # reboot all vms - attempt
  bash {toxinidir}/../scripts/vagrant_reload.sh {env:VAGRANT_UP_FLAGS:"--no-provision"} {posargs:--provider=virtualbox}

  # after a reboot, osds may take about 20 seconds to come back up
  sleep 30

  # retest to ensure cluster came back up correctly after rebooting
  py.test --reruns 5 --reruns-delay 10 -n 4 --sudo -v --connection=ansible --ssh-config={changedir}/vagrant_ssh_config --ansible-inventory={changedir}/hosts {toxinidir}/../tests

  # destroy an OSD, zap it's device and recreate it using it's ID
  ansible-playbook -vv -i {changedir}/hosts {changedir}/test.yml

  # retest to ensure cluster came back up correctly
  py.test --reruns 5 --reruns-delay 10 -n 4 --sudo -v --connection=ansible --ssh-config={changedir}/vagrant_ssh_config --ansible-inventory={changedir}/hosts {toxinidir}/../tests

  vagrant destroy {env:VAGRANT_DESTROY_FLAGS:"--force"}