]> git.proxmox.com Git - ceph.git/blame - ceph/src/ceph-volume/ceph_volume/tests/functional/simple/tox.ini
import new upstream nautilus stable release 14.2.8
[ceph.git] / ceph / src / ceph-volume / ceph_volume / tests / functional / simple / tox.ini
CommitLineData
# tox.ini — functional test matrix for "ceph-volume simple" (read by tox/configparser).
# Each env boots vagrant VMs, deploys Ceph with ceph-ansible, then verifies that
# `ceph-volume simple` can take over OSDs originally deployed with ceph-disk.
[tox]
# matrix: 2 distros x 2 objectstores x 3 scenarios = 12 environments
envlist = {centos7,xenial}-{filestore,bluestore}-{activate,dmcrypt_plain,dmcrypt_luks}
# this directory is not an installable Python package; skip sdist creation
skipsdist = True
4
[testenv]
deps = mock
# non-Python tools invoked from `commands`; tox warns unless they are listed here
# NOTE(review): `whitelist_externals` is the spelling for the tox version in use
# (renamed `allowlist_externals` in tox >= 3.18) — confirm before renaming.
whitelist_externals =
    vagrant
    bash
    git
    sleep
    cp
# pass the whole caller environment through to the commands
passenv = *
setenv =
    ANSIBLE_SSH_ARGS = -F {changedir}/vagrant_ssh_config
    ANSIBLE_ACTION_PLUGINS = {envdir}/tmp/ceph-ansible/plugins/actions
    ANSIBLE_STDOUT_CALLBACK = debug
    ANSIBLE_RETRY_FILES_ENABLED = False
    ANSIBLE_SSH_RETRIES = 5
    VAGRANT_CWD = {changedir}
    CEPH_VOLUME_DEBUG = 1
    DEBIAN_FRONTEND = noninteractive
# per-environment scenario directory (Vagrantfile, hosts, test.yml live there)
changedir =
    centos7-filestore-activate: {toxinidir}/centos7/filestore/activate
    centos7-bluestore-activate: {toxinidir}/centos7/bluestore/activate
    xenial-filestore-activate: {toxinidir}/xenial/filestore/activate
    xenial-bluestore-activate: {toxinidir}/xenial/bluestore/activate
    xenial-bluestore-dmcrypt_plain: {toxinidir}/xenial/bluestore/dmcrypt-plain
    xenial-bluestore-dmcrypt_luks: {toxinidir}/xenial/bluestore/dmcrypt-luks
    xenial-filestore-dmcrypt_plain: {toxinidir}/xenial/filestore/dmcrypt-plain
    xenial-filestore-dmcrypt_luks: {toxinidir}/xenial/filestore/dmcrypt-luks
    centos7-bluestore-dmcrypt_plain: {toxinidir}/centos7/bluestore/dmcrypt-plain
    centos7-bluestore-dmcrypt_luks: {toxinidir}/centos7/bluestore/dmcrypt-luks
    centos7-filestore-dmcrypt_plain: {toxinidir}/centos7/filestore/dmcrypt-plain
    centos7-filestore-dmcrypt_luks: {toxinidir}/centos7/filestore/dmcrypt-luks
commands =
    # fetch ceph-ansible (branch overridable via CEPH_ANSIBLE_BRANCH)
    git clone -b {env:CEPH_ANSIBLE_BRANCH:master} --single-branch https://github.com/ceph/ceph-ansible.git {envdir}/tmp/ceph-ansible
    pip install -r {envdir}/tmp/ceph-ansible/tests/requirements.txt

    # bring up the VMs and generate an ssh config ansible can use
    bash {toxinidir}/../scripts/vagrant_up.sh {env:VAGRANT_UP_FLAGS:"--no-provision"} {posargs:--provider=virtualbox}
    bash {toxinidir}/../scripts/generate_ssh_config.sh {changedir}

    cp {toxinidir}/../playbooks/deploy.yml {envdir}/tmp/ceph-ansible

    # use ceph-ansible to deploy a ceph cluster on the vms
    ansible-playbook -vv -i {changedir}/hosts {envdir}/tmp/ceph-ansible/deploy.yml --extra-vars "fetch_directory={changedir}/fetch ceph_dev_branch={env:CEPH_DEV_BRANCH:master} ceph_dev_sha1={env:CEPH_DEV_SHA1:latest} toxinidir={toxinidir}"

    # prepare nodes for testing with testinfra
    ansible-playbook -vv -i {changedir}/hosts {envdir}/tmp/ceph-ansible/tests/functional/setup.yml

    # test cluster state using testinfra
    py.test -n 4 --sudo -v --connection=ansible --ssh-config={changedir}/vagrant_ssh_config --ansible-inventory={changedir}/hosts {toxinidir}/../tests

    # make ceph-volume simple take over all the OSDs that got deployed, disabling ceph-disk
    ansible-playbook -vv -i {changedir}/hosts {changedir}/test.yml

    # reboot all vms
    bash {toxinidir}/../scripts/vagrant_reload.sh {env:VAGRANT_UP_FLAGS:"--no-provision"} {posargs:--provider=virtualbox}

    # wait 2 minutes for services to be ready
    sleep 120

    # retest to ensure cluster came back up correctly after rebooting
    py.test -n 4 --sudo -v --connection=ansible --ssh-config={changedir}/vagrant_ssh_config --ansible-inventory={changedir}/hosts {toxinidir}/../tests

    # tear down the VMs (override flags via VAGRANT_DESTROY_FLAGS)
    vagrant destroy {env:VAGRANT_DESTROY_FLAGS:"--force"}