import contextlib
import logging
import os
import unittest
from unittest import suite, loader, case
from teuthology.task import interactive
from teuthology import misc
from tasks.cephfs.filesystem import Filesystem, MDSCluster, CephCluster
from tasks.mgr.mgr_test_case import MgrCluster

log = logging.getLogger(__name__)


class DecoratingLoader(loader.TestLoader):
    """
    A specialization of TestLoader that tags some extra attributes
    onto test classes as they are loaded.
    """
    def __init__(self, params):
        self._params = params
        super(DecoratingLoader, self).__init__()

    def _apply_params(self, obj):
        for k, v in self._params.items():
            if obj.__class__ is type:
                cls = obj
            else:
                cls = obj.__class__
            setattr(cls, k, v)

    def loadTestsFromTestCase(self, testCaseClass):
        self._apply_params(testCaseClass)
        return super(DecoratingLoader, self).loadTestsFromTestCase(testCaseClass)

    def loadTestsFromName(self, name, module=None):
        result = super(DecoratingLoader, self).loadTestsFromName(name, module)

        # Special case: when we are called with the name of a single test
        # method, we get a suite containing one TestCase.
        tests_in_result = list(result)
        if len(tests_in_result) == 1 and isinstance(tests_in_result[0], case.TestCase):
            self._apply_params(tests_in_result[0])

        return result

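# A minimal sketch of how DecoratingLoader is used by task() below: every key
# in the params dict ends up as a class attribute on the loaded test cases.
# The module name here is illustrative rather than a real test module.
#
#     test_loader = DecoratingLoader({"fs": fs, "mounts": mounts})
#     loaded = test_loader.loadTestsFromName("tasks.cephfs.test_example")
#     # every TestCase class in ``loaded`` now has ``fs`` and ``mounts``
#     # attributes, so tests can refer to ``self.fs`` and ``self.mounts``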

class LogStream(object):
    """
    A file-like object that buffers writes and emits each complete line
    via ``log.info``.  Used as the output stream for the test runner below
    so that unittest output ends up in the teuthology log.
    """
    def __init__(self):
        self.buffer = ""

    def write(self, data):
        self.buffer += data
        if "\n" in self.buffer:
            lines = self.buffer.split("\n")
            for line in lines[:-1]:
                log.info(line)
            self.buffer = lines[-1]

    def flush(self):
        pass

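# A small illustration of LogStream's line buffering (not executed anywhere):
# partial writes are held in ``self.buffer`` until a newline arrives, and only
# complete lines are forwarded to ``log.info``.
#
#     stream = LogStream()
#     stream.write("first line\nsecond ")   # logs "first line", buffers "second "
#     stream.write("half\n")                # logs "second half"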

class InteractiveFailureResult(unittest.TextTestResult):
    """
    Specialization that implements interactive-on-error style
    behavior.
    """
    ctx = None

    def addFailure(self, test, err):
        log.error(self._exc_info_to_string(err, test))
        log.error("Failure in test '{0}', going interactive".format(
            self.getDescription(test)
        ))
        interactive.task(ctx=self.ctx, config=None)

    def addError(self, test, err):
        log.error(self._exc_info_to_string(err, test))
        log.error("Error in test '{0}', going interactive".format(
            self.getDescription(test)
        ))
        interactive.task(ctx=self.ctx, config=None)

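# InteractiveFailureResult is only wired in when the job configuration enables
# interactive-on-error (see the ``ctx.config`` check in task() below).  A sketch
# of what that looks like in a job yaml, assuming the flag is a top-level key:
#
#     interactive-on-error: true
#     tasks:
#       - cephfs_test_runner: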

@contextlib.contextmanager
def task(ctx, config):
    """
    Run the CephFS test cases.

    Run everything in tasks/cephfs/test_*.py:

    ::

        tasks:
          - install:
          - ceph:
          - ceph-fuse:
          - cephfs_test_runner:

    The `modules` argument allows running only specific modules:

    ::

        tasks:
            ...
          - cephfs_test_runner:
              modules:
                - tasks.cephfs.test_sessionmap
                - tasks.cephfs.test_auto_repair

    By default, any cases that can't be run on the current cluster configuration
    will generate a failure.  When the optional `fail_on_skip` argument is set
    to false, any tests that can't be run on the current configuration will
    simply be skipped:

    ::

        tasks:
            ...
          - cephfs_test_runner:
              fail_on_skip: false

    """

    ceph_cluster = CephCluster(ctx)

    if len(list(misc.all_roles_of_type(ctx.cluster, 'mds'))):
        mds_cluster = MDSCluster(ctx)
        fs = Filesystem(ctx)
    else:
        mds_cluster = None
        fs = None

    if len(list(misc.all_roles_of_type(ctx.cluster, 'mgr'))):
        mgr_cluster = MgrCluster(ctx)
    else:
        mgr_cluster = None

    # Mount objects, sorted by ID
    if hasattr(ctx, 'mounts'):
        mounts = [v for k, v in sorted(ctx.mounts.items(), key=lambda mount: mount[0])]
    else:
        # The test configuration has a filesystem but no fuse/kclient mounts
        mounts = []

    decorating_loader = DecoratingLoader({
        "ctx": ctx,
        "mounts": mounts,
        "fs": fs,
        "ceph_cluster": ceph_cluster,
        "mds_cluster": mds_cluster,
        "mgr_cluster": mgr_cluster,
    })
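
    # A sketch of what the parameters above mean for a test: DecoratingLoader
    # attaches each entry as a class attribute, so a loaded test case can refer
    # to them directly, e.g. (illustrative only):
    #
    #     def test_something(self):
    #         self.assertIsNotNone(self.fs)
    #         self.assertGreaterEqual(len(self.mounts), 1)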

    # ``config`` may be omitted entirely in the job yaml, so guard against None
    fail_on_skip = config.get('fail_on_skip', True) if config else True

    # Put useful things onto ctx for interactive debugging
    ctx.fs = fs
    ctx.mds_cluster = mds_cluster
    ctx.mgr_cluster = mgr_cluster

    # Depending on config, either load specific modules, or scan for modules
    if config and 'modules' in config and config['modules']:
        module_suites = []
        for mod_name in config['modules']:
            # Test names like cephfs.test_auto_repair
            module_suites.append(decorating_loader.loadTestsFromName(mod_name))
        overall_suite = suite.TestSuite(module_suites)
    else:
        # Default, run all tests
        overall_suite = decorating_loader.discover(
            os.path.join(
                os.path.dirname(os.path.abspath(__file__)),
                "cephfs/"
            )
        )
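
    # Because names are resolved via loadTestsFromName, the ``modules`` list can
    # also select a single class or test method rather than a whole module.  A
    # sketch (the test name below is illustrative):
    #
    #     - cephfs_test_runner:
    #         modules:
    #           - tasks.cephfs.test_auto_repair.TestMDSAutoRepair.test_backtrace_repair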

    if ctx.config.get("interactive-on-error", False):
        InteractiveFailureResult.ctx = ctx
        result_class = InteractiveFailureResult
    else:
        result_class = unittest.TextTestResult

    class LoggingResult(result_class):
        def startTest(self, test):
            log.info("Starting test: {0}".format(self.getDescription(test)))
            return super(LoggingResult, self).startTest(test)

        def addSkip(self, test, reason):
            if fail_on_skip:
                # Don't just call addFailure because that requires a traceback
                self.failures.append((test, reason))
            else:
                super(LoggingResult, self).addSkip(test, reason)

    # Execute!
    result = unittest.TextTestRunner(
        stream=LogStream(),
        resultclass=LoggingResult,
        verbosity=2,
        failfast=True).run(overall_suite)
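
    # Note: failfast=True stops the run at the first failure or error, and with
    # fail_on_skip left at its default a skipped test is recorded as a failure,
    # with the skip reason standing in for a traceback.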

    if not result.wasSuccessful():
        result.printErrors()  # duplicate output at end for convenience

        bad_tests = []
        for test, error in result.errors:
            bad_tests.append(str(test))
        for test, failure in result.failures:
            bad_tests.append(str(test))

        raise RuntimeError("Test failure: {0}".format(", ".join(bad_tests)))

    yield