Source: git.proxmox.com (gitweb) — ceph.git, blob ceph/qa/tasks/mgr/dashboard/test_health.py
1 # -*- coding: utf-8 -*-
2 from __future__
import absolute_import
4 from .helper
import (DashboardTestCase
, JAny
, JLeaf
, JList
, JObj
,
5 addrvec_schema
, module_options_schema
)
# NOTE(review): The text below is a lossy gitweb/HTML extraction of
# ceph/qa/tasks/mgr/dashboard/test_health.py.  The integers fused onto the
# start of many lines are the ORIGINAL file's line numbers, and the gaps in
# that numbering (e.g. 17 -> 20, 24 -> 27, 68 -> 71, 125 -> 127) prove that
# interior lines were dropped during extraction.  Consequently this is NOT
# valid Python as-is (schema dicts are missing keys, commas and closing
# braces, and several `schema = JObj({` wrappers are gone).  The code bytes
# are preserved verbatim so the file can be re-fetched from the upstream
# repository and diffed, rather than reconstructed by guesswork.
#
# What the visible fragments establish:
#   * HealthTest subclasses DashboardTestCase (from .helper) and exercises
#     the mgr dashboard health REST endpoints.
#   * Two private class attributes hold reusable JObj schemas:
#     __pg_info_schema and __mdsmap_schema.
#   * test_minimal_health GETs /api/health/minimal, test_full_health and
#     test_health_permissions GET /api/health/full; each asserts HTTP 200,
#     validates the response with self.assertSchema(data, schema), and the
#     latter two cross-check data['pools'] against
#     self.ceph_cluster.mon_manager.list_pools().
8 class HealthTest(DashboardTestCase
):
# Fragment: __pg_info_schema — JObj describing the 'pg_info' payload.
# Visible keys: object_stats (num_object_copies/degraded/misplaced/unfound)
# and statuses (open JObj with int values).  Other entries were lost in
# extraction — confirm against the upstream file.
11 __pg_info_schema
= JObj({
12 'object_stats': JObj({
14 'num_object_copies': int,
15 'num_objects_degraded': int,
16 'num_objects_misplaced': int,
17 'num_objects_unfound': int
20 'statuses': JObj({}, allow_unknown
=True, unknown_schema
=int)
# Fragment: __mdsmap_schema — JObj describing an 'mdsmap' entry; many keys
# are missing here (original lines 25-26, 29-31, 34-37, 39-40, 44, 48-49,
# 51-52, 54-57, 61, 63-65 were dropped by the extraction).
23 __mdsmap_schema
= JObj({
24 'session_autoclose': int,
27 'up': JObj({}, allow_unknown
=True),
28 'last_failure_osd_epoch': int,
32 'explicitly_allowed_features': int,
33 'damaged': JList(int),
38 'stopped': JList(int),
41 'compat': JObj({}, allow_unknown
=True),
42 'ro_compat': JObj({}, allow_unknown
=True),
43 'incompat': JObj({}, allow_unknown
=True)
45 'required_client_features': JObj({}, allow_unknown
=True),
46 'data_pools': JList(int),
47 'info': JObj({}, allow_unknown
=True),
50 'standby_count_wanted': int,
53 'session_timeout': int,
58 'allow_multimds_snaps': bool,
59 'allow_standby_replay': bool,
60 'refuse_client_session': bool
62 'ever_allowed_features': int,
# Fragment: test_minimal_health — GET /api/health/minimal, assert HTTP 200,
# then assertSchema against a schema whose `schema = JObj({` opener (original
# lines 69-70) was lost; only some keys (client_perf counters, df totals,
# fs_map mdsmap/standbys, health checks/mutes, iscsi_daemons, mon_status
# mons, pg_info, pools) survive below.
66 def test_minimal_health(self
):
67 data
= self
._get
('/api/health/minimal')
68 self
.assertStatus(200)
71 'read_bytes_sec': int,
72 'read_op_per_sec': int,
73 'recovering_bytes_per_sec': int,
74 'write_bytes_sec': int,
75 'write_op_per_sec': int
79 'total_avail_bytes': int,
81 'total_used_raw_bytes': int,
87 'mdsmap': self
.__mdsmap
_schema
90 'standbys': JList(JObj({}, allow_unknown
=True)),
93 'checks': JList(JObj({}, allow_unknown
=True)),
94 'mutes': JList(JObj({}, allow_unknown
=True)),
98 'iscsi_daemons': JObj({
104 'standbys': JList(JLeaf(dict))
108 'mons': JList(JLeaf(dict)),
120 'pg_info': self
.__pg
_info
_schema
,
121 'pools': JList(JLeaf(dict)),
125 self
.assertSchema(data
, schema
)
# Fragment: test_full_health — GET /api/health/full, assert HTTP 200,
# build module_info_schema (uses module_options_schema from .helper),
# assertSchema on a large schema (client_perf, df pools/stats, fs_map with
# compat/feature_flags/filesystems, health, iscsi_daemons, mgr_map with
# active_addrs/available_modules/standbys, mon_status with monmap/quorum,
# pg_info, pools), then verify data['pools'] matches the cluster's pools.
# Large portions of the schema literal were lost in extraction.
127 def test_full_health(self
):
128 data
= self
._get
('/api/health/full')
129 self
.assertStatus(200)
130 module_info_schema
= JObj({
134 'module_options': module_options_schema
137 'client_perf': JObj({
138 'read_bytes_sec': int,
139 'read_op_per_sec': int,
140 'recovering_bytes_per_sec': int,
141 'write_bytes_sec': int,
142 'write_op_per_sec': int
145 'pools': JList(JObj({
153 'data_bytes_used': int,
154 'omap_bytes_used': int,
155 'percent_used': float,
157 'quota_objects': int,
164 'compress_bytes_used': int,
165 'compress_under_bytes': int,
173 'total_avail_bytes': int,
175 'total_used_bytes': int,
176 'total_used_raw_bytes': int,
177 'total_used_raw_ratio': float,
179 'num_per_pool_osds': int,
180 'num_per_pool_omap_osds': int
185 'compat': JObj({}, allow_unknown
=True, unknown_schema
=str),
187 {}, allow_unknown
=True, unknown_schema
=str),
189 {}, allow_unknown
=True, unknown_schema
=str)
191 'default_fscid': int,
193 'feature_flags': JObj(
194 {}, allow_unknown
=True, unknown_schema
=bool),
195 'filesystems': JList(
198 'mdsmap': self
.__mdsmap
_schema
201 'standbys': JList(JObj({}, allow_unknown
=True)),
204 'checks': JList(JObj({}, allow_unknown
=True)),
205 'mutes': JList(JObj({}, allow_unknown
=True)),
209 'iscsi_daemons': JObj({
215 'active_addrs': JObj({
216 'addrvec': addrvec_schema
218 'active_change': str, # timestamp
219 'active_mgr_features': int,
222 'always_on_modules': JObj({}, allow_unknown
=True),
224 'available_modules': JList(module_info_schema
),
226 'modules': JList(str),
228 {'dashboard': str}, # This module should always be present
229 allow_unknown
=True, unknown_schema
=str
231 'standbys': JList(JObj({
232 'available_modules': JList(module_info_schema
),
236 }, allow_unknown
=True))
237 }, allow_unknown
=True),
239 'election_epoch': int,
240 'extra_probe_peers': JList(JAny(none
=True)),
242 {}, allow_unknown
=True, unknown_schema
=JList(JObj({
250 'quorum_mon': JList(str),
252 'required_mon': JList(str)
255 # @TODO: expand on monmap schema
256 'mons': JList(JLeaf(dict)),
257 }, allow_unknown
=True),
259 'outside_quorum': JList(int),
260 'quorum': JList(int),
264 # @TODO: What type should be expected here?
265 'sync_provider': JList(JAny(none
=True)),
269 # @TODO: define schema for crush map and osd_metadata, among
275 }, allow_unknown
=True)),
276 }, allow_unknown
=True),
277 'pg_info': self
.__pg
_info
_schema
,
278 'pools': JList(JLeaf(dict)),
282 self
.assertSchema(data
, schema
)
# Cross-check: the number of pools reported by the endpoint must equal the
# cluster's pool list, and every reported pool_name must be a cluster pool.
284 cluster_pools
= self
.ceph_cluster
.mon_manager
.list_pools()
285 self
.assertEqual(len(cluster_pools
), len(data
['pools']))
286 for pool
in data
['pools']:
287 self
.assertIn(pool
['pool_name'], cluster_pools
)
# Fragment: test_health_permissions — runs as a user holding only the
# 'pool-manager' role (DashboardTestCase.RunAs decorator), GETs
# /api/health/full, and validates a reduced schema (client_perf, df,
# checks/mutes, pools visible; the `schema = JObj({` opener and some keys,
# original lines 293-294/297/300-301/303, were lost).  Ends with the same
# pools cross-check as test_full_health.
289 @DashboardTestCase.RunAs('test', 'test', ['pool-manager'])
290 def test_health_permissions(self
):
291 data
= self
._get
('/api/health/full')
292 self
.assertStatus(200)
295 'client_perf': JObj({}, allow_unknown
=True),
296 'df': JObj({}, allow_unknown
=True),
298 'checks': JList(JObj({}, allow_unknown
=True)),
299 'mutes': JList(JObj({}, allow_unknown
=True)),
302 'pools': JList(JLeaf(dict)),
304 self
.assertSchema(data
, schema
)
306 cluster_pools
= self
.ceph_cluster
.mon_manager
.list_pools()
307 self
.assertEqual(len(cluster_pools
), len(data
['pools']))
308 for pool
in data
['pools']:
309 self
.assertIn(pool
['pool_name'], cluster_pools
)