# -*- coding: utf-8 -*-

import json
import logging
import os
from functools import partial
from typing import Any, Dict, List, Optional

import cephfs
from mgr_module import NFS_GANESHA_SUPPORTED_FSALS

from .. import mgr
from ..security import Scope
from ..services.cephfs import CephFS
from ..services.exception import DashboardException, serialize_dashboard_exception
from . import APIDoc, APIRouter, BaseController, Endpoint, EndpointDoc, \
    ReadPermission, RESTController, Task, UIRouter
from ._version import APIVersion
a4b75251 TL |
20 | logger = logging.getLogger('controllers.nfs') |
21 | ||
22 | ||
23 | class NFSException(DashboardException): | |
24 | def __init__(self, msg): | |
25 | super(NFSException, self).__init__(component="nfs", msg=msg) | |
9f95a23c TL |
26 | |
27 | ||
11fdf7f2 TL |
28 | # documentation helpers |
29 | EXPORT_SCHEMA = { | |
30 | 'export_id': (int, 'Export ID'), | |
31 | 'path': (str, 'Export path'), | |
32 | 'cluster_id': (str, 'Cluster identifier'), | |
11fdf7f2 | 33 | 'pseudo': (str, 'Pseudo FS path'), |
11fdf7f2 TL |
34 | 'access_type': (str, 'Export access type'), |
35 | 'squash': (str, 'Export squash policy'), | |
36 | 'security_label': (str, 'Security label'), | |
37 | 'protocols': ([int], 'List of protocol types'), | |
38 | 'transports': ([str], 'List of transport types'), | |
39 | 'fsal': ({ | |
40 | 'name': (str, 'name of FSAL'), | |
a4b75251 | 41 | 'fs_name': (str, 'CephFS filesystem name', True), |
11fdf7f2 | 42 | 'sec_label_xattr': (str, 'Name of xattr for security label', True), |
a4b75251 | 43 | 'user_id': (str, 'User id', True) |
11fdf7f2 TL |
44 | }, 'FSAL configuration'), |
45 | 'clients': ([{ | |
46 | 'addresses': ([str], 'list of IP addresses'), | |
47 | 'access_type': (str, 'Client access type'), | |
48 | 'squash': (str, 'Client squash policy') | |
49 | }], 'List of client configurations'), | |
50 | } | |
51 | ||
52 | ||
53 | CREATE_EXPORT_SCHEMA = { | |
54 | 'path': (str, 'Export path'), | |
55 | 'cluster_id': (str, 'Cluster identifier'), | |
11fdf7f2 | 56 | 'pseudo': (str, 'Pseudo FS path'), |
11fdf7f2 TL |
57 | 'access_type': (str, 'Export access type'), |
58 | 'squash': (str, 'Export squash policy'), | |
59 | 'security_label': (str, 'Security label'), | |
60 | 'protocols': ([int], 'List of protocol types'), | |
61 | 'transports': ([str], 'List of transport types'), | |
62 | 'fsal': ({ | |
63 | 'name': (str, 'name of FSAL'), | |
a4b75251 TL |
64 | 'fs_name': (str, 'CephFS filesystem name', True), |
65 | 'sec_label_xattr': (str, 'Name of xattr for security label', True) | |
11fdf7f2 TL |
66 | }, 'FSAL configuration'), |
67 | 'clients': ([{ | |
68 | 'addresses': ([str], 'list of IP addresses'), | |
69 | 'access_type': (str, 'Client access type'), | |
70 | 'squash': (str, 'Client squash policy') | |
a4b75251 | 71 | }], 'List of client configurations') |
11fdf7f2 TL |
72 | } |
73 | ||
74 | ||
75 | # pylint: disable=not-callable | |
9f95a23c | 76 | def NfsTask(name, metadata, wait_for): # noqa: N802 |
11fdf7f2 TL |
77 | def composed_decorator(func): |
78 | return Task("nfs/{}".format(name), metadata, wait_for, | |
79 | partial(serialize_dashboard_exception, | |
80 | include_http_status=True))(func) | |
81 | return composed_decorator | |
82 | ||
83 | ||
a4b75251 TL |
84 | @APIRouter('/nfs-ganesha', Scope.NFS_GANESHA) |
85 | @APIDoc("NFS-Ganesha Cluster Management API", "NFS-Ganesha") | |
11fdf7f2 TL |
86 | class NFSGanesha(RESTController): |
87 | ||
88 | @EndpointDoc("Status of NFS-Ganesha management feature", | |
89 | responses={200: { | |
90 | 'available': (bool, "Is API available?"), | |
91 | 'message': (str, "Error message") | |
92 | }}) | |
93 | @Endpoint() | |
94 | @ReadPermission | |
95 | def status(self): | |
96 | status = {'available': True, 'message': None} | |
97 | try: | |
a4b75251 | 98 | mgr.remote('nfs', 'cluster_ls') |
20effc67 | 99 | except (ImportError, RuntimeError) as error: |
a4b75251 | 100 | logger.exception(error) |
11fdf7f2 | 101 | status['available'] = False |
a4b75251 | 102 | status['message'] = str(error) # type: ignore |
11fdf7f2 TL |
103 | |
104 | return status | |
105 | ||
106 | ||
a4b75251 TL |
107 | @APIRouter('/nfs-ganesha/cluster', Scope.NFS_GANESHA) |
108 | @APIDoc(group="NFS-Ganesha") | |
109 | class NFSGaneshaCluster(RESTController): | |
110 | @ReadPermission | |
111 | @RESTController.MethodMap(version=APIVersion.EXPERIMENTAL) | |
112 | def list(self): | |
113 | return mgr.remote('nfs', 'cluster_ls') | |
114 | ||
115 | ||
116 | @APIRouter('/nfs-ganesha/export', Scope.NFS_GANESHA) | |
117 | @APIDoc(group="NFS-Ganesha") | |
11fdf7f2 TL |
118 | class NFSGaneshaExports(RESTController): |
119 | RESOURCE_ID = "cluster_id/export_id" | |
120 | ||
a4b75251 TL |
121 | @staticmethod |
122 | def _get_schema_export(export: Dict[str, Any]) -> Dict[str, Any]: | |
123 | """ | |
124 | Method that avoids returning export info not exposed in the export schema | |
125 | e.g., rgw user access/secret keys. | |
126 | """ | |
127 | schema_fsal_info = {} | |
128 | for key in export['fsal'].keys(): | |
129 | if key in EXPORT_SCHEMA['fsal'][0].keys(): # type: ignore | |
130 | schema_fsal_info[key] = export['fsal'][key] | |
131 | export['fsal'] = schema_fsal_info | |
132 | return export | |
133 | ||
11fdf7f2 TL |
134 | @EndpointDoc("List all NFS-Ganesha exports", |
135 | responses={200: [EXPORT_SCHEMA]}) | |
a4b75251 TL |
136 | def list(self) -> List[Dict[str, Any]]: |
137 | exports = [] | |
138 | for export in mgr.remote('nfs', 'export_ls'): | |
139 | exports.append(self._get_schema_export(export)) | |
140 | ||
141 | return exports | |
11fdf7f2 TL |
142 | |
143 | @NfsTask('create', {'path': '{path}', 'fsal': '{fsal.name}', | |
144 | 'cluster_id': '{cluster_id}'}, 2.0) | |
145 | @EndpointDoc("Creates a new NFS-Ganesha export", | |
146 | parameters=CREATE_EXPORT_SCHEMA, | |
147 | responses={201: EXPORT_SCHEMA}) | |
a4b75251 TL |
148 | @RESTController.MethodMap(version=APIVersion(2, 0)) # type: ignore |
149 | def create(self, path, cluster_id, pseudo, access_type, | |
150 | squash, security_label, protocols, transports, fsal, clients) -> Dict[str, Any]: | |
151 | export_mgr = mgr.remote('nfs', 'fetch_nfs_export_obj') | |
152 | if export_mgr.get_export_by_pseudo(cluster_id, pseudo): | |
153 | raise DashboardException(msg=f'Pseudo {pseudo} is already in use.', | |
154 | component='nfs') | |
155 | if hasattr(fsal, 'user_id'): | |
156 | fsal.pop('user_id') # mgr/nfs does not let you customize user_id | |
157 | raw_ex = { | |
11fdf7f2 TL |
158 | 'path': path, |
159 | 'pseudo': pseudo, | |
160 | 'cluster_id': cluster_id, | |
11fdf7f2 TL |
161 | 'access_type': access_type, |
162 | 'squash': squash, | |
163 | 'security_label': security_label, | |
164 | 'protocols': protocols, | |
165 | 'transports': transports, | |
166 | 'fsal': fsal, | |
167 | 'clients': clients | |
a4b75251 TL |
168 | } |
169 | ret, _, err = export_mgr.apply_export(cluster_id, json.dumps(raw_ex)) | |
170 | if ret == 0: | |
171 | return self._get_schema_export( | |
172 | export_mgr.get_export_by_pseudo(cluster_id, pseudo)) | |
173 | raise NFSException(f"Export creation failed {err}") | |
11fdf7f2 TL |
174 | |
175 | @EndpointDoc("Get an NFS-Ganesha export", | |
176 | parameters={ | |
177 | 'cluster_id': (str, 'Cluster identifier'), | |
a4b75251 | 178 | 'export_id': (str, "Export ID") |
11fdf7f2 TL |
179 | }, |
180 | responses={200: EXPORT_SCHEMA}) | |
a4b75251 | 181 | def get(self, cluster_id, export_id) -> Optional[Dict[str, Any]]: |
11fdf7f2 | 182 | export_id = int(export_id) |
a4b75251 TL |
183 | export = mgr.remote('nfs', 'export_get', cluster_id, export_id) |
184 | if export: | |
185 | export = self._get_schema_export(export) | |
186 | ||
187 | return export | |
11fdf7f2 TL |
188 | |
189 | @NfsTask('edit', {'cluster_id': '{cluster_id}', 'export_id': '{export_id}'}, | |
190 | 2.0) | |
191 | @EndpointDoc("Updates an NFS-Ganesha export", | |
192 | parameters=dict(export_id=(int, "Export ID"), | |
193 | **CREATE_EXPORT_SCHEMA), | |
194 | responses={200: EXPORT_SCHEMA}) | |
a4b75251 TL |
195 | @RESTController.MethodMap(version=APIVersion(2, 0)) # type: ignore |
196 | def set(self, cluster_id, export_id, path, pseudo, access_type, | |
197 | squash, security_label, protocols, transports, fsal, clients) -> Dict[str, Any]: | |
11fdf7f2 | 198 | |
a4b75251 TL |
199 | if hasattr(fsal, 'user_id'): |
200 | fsal.pop('user_id') # mgr/nfs does not let you customize user_id | |
201 | raw_ex = { | |
11fdf7f2 | 202 | 'path': path, |
11fdf7f2 | 203 | 'pseudo': pseudo, |
a4b75251 TL |
204 | 'cluster_id': cluster_id, |
205 | 'export_id': export_id, | |
11fdf7f2 TL |
206 | 'access_type': access_type, |
207 | 'squash': squash, | |
208 | 'security_label': security_label, | |
209 | 'protocols': protocols, | |
210 | 'transports': transports, | |
211 | 'fsal': fsal, | |
212 | 'clients': clients | |
a4b75251 TL |
213 | } |
214 | ||
215 | export_mgr = mgr.remote('nfs', 'fetch_nfs_export_obj') | |
216 | ret, _, err = export_mgr.apply_export(cluster_id, json.dumps(raw_ex)) | |
217 | if ret == 0: | |
218 | return self._get_schema_export( | |
219 | export_mgr.get_export_by_pseudo(cluster_id, pseudo)) | |
220 | raise NFSException(f"Failed to update export: {err}") | |
11fdf7f2 TL |
221 | |
222 | @NfsTask('delete', {'cluster_id': '{cluster_id}', | |
223 | 'export_id': '{export_id}'}, 2.0) | |
224 | @EndpointDoc("Deletes an NFS-Ganesha export", | |
225 | parameters={ | |
226 | 'cluster_id': (str, 'Cluster identifier'), | |
a4b75251 | 227 | 'export_id': (int, "Export ID") |
11fdf7f2 | 228 | }) |
a4b75251 TL |
229 | @RESTController.MethodMap(version=APIVersion(2, 0)) # type: ignore |
230 | def delete(self, cluster_id, export_id): | |
11fdf7f2 | 231 | export_id = int(export_id) |
11fdf7f2 | 232 | |
a4b75251 TL |
233 | export = mgr.remote('nfs', 'export_get', cluster_id, export_id) |
234 | if not export: | |
235 | raise DashboardException( | |
236 | http_status_code=404, | |
237 | msg=f'Export with id {export_id} not found.', | |
238 | component='nfs') | |
239 | mgr.remote('nfs', 'export_rm', cluster_id, export['pseudo']) | |
11fdf7f2 TL |
240 | |
241 | ||
a4b75251 | 242 | @UIRouter('/nfs-ganesha', Scope.NFS_GANESHA) |
11fdf7f2 | 243 | class NFSGaneshaUi(BaseController): |
11fdf7f2 | 244 | @Endpoint('GET', '/fsals') |
f91f0fd5 | 245 | @ReadPermission |
11fdf7f2 | 246 | def fsals(self): |
a4b75251 | 247 | return NFS_GANESHA_SUPPORTED_FSALS |
11fdf7f2 TL |
248 | |
249 | @Endpoint('GET', '/lsdir') | |
f91f0fd5 TL |
250 | @ReadPermission |
251 | def lsdir(self, fs_name, root_dir=None, depth=1): # pragma: no cover | |
11fdf7f2 TL |
252 | if root_dir is None: |
253 | root_dir = "/" | |
f91f0fd5 TL |
254 | if not root_dir.startswith('/'): |
255 | root_dir = '/{}'.format(root_dir) | |
256 | root_dir = os.path.normpath(root_dir) | |
257 | ||
258 | try: | |
259 | depth = int(depth) | |
260 | error_msg = '' | |
261 | if depth < 0: | |
262 | error_msg = '`depth` must be greater or equal to 0.' | |
263 | if depth > 5: | |
264 | logger.warning("Limiting depth to maximum value of 5: " | |
265 | "input depth=%s", depth) | |
266 | depth = 5 | |
267 | except ValueError: | |
268 | error_msg = '`depth` must be an integer.' | |
269 | finally: | |
270 | if error_msg: | |
271 | raise DashboardException(code=400, | |
a4b75251 | 272 | component='nfs', |
f91f0fd5 TL |
273 | msg=error_msg) |
274 | ||
11fdf7f2 | 275 | try: |
f91f0fd5 TL |
276 | cfs = CephFS(fs_name) |
277 | paths = [root_dir] | |
278 | paths.extend([p['path'].rstrip('/') | |
279 | for p in cfs.ls_dir(root_dir, depth)]) | |
11fdf7f2 | 280 | except (cephfs.ObjectNotFound, cephfs.PermissionError): |
9f95a23c TL |
281 | paths = [] |
282 | return {'paths': paths} | |
11fdf7f2 TL |
283 | |
284 | @Endpoint('GET', '/cephfs/filesystems') | |
f91f0fd5 | 285 | @ReadPermission |
11fdf7f2 TL |
286 | def filesystems(self): |
287 | return CephFS.list_filesystems() |