src/api2/admin/datastore.rs: use new WorkerTask::new_thread()
use failure::*;

use crate::tools;
use crate::api_schema::*;
use crate::api_schema::router::*;
//use crate::server::rest::*;
use serde_json::{json, Value};
use std::collections::{HashSet, HashMap};
use chrono::{DateTime, Datelike, Local};
use std::path::PathBuf;
use std::sync::Arc;

//use hyper::StatusCode;
//use hyper::rt::{Future, Stream};

use crate::config::datastore;

use crate::backup::*;
use crate::server::WorkerTask;

mod pxar;
mod upload;

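/// Group a flat backup list into a map from group path ("backup-type/backup-id") to the snapshots of that group.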
fn group_backups(backup_list: Vec<BackupInfo>) -> HashMap<String, Vec<BackupInfo>> {

    let mut group_hash = HashMap::new();

    for info in backup_list {
        let group_id = info.backup_dir.group().group_path().to_str().unwrap().to_owned();
        let time_list = group_hash.entry(group_id).or_insert(vec![]);
        time_list.push(info);
    }

    group_hash
}

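/// Insert up to `keep` backups from `list` into `mark`, keeping at most one backup per distinct value returned by `select_id`.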
fn mark_selections<F: Fn(DateTime<Local>, &BackupInfo) -> String> (
    mark: &mut HashSet<PathBuf>,
    list: &[BackupInfo],
    keep: usize,
    select_id: F,
) {
    let mut hash = HashSet::new();
    for info in list {
        let local_time = info.backup_dir.backup_time().with_timezone(&Local);
        if hash.len() >= keep { break; }
        let backup_id = info.backup_dir.relative_path();
        let sel_id: String = select_id(local_time, &info);
        if !hash.contains(&sel_id) {
            hash.insert(sel_id);
            //println!(" KEEP ID {} {}", backup_id, local_time.format("%c"));
            mark.insert(backup_id);
        }
    }
}

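/// API handler: list the backup groups of a datastore, with last backup time, snapshot count and the files of the newest snapshot.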
fn list_groups(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(store)?;

    let backup_list = datastore.list_backups()?;

    let group_hash = group_backups(backup_list);

    let mut groups = vec![];

    for (_group_id, mut list) in group_hash {

        BackupInfo::sort_list(&mut list, false);

        let info = &list[0];
        let group = info.backup_dir.group();

        groups.push(json!({
            "backup-type": group.backup_type(),
            "backup-id": group.backup_id(),
            "last-backup": info.backup_dir.backup_time().timestamp(),
            "backup-count": list.len() as u64,
            "files": info.files,
        }));
    }

    Ok(json!(groups))
}

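/// API handler: list the files of a single backup snapshot.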
fn list_snapshot_files(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;
    let backup_time = tools::required_integer_param(&param, "backup-time")?;

    let snapshot = BackupDir::new(backup_type, backup_id, backup_time);

    let datastore = DataStore::lookup_datastore(store)?;

    let files = datastore.list_files(&snapshot)?;

    Ok(json!(files))
}

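/// API handler: remove a single backup snapshot from the datastore.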
fn delete_snapshots(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;
    let backup_time = tools::required_integer_param(&param, "backup-time")?;

    let snapshot = BackupDir::new(backup_type, backup_id, backup_time);

    let datastore = DataStore::lookup_datastore(store)?;

    datastore.remove_backup_dir(&snapshot)?;

    Ok(Value::Null)
}

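/// API handler: list all snapshots of one backup group, newest first.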
fn list_snapshots(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;

    let group = BackupGroup::new(backup_type, backup_id);

    let datastore = DataStore::lookup_datastore(store)?;

    let backup_list = datastore.list_backups()?;

    let mut group_hash = group_backups(backup_list);

    let group_id = group.group_path().to_str().unwrap().to_owned();

    let group_snapshots = match group_hash.get_mut(&group_id) {
        Some(data) => {
            // new backups first
            BackupInfo::sort_list(data, false);
            data
        }
        None => bail!("Backup group '{}' does not exist.", group_id),
    };

    let mut snapshots = vec![];

    for info in group_snapshots {

        let group = info.backup_dir.group();

        snapshots.push(json!({
            "backup-type": group.backup_type(),
            "backup-id": group.backup_id(),
            "backup-time": info.backup_dir.backup_time().timestamp(),
            "files": info.files,
        }));
    }

    Ok(json!(snapshots))
}

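/// API handler: prune a datastore. For each backup group, the newest snapshots selected by the
/// `keep-last`/`keep-daily`/`keep-weekly`/`keep-monthly`/`keep-yearly` options are kept and all
/// remaining snapshots of that group are removed.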
fn prune(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(store)?;

    println!("Starting prune on store {}", store);

    let backup_list = datastore.list_backups()?;

    let group_hash = group_backups(backup_list);

    for (_group_id, mut list) in group_hash {

        let mut mark = HashSet::new();

        BackupInfo::sort_list(&mut list, false);

        if let Some(keep_last) = param["keep-last"].as_u64() {
            list.iter().take(keep_last as usize).for_each(|info| {
                mark.insert(info.backup_dir.relative_path());
            });
        }

        if let Some(keep_daily) = param["keep-daily"].as_u64() {
            mark_selections(&mut mark, &list, keep_daily as usize, |local_time, _info| {
                format!("{}/{}/{}", local_time.year(), local_time.month(), local_time.day())
            });
        }

        if let Some(keep_weekly) = param["keep-weekly"].as_u64() {
            mark_selections(&mut mark, &list, keep_weekly as usize, |local_time, _info| {
                format!("{}/{}", local_time.year(), local_time.iso_week().week())
            });
        }

        if let Some(keep_monthly) = param["keep-monthly"].as_u64() {
            mark_selections(&mut mark, &list, keep_monthly as usize, |local_time, _info| {
                format!("{}/{}", local_time.year(), local_time.month())
            });
        }

        if let Some(keep_yearly) = param["keep-yearly"].as_u64() {
            mark_selections(&mut mark, &list, keep_yearly as usize, |local_time, _info| {
                format!("{}", local_time.year())
            });
        }

        let mut remove_list: Vec<BackupInfo> = list.into_iter()
            .filter(|info| !mark.contains(&info.backup_dir.relative_path())).collect();

        BackupInfo::sort_list(&mut remove_list, true);

        for info in remove_list {
            datastore.remove_backup_dir(&info.backup_dir)?;
        }
    }

    Ok(json!(null))
}

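/// Add the optional `keep-*` retention parameters shared by all prune-style API methods to `schema`.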
pub fn add_common_prune_parameters(schema: ObjectSchema) -> ObjectSchema {

    schema
        .optional(
            "keep-last",
            IntegerSchema::new("Number of backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-daily",
            IntegerSchema::new("Number of daily backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-weekly",
            IntegerSchema::new("Number of weekly backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-monthly",
            IntegerSchema::new("Number of monthly backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-yearly",
            IntegerSchema::new("Number of yearly backups to keep.")
                .minimum(1)
        )
}

fn api_method_prune() -> ApiMethod {
    ApiMethod::new(
        prune,
        add_common_prune_parameters(
            ObjectSchema::new("Prune the datastore.")
                .required(
                    "store",
                    StringSchema::new("Datastore name.")
                )
        )
    )
}

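/// API handler: start garbage collection on a datastore in a background worker task and return the worker UPID.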
fn start_garbage_collection(
    param: Value,
    _info: &ApiMethod,
    rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap().to_string();

    let datastore = DataStore::lookup_datastore(&store)?;

    println!("Starting garbage collection on store {}", store);

    let to_stdout = rpcenv.env_type() == RpcEnvironmentType::CLI;

    let upid_str = WorkerTask::new_thread(
        "garbage_collection", Some(store.clone()), "root@pam", to_stdout, move |worker|
        {
            worker.log(format!("starting garbage collection on store {}", store));
            datastore.garbage_collection()
        })?;

    Ok(json!(upid_str))
}

pub fn api_method_start_garbage_collection() -> ApiMethod {
    ApiMethod::new(
        start_garbage_collection,
        ObjectSchema::new("Start garbage collection.")
            .required("store", StringSchema::new("Datastore name."))
    )
}

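/// API handler: report garbage collection status for a datastore (placeholder, currently returns null).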
fn garbage_collection_status(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    println!("Garbage collection status on store {}", store);

    Ok(json!(null))
}

pub fn api_method_garbage_collection_status() -> ApiMethod {
    ApiMethod::new(
        garbage_collection_status,
        ObjectSchema::new("Garbage collection status.")
            .required("store", StringSchema::new("Datastore name."))
    )
}

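/// API handler: list all backup snapshots of a datastore, across all groups.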
fn get_backup_list(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    //let config = datastore::config()?;

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(store)?;

    let mut list = vec![];

    for info in datastore.list_backups()? {
        list.push(json!({
            "backup-type": info.backup_dir.group().backup_type(),
            "backup-id": info.backup_dir.group().backup_id(),
            "backup-time": info.backup_dir.backup_time().timestamp(),
            "files": info.files,
        }));
    }

    let result = json!(list);

    Ok(result)
}

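/// API handler: list all configured datastores.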
fn get_datastore_list(
    _param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let config = datastore::config()?;

    Ok(config.convert_to_array("store"))
}

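/// Build the datastore API router: a datastore list at the top level and, per datastore,
/// subdirectories for backups, pxar download/upload, test-upload, garbage collection (gc),
/// files, groups, snapshots and prune.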
pub fn router() -> Router {

    let store_schema: Arc<Schema> = Arc::new(
        StringSchema::new("Datastore name.").into()
    );

    let datastore_info = Router::new()
        .get(ApiMethod::new(
            |_,_,_| Ok(json!([
                {"subdir": "backups" },
                {"subdir": "pxar" },
                {"subdir": "gc" },
                {"subdir": "groups" },
                {"subdir": "snapshots" },
                {"subdir": "status" },
                {"subdir": "prune" },
            ])),
            ObjectSchema::new("Directory index.")
                .required("store", store_schema.clone()))
        )
        .subdir(
            "backups",
            Router::new()
                .get(ApiMethod::new(
                    get_backup_list,
                    ObjectSchema::new("List backups.")
                        .required("store", store_schema.clone()))))
        .subdir(
            "pxar",
            Router::new()
                .download(pxar::api_method_download_pxar())
                .upload(pxar::api_method_upload_pxar()))
        .subdir(
            "test-upload",
            Router::new()
                .upgrade(upload::api_method_upgrade_upload()))
        .subdir(
            "gc",
            Router::new()
                .get(api_method_garbage_collection_status())
                .post(api_method_start_garbage_collection()))
        .subdir(
            "files",
            Router::new()
                .get(
                    ApiMethod::new(
                        list_snapshot_files,
                        ObjectSchema::new("List snapshot files.")
                            .required("store", store_schema.clone())
                            .required("backup-type", StringSchema::new("Backup type."))
                            .required("backup-id", StringSchema::new("Backup ID."))
                            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch).")
                                .minimum(1547797308))
                    )
                )
        )
        .subdir(
            "groups",
            Router::new()
                .get(ApiMethod::new(
                    list_groups,
                    ObjectSchema::new("List backup groups.")
                        .required("store", store_schema.clone()))))
        .subdir(
            "snapshots",
            Router::new()
                .get(
                    ApiMethod::new(
                        list_snapshots,
                        ObjectSchema::new("List backup snapshots.")
                            .required("store", store_schema.clone())
                            .required("backup-type", StringSchema::new("Backup type."))
                            .required("backup-id", StringSchema::new("Backup ID."))
                    )
                )
                .delete(
                    ApiMethod::new(
                        delete_snapshots,
                        ObjectSchema::new("Delete backup snapshot.")
                            .required("store", store_schema.clone())
                            .required("backup-type", StringSchema::new("Backup type."))
                            .required("backup-id", StringSchema::new("Backup ID."))
                            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch).")
                                .minimum(1547797308))
                    )
                )
        )
        .subdir(
            "prune",
            Router::new()
                .post(api_method_prune()));

    let route = Router::new()
        .get(ApiMethod::new(
            get_datastore_list,
            ObjectSchema::new("Directory index.")))
        .match_all("store", datastore_info);

    route
}