// src/api2/admin/datastore.rs
use failure::*;

use crate::tools;
use crate::api_schema::*;
use crate::api_schema::router::*;
//use crate::server::rest::*;
use serde_json::{json, Value};
use std::collections::{HashSet, HashMap};
use chrono::{DateTime, Datelike, Local};
use std::path::PathBuf;
use std::sync::Arc;

//use hyper::StatusCode;
//use hyper::rt::{Future, Stream};

use crate::config::datastore;

use crate::backup::*;

mod pxar;
mod upload;

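/// Group a flat backup list by backup group, keyed by the group's relative
/// path string (which encodes backup type and id).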
fn group_backups(backup_list: Vec<BackupInfo>) -> HashMap<String, Vec<BackupInfo>> {

    let mut group_hash = HashMap::new();

    for info in backup_list {
        let group_id = info.backup_dir.group().group_path().to_str().unwrap().to_owned();
        let time_list = group_hash.entry(group_id).or_insert(vec![]);
        time_list.push(info);
    }

    group_hash
}

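/// Walk the (already sorted) backup list and mark the first backup seen for
/// each distinct selection id returned by `select_id` (e.g. "year/month/day"),
/// stopping once `keep` distinct ids have been collected.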
fn mark_selections<F: Fn(DateTime<Local>, &BackupInfo) -> String> (
    mark: &mut HashSet<PathBuf>,
    list: &Vec<BackupInfo>,
    keep: usize,
    select_id: F,
) {
    let mut hash = HashSet::new();
    for info in list {
        let local_time = info.backup_dir.backup_time().with_timezone(&Local);
        if hash.len() >= keep { break; }
        let backup_id = info.backup_dir.relative_path();
        let sel_id: String = select_id(local_time, &info);
        if !hash.contains(&sel_id) {
            hash.insert(sel_id);
            //println!(" KEEP ID {} {}", backup_id, local_time.format("%c"));
            mark.insert(backup_id);
        }
    }
}

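/// List backup groups in a datastore. For each group, report its type and id,
/// the timestamp of the most recent backup, the backup count, and the files
/// of the latest backup.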
fn list_groups(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(store)?;

    let backup_list = datastore.list_backups()?;

    let group_hash = group_backups(backup_list);

    let mut groups = vec![];

    for (_group_id, mut list) in group_hash {

        BackupInfo::sort_list(&mut list, false);

        let info = &list[0];
        let group = info.backup_dir.group();

        groups.push(json!({
            "backup-type": group.backup_type(),
            "backup-id": group.backup_id(),
            "last-backup": info.backup_dir.backup_time().timestamp(),
            "backup-count": list.len() as u64,
            "files": info.files,
        }));
    }

    Ok(json!(groups))
}

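/// List the files belonging to a single backup snapshot, identified by
/// store, backup type, backup id and backup time.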
fn list_snapshot_files(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;
    let backup_time = tools::required_integer_param(&param, "backup-time")?;

    let snapshot = BackupDir::new(backup_type, backup_id, backup_time);

    let datastore = DataStore::lookup_datastore(store)?;

    let files = datastore.list_files(&snapshot)?;

    Ok(json!(files))
}

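/// Remove a single backup snapshot directory from the datastore.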
fn delete_snapshots(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;
    let backup_time = tools::required_integer_param(&param, "backup-time")?;

    let snapshot = BackupDir::new(backup_type, backup_id, backup_time);

    let datastore = DataStore::lookup_datastore(store)?;

    datastore.remove_backup_dir(&snapshot)?;

    Ok(Value::Null)
}

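/// List all snapshots of a single backup group (newest first), including
/// backup type, id, timestamp and the files of each snapshot.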
fn list_snapshots(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;

    let group = BackupGroup::new(backup_type, backup_id);

    let datastore = DataStore::lookup_datastore(store)?;

    let backup_list = datastore.list_backups()?;

    let mut group_hash = group_backups(backup_list);

    let group_id = group.group_path().to_str().unwrap().to_owned();

    let group_snapshots = match group_hash.get_mut(&group_id) {
        Some(data) => {
            // new backups first
            BackupInfo::sort_list(data, false);
            data
        }
        None => bail!("Backup group '{}' does not exist.", group_id),
    };

    let mut snapshots = vec![];

    for info in group_snapshots {

        let group = info.backup_dir.group();

        snapshots.push(json!({
            "backup-type": group.backup_type(),
            "backup-id": group.backup_id(),
            "backup-time": info.backup_dir.backup_time().timestamp(),
            "files": info.files,
        }));
    }

    Ok(json!(snapshots))
}

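/// Prune a datastore: within each backup group, mark the backups selected by
/// the optional `keep-last`, `keep-daily`, `keep-weekly`, `keep-monthly` and
/// `keep-yearly` parameters, then remove every backup that was not marked.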
fn prune(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(store)?;

    println!("Starting prune on store {}", store);

    let backup_list = datastore.list_backups()?;

    let group_hash = group_backups(backup_list);

    for (_group_id, mut list) in group_hash {

        let mut mark = HashSet::new();

        BackupInfo::sort_list(&mut list, false);

        if let Some(keep_last) = param["keep-last"].as_u64() {
            list.iter().take(keep_last as usize).for_each(|info| {
                mark.insert(info.backup_dir.relative_path());
            });
        }

        if let Some(keep_daily) = param["keep-daily"].as_u64() {
            mark_selections(&mut mark, &list, keep_daily as usize, |local_time, _info| {
                format!("{}/{}/{}", local_time.year(), local_time.month(), local_time.day())
            });
        }

        if let Some(keep_weekly) = param["keep-weekly"].as_u64() {
            mark_selections(&mut mark, &list, keep_weekly as usize, |local_time, _info| {
                format!("{}/{}", local_time.year(), local_time.iso_week().week())
            });
        }

        if let Some(keep_monthly) = param["keep-monthly"].as_u64() {
            mark_selections(&mut mark, &list, keep_monthly as usize, |local_time, _info| {
                format!("{}/{}", local_time.year(), local_time.month())
            });
        }

        if let Some(keep_yearly) = param["keep-yearly"].as_u64() {
            mark_selections(&mut mark, &list, keep_yearly as usize, |local_time, _info| {
                format!("{}/{}", local_time.year(), local_time.year())
            });
        }

        let mut remove_list: Vec<BackupInfo> = list.into_iter()
            .filter(|info| !mark.contains(&info.backup_dir.relative_path())).collect();

        BackupInfo::sort_list(&mut remove_list, true);

        for info in remove_list {
            datastore.remove_backup_dir(&info.backup_dir)?;
        }
    }

    Ok(json!(null))
}

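/// Extend an object schema with the optional `keep-last`, `keep-daily`,
/// `keep-weekly`, `keep-monthly` and `keep-yearly` prune parameters
/// (each an integer with minimum 1).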
pub fn add_common_prune_prameters(schema: ObjectSchema) -> ObjectSchema {

    schema
        .optional(
            "keep-last",
            IntegerSchema::new("Number of backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-daily",
            IntegerSchema::new("Number of daily backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-weekly",
            IntegerSchema::new("Number of weekly backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-monthly",
            IntegerSchema::new("Number of monthly backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-yearly",
            IntegerSchema::new("Number of yearly backups to keep.")
                .minimum(1)
        )
}

fn api_method_prune() -> ApiMethod {
    ApiMethod::new(
        prune,
        add_common_prune_prameters(
            ObjectSchema::new("Prune the datastore.")
                .required(
                    "store",
                    StringSchema::new("Datastore name.")
                )
        )
    )
}

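/// Start garbage collection on a datastore. The collection itself runs in a
/// spawned background thread; errors are only logged to stdout.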
fn start_garbage_collection(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap().to_string();

    let datastore = DataStore::lookup_datastore(&store)?;

    println!("Starting garbage collection on store {}", store);

    std::thread::spawn(move || {
        if let Err(err) = datastore.garbage_collection() {
            println!("Garbage collection error on store {} - {}", store, err);
        }
    });

    Ok(json!(null))
}

pub fn api_method_start_garbage_collection() -> ApiMethod {
    ApiMethod::new(
        start_garbage_collection,
        ObjectSchema::new("Start garbage collection.")
            .required("store", StringSchema::new("Datastore name."))
    )
}

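/// Report garbage collection status for a datastore. Currently a stub that
/// only logs the request and returns `null`.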
fn garbage_collection_status(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    println!("Garbage collection status on store {}", store);

    Ok(json!(null))
}

pub fn api_method_garbage_collection_status() -> ApiMethod {
    ApiMethod::new(
        garbage_collection_status,
        ObjectSchema::new("Garbage collection status.")
            .required("store", StringSchema::new("Datastore name."))
    )
}

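/// List all backups in a datastore as a flat array, reporting backup type,
/// id, timestamp and files for each snapshot.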
fn get_backup_list(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    //let config = datastore::config()?;

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(store)?;

    let mut list = vec![];

    for info in datastore.list_backups()? {
        list.push(json!({
            "backup-type": info.backup_dir.group().backup_type(),
            "backup-id": info.backup_dir.group().backup_id(),
            "backup-time": info.backup_dir.backup_time().timestamp(),
            "files": info.files,
        }));
    }

    let result = json!(list);

    Ok(result)
}

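/// Return the list of configured datastores (the datastore config converted
/// to an array).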
fn get_datastore_list(
    _param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let config = datastore::config()?;

    Ok(config.convert_to_array("store"))
}

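/// Build the API router for the datastore admin interface: the top level
/// lists all datastores, and per-datastore sub-routes expose `backups`,
/// `pxar`, `test-upload`, `gc`, `files`, `groups`, `snapshots` and `prune`.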
pub fn router() -> Router {

    let store_schema: Arc<Schema> = Arc::new(
        StringSchema::new("Datastore name.").into()
    );

    let datastore_info = Router::new()
        .get(ApiMethod::new(
            |_,_,_| Ok(json!([
                {"subdir": "backups" },
                {"subdir": "pxar" },
                {"subdir": "gc" },
                {"subdir": "groups" },
                {"subdir": "snapshots" },
                {"subdir": "status" },
                {"subdir": "prune" },
            ])),
            ObjectSchema::new("Directory index.")
                .required("store", store_schema.clone()))
        )
        .subdir(
            "backups",
            Router::new()
                .get(ApiMethod::new(
                    get_backup_list,
                    ObjectSchema::new("List backups.")
                        .required("store", store_schema.clone()))))
        .subdir(
            "pxar",
            Router::new()
                .download(pxar::api_method_download_pxar())
                .upload(pxar::api_method_upload_pxar()))
        .subdir(
            "test-upload",
            Router::new()
                .upgrade(upload::api_method_upgrade_upload()))
        .subdir(
            "gc",
            Router::new()
                .get(api_method_garbage_collection_status())
                .post(api_method_start_garbage_collection()))
        .subdir(
            "files",
            Router::new()
                .get(
                    ApiMethod::new(
                        list_snapshot_files,
                        ObjectSchema::new("List snapshot files.")
                            .required("store", store_schema.clone())
                            .required("backup-type", StringSchema::new("Backup type."))
                            .required("backup-id", StringSchema::new("Backup ID."))
                            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch).")
                                .minimum(1547797308))
                    )
                )
        )
        .subdir(
            "groups",
            Router::new()
                .get(ApiMethod::new(
                    list_groups,
                    ObjectSchema::new("List backup groups.")
                        .required("store", store_schema.clone()))))
        .subdir(
            "snapshots",
            Router::new()
                .get(
                    ApiMethod::new(
                        list_snapshots,
                        ObjectSchema::new("List backup snapshots.")
                            .required("store", store_schema.clone())
                            .required("backup-type", StringSchema::new("Backup type."))
                            .required("backup-id", StringSchema::new("Backup ID."))
                    )
                )
                .delete(
                    ApiMethod::new(
                        delete_snapshots,
                        ObjectSchema::new("Delete backup snapshot.")
                            .required("store", store_schema.clone())
                            .required("backup-type", StringSchema::new("Backup type."))
                            .required("backup-id", StringSchema::new("Backup ID."))
                            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch).")
                                .minimum(1547797308))
                    )
                )
        )
        .subdir(
            "prune",
            Router::new()
                .post(api_method_prune()));

    let route = Router::new()
        .get(ApiMethod::new(
            get_datastore_list,
            ObjectSchema::new("Directory index.")))
        .match_all("store", datastore_info);

    route
}