]> git.proxmox.com Git - proxmox-backup.git/blob - src/api2/admin/datastore.rs
src/backup/datastore.rs: add helper to sort backup lists
[proxmox-backup.git] / src / api2 / admin / datastore.rs
1 use failure::*;
2
3 use crate::tools;
4 use crate::api_schema::*;
5 use crate::api_schema::router::*;
6 //use crate::server::rest::*;
7 use serde_json::{json, Value};
8 use std::collections::{HashSet, HashMap};
9 use chrono::{DateTime, Datelike, Local};
10 use std::path::PathBuf;
11 use std::sync::Arc;
12
13 //use hyper::StatusCode;
14 //use hyper::rt::{Future, Stream};
15
16 use crate::config::datastore;
17
18 use crate::backup::*;
19
20 mod catar;
21
22 fn group_backups(backup_list: Vec<BackupInfo>) -> HashMap<String, Vec<BackupInfo>> {
23
24 let mut group_hash = HashMap::new();
25
26 for info in backup_list {
27 let group_id = info.backup_dir.group().group_path().to_str().unwrap().to_owned();
28 let time_list = group_hash.entry(group_id).or_insert(vec![]);
29 time_list.push(info);
30 }
31
32 group_hash
33 }
34
35 fn mark_selections<F: Fn(DateTime<Local>, &BackupInfo) -> String> (
36 mark: &mut HashSet<PathBuf>,
37 list: &Vec<BackupInfo>,
38 keep: usize,
39 select_id: F,
40 ){
41 let mut hash = HashSet::new();
42 for info in list {
43 let local_time = info.backup_dir.backup_time().with_timezone(&Local);
44 if hash.len() >= keep as usize { break; }
45 let backup_id = info.backup_dir.relative_path();
46 let sel_id: String = select_id(local_time, &info);
47 if !hash.contains(&sel_id) {
48 hash.insert(sel_id);
49 //println!(" KEEP ID {} {}", backup_id, local_time.format("%c"));
50 mark.insert(backup_id);
51 }
52 }
53 }
54
55 fn list_groups(
56 param: Value,
57 _info: &ApiMethod,
58 _rpcenv: &mut RpcEnvironment,
59 ) -> Result<Value, Error> {
60
61 let store = param["store"].as_str().unwrap();
62
63 let datastore = DataStore::lookup_datastore(store)?;
64
65 let backup_list = datastore.list_backups()?;
66
67 let group_hash = group_backups(backup_list);
68
69 let mut groups = vec![];
70
71 for (_group_id, mut list) in group_hash {
72
73 BackupInfo::sort_list(&mut list, false);
74
75 let info = &list[0];
76 let group = info.backup_dir.group();
77
78 groups.push(json!({
79 "backup-type": group.backup_type(),
80 "backup-id": group.backup_id(),
81 "last-backup": info.backup_dir.backup_time().timestamp(),
82 "backup-count": list.len() as u64,
83 "files": info.files,
84 }));
85 }
86
87 Ok(json!(groups))
88 }
89
90 fn delete_snapshots (
91 param: Value,
92 _info: &ApiMethod,
93 _rpcenv: &mut RpcEnvironment,
94 ) -> Result<Value, Error> {
95
96 let store = tools::required_string_param(&param, "store")?;
97 let backup_type = tools::required_string_param(&param, "backup-type")?;
98 let backup_id = tools::required_string_param(&param, "backup-id")?;
99 let backup_time = tools::required_integer_param(&param, "backup-time")?;
100
101 let snapshot = BackupDir::new(BackupGroup::new(backup_type, backup_id), backup_time);
102
103 let datastore = DataStore::lookup_datastore(store)?;
104
105 datastore.remove_backup_dir(&snapshot)?;
106
107 Ok(Value::Null)
108 }
109
110 fn list_snapshots (
111 param: Value,
112 _info: &ApiMethod,
113 _rpcenv: &mut RpcEnvironment,
114 ) -> Result<Value, Error> {
115
116 let store = tools::required_string_param(&param, "store")?;
117 let backup_type = tools::required_string_param(&param, "backup-type")?;
118 let backup_id = tools::required_string_param(&param, "backup-id")?;
119
120 let group = BackupGroup::new(backup_type, backup_id);
121
122 let datastore = DataStore::lookup_datastore(store)?;
123
124 let backup_list = datastore.list_backups()?;
125
126 let mut group_hash = group_backups(backup_list);
127
128 let group_id = group.group_path().to_str().unwrap().to_owned();
129
130 let group_snapshots = match group_hash.get_mut(&group_id) {
131 Some(data) => {
132 // new backups first
133 BackupInfo::sort_list(data, false);
134 data
135 }
136 None => bail!("Backup group '{}' does not exists.", group_id),
137 };
138
139 let mut snapshots = vec![];
140
141 for info in group_snapshots {
142
143 let group = info.backup_dir.group();
144
145 snapshots.push(json!({
146 "backup-type": group.backup_type(),
147 "backup-id": group.backup_id(),
148 "backup-time": info.backup_dir.backup_time().timestamp(),
149 "files": info.files,
150 }));
151 }
152
153 Ok(json!(snapshots))
154 }
155
156 fn prune(
157 param: Value,
158 _info: &ApiMethod,
159 _rpcenv: &mut RpcEnvironment,
160 ) -> Result<Value, Error> {
161
162 let store = param["store"].as_str().unwrap();
163
164 let datastore = DataStore::lookup_datastore(store)?;
165
166 println!("Starting prune on store {}", store);
167
168 let backup_list = datastore.list_backups()?;
169
170 let group_hash = group_backups(backup_list);
171
172 for (_group_id, mut list) in group_hash {
173
174 let mut mark = HashSet::new();
175
176 BackupInfo::sort_list(&mut list, false);
177
178 if let Some(keep_last) = param["keep-last"].as_u64() {
179 list.iter().take(keep_last as usize).for_each(|info| {
180 mark.insert(info.backup_dir.relative_path());
181 });
182 }
183
184 if let Some(keep_daily) = param["keep-daily"].as_u64() {
185 mark_selections(&mut mark, &list, keep_daily as usize, |local_time, _info| {
186 format!("{}/{}/{}", local_time.year(), local_time.month(), local_time.day())
187 });
188 }
189
190 if let Some(keep_weekly) = param["keep-weekly"].as_u64() {
191 mark_selections(&mut mark, &list, keep_weekly as usize, |local_time, _info| {
192 format!("{}/{}", local_time.year(), local_time.iso_week().week())
193 });
194 }
195
196 if let Some(keep_monthly) = param["keep-monthly"].as_u64() {
197 mark_selections(&mut mark, &list, keep_monthly as usize, |local_time, _info| {
198 format!("{}/{}", local_time.year(), local_time.month())
199 });
200 }
201
202 if let Some(keep_yearly) = param["keep-yearly"].as_u64() {
203 mark_selections(&mut mark, &list, keep_yearly as usize, |local_time, _info| {
204 format!("{}/{}", local_time.year(), local_time.year())
205 });
206 }
207
208 let mut remove_list: Vec<BackupInfo> = list.into_iter()
209 .filter(|info| !mark.contains(&info.backup_dir.relative_path())).collect();
210
211 BackupInfo::sort_list(&mut remove_list, true);
212
213 for info in remove_list {
214 datastore.remove_backup_dir(&info.backup_dir)?;
215 }
216 }
217
218 Ok(json!(null))
219 }
220
/// Add the common prune options ("keep-last", "keep-daily", "keep-weekly",
/// "keep-monthly", "keep-yearly") to an object schema.
///
/// Every option is an optional integer with minimum 1, so callers can
/// share one definition between the CLI and the API endpoint.
///
/// NOTE(review): the name contains a typo ("prameters" instead of
/// "parameters"), but it is public API and therefore left unchanged here.
pub fn add_common_prune_prameters(schema: ObjectSchema) -> ObjectSchema {

    schema
        .optional(
            "keep-last",
            IntegerSchema::new("Number of backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-daily",
            IntegerSchema::new("Number of daily backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-weekly",
            IntegerSchema::new("Number of weekly backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-monthly",
            IntegerSchema::new("Number of monthly backups to keep.")
                .minimum(1)
        )
        .optional(
            "keep-yearly",
            IntegerSchema::new("Number of yearly backups to keep.")
                .minimum(1)
        )
}
250
251 fn api_method_prune() -> ApiMethod {
252 ApiMethod::new(
253 prune,
254 add_common_prune_prameters(
255 ObjectSchema::new("Prune the datastore.")
256 .required(
257 "store",
258 StringSchema::new("Datastore name.")
259 )
260 )
261 )
262 }
263
264 // this is just a test for mutability/mutex handling - will remove later
265 fn start_garbage_collection(
266 param: Value,
267 _info: &ApiMethod,
268 _rpcenv: &mut RpcEnvironment,
269 ) -> Result<Value, Error> {
270
271 let store = param["store"].as_str().unwrap();
272
273 let datastore = DataStore::lookup_datastore(store)?;
274
275 println!("Starting garbage collection on store {}", store);
276
277 datastore.garbage_collection()?;
278
279 Ok(json!(null))
280 }
281
282 pub fn api_method_start_garbage_collection() -> ApiMethod {
283 ApiMethod::new(
284 start_garbage_collection,
285 ObjectSchema::new("Start garbage collection.")
286 .required("store", StringSchema::new("Datastore name."))
287 )
288 }
289
290 fn garbage_collection_status(
291 param: Value,
292 _info: &ApiMethod,
293 _rpcenv: &mut RpcEnvironment,
294 ) -> Result<Value, Error> {
295
296 let store = param["store"].as_str().unwrap();
297
298 println!("Garbage collection status on store {}", store);
299
300 Ok(json!(null))
301
302 }
303
304 pub fn api_method_garbage_collection_status() -> ApiMethod {
305 ApiMethod::new(
306 garbage_collection_status,
307 ObjectSchema::new("Garbage collection status.")
308 .required("store", StringSchema::new("Datastore name."))
309 )
310 }
311
312 fn get_backup_list(
313 param: Value,
314 _info: &ApiMethod,
315 _rpcenv: &mut RpcEnvironment,
316 ) -> Result<Value, Error> {
317
318 //let config = datastore::config()?;
319
320 let store = param["store"].as_str().unwrap();
321
322 let datastore = DataStore::lookup_datastore(store)?;
323
324 let mut list = vec![];
325
326 for info in datastore.list_backups()? {
327 list.push(json!({
328 "backup-type": info.backup_dir.group().backup_type(),
329 "backup-id": info.backup_dir.group().backup_id(),
330 "backup-time": info.backup_dir.backup_time().timestamp(),
331 "files": info.files,
332 }));
333 }
334
335 let result = json!(list);
336
337 Ok(result)
338 }
339
340 fn get_datastore_list(
341 _param: Value,
342 _info: &ApiMethod,
343 _rpcenv: &mut RpcEnvironment,
344 ) -> Result<Value, Error> {
345
346 let config = datastore::config()?;
347
348 Ok(config.convert_to_array("store"))
349 }
350
351
352 pub fn router() -> Router {
353
354 let store_schema: Arc<Schema> = Arc::new(
355 StringSchema::new("Datastore name.").into()
356 );
357
358 let datastore_info = Router::new()
359 .get(ApiMethod::new(
360 |_,_,_| Ok(json!([
361 {"subdir": "backups" },
362 {"subdir": "catar" },
363 {"subdir": "gc" },
364 {"subdir": "groups" },
365 {"subdir": "snapshots" },
366 {"subdir": "status" },
367 {"subdir": "prune" },
368 ])),
369 ObjectSchema::new("Directory index.")
370 .required("store", store_schema.clone()))
371 )
372 .subdir(
373 "backups",
374 Router::new()
375 .get(ApiMethod::new(
376 get_backup_list,
377 ObjectSchema::new("List backups.")
378 .required("store", store_schema.clone()))))
379 .subdir(
380 "catar",
381 Router::new()
382 .download(catar::api_method_download_catar())
383 .upload(catar::api_method_upload_catar()))
384 .subdir(
385 "gc",
386 Router::new()
387 .get(api_method_garbage_collection_status())
388 .post(api_method_start_garbage_collection()))
389 .subdir(
390 "groups",
391 Router::new()
392 .get(ApiMethod::new(
393 list_groups,
394 ObjectSchema::new("List backup groups.")
395 .required("store", store_schema.clone()))))
396 .subdir(
397 "snapshots",
398 Router::new()
399 .get(
400 ApiMethod::new(
401 list_snapshots,
402 ObjectSchema::new("List backup groups.")
403 .required("store", store_schema.clone())
404 .required("backup-type", StringSchema::new("Backup type."))
405 .required("backup-id", StringSchema::new("Backup ID."))
406 )
407 )
408 .delete(
409 ApiMethod::new(
410 delete_snapshots,
411 ObjectSchema::new("Delete backup snapshot.")
412 .required("store", store_schema.clone())
413 .required("backup-type", StringSchema::new("Backup type."))
414 .required("backup-id", StringSchema::new("Backup ID."))
415 .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
416 .minimum(1547797308))
417 )
418 )
419 )
420 .subdir(
421 "prune",
422 Router::new()
423 .post(api_method_prune()));
424
425
426
427 let route = Router::new()
428 .get(ApiMethod::new(
429 get_datastore_list,
430 ObjectSchema::new("Directory index.")))
431 .match_all("store", datastore_info);
432
433
434
435 route
436 }