// src/api2/admin/datastore.rs
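//! Datastore administration API: list backup groups, snapshots and
//! snapshot files, delete snapshots, prune, garbage collection,
//! datastore status, raw file download and backup log upload.
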
use std::collections::{HashSet, HashMap};

use chrono::{TimeZone, Local};
use failure::*;
use futures::*;
use hyper::http::request::Parts;
use hyper::{header, Body, Response, StatusCode};
use serde_json::{json, Value};

use proxmox::{sortable, identity};
use proxmox::api::{http_err, list_subdirs_api_method};
use proxmox::api::{ApiResponseFuture, ApiHandler, ApiMethod, Router, RpcEnvironment, RpcEnvironmentType};
use proxmox::api::router::SubdirMap;
use proxmox::api::schema::*;
use proxmox::tools::try_block;
use proxmox::tools::fs::{file_get_contents, replace_file, CreateOptions};

use crate::api2::types::*;
use crate::backup::*;
use crate::config::datastore;
use crate::server::WorkerTask;
use crate::tools;

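/// Read and decode `index.json.blob` of a snapshot and return its `files`
/// array, with an entry for the index blob itself appended.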
fn read_backup_index(store: &DataStore, backup_dir: &BackupDir) -> Result<Value, Error> {

    let mut path = store.base_path();
    path.push(backup_dir.relative_path());
    path.push("index.json.blob");

    let raw_data = file_get_contents(&path)?;
    let data = DataBlob::from_raw(raw_data)?.decode(None)?;
    let index_size = data.len();
    let mut result: Value = serde_json::from_reader(&mut &data[..])?;

    let mut result = result["files"].take();

    if result == Value::Null {
        bail!("missing 'files' property in backup index {:?}", path);
    }

    result.as_array_mut().unwrap().push(json!({
        "filename": "index.json.blob",
        "size": index_size,
    }));

    Ok(result)
}

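/// Group a flat list of backups by their backup group (`<type>/<id>`).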
fn group_backups(backup_list: Vec<BackupInfo>) -> HashMap<String, Vec<BackupInfo>> {

    let mut group_hash = HashMap::new();

    for info in backup_list {
        let group_id = info.backup_dir.group().group_path().to_str().unwrap().to_owned();
        let time_list = group_hash.entry(group_id).or_insert(vec![]);
        time_list.push(info);
    }

    group_hash
}

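/// API handler: list the backup groups of a datastore, reporting the last
/// backup time, the backup count and the files of the most recent snapshot.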
fn list_groups(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(store)?;

    let backup_list = BackupInfo::list_backups(&datastore.base_path())?;

    let group_hash = group_backups(backup_list);

    let mut groups = vec![];

    for (_group_id, mut list) in group_hash {

        BackupInfo::sort_list(&mut list, false);

        let info = &list[0];
        let group = info.backup_dir.group();

        groups.push(json!({
            "backup-type": group.backup_type(),
            "backup-id": group.backup_id(),
            "last-backup": info.backup_dir.backup_time().timestamp(),
            "backup-count": list.len() as u64,
            "files": info.files,
        }));
    }

    Ok(json!(groups))
}

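/// API handler: list the files of a single snapshot, based on the backup
/// index plus any files found on disk that the index does not mention.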
fn list_snapshot_files(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;
    let backup_time = tools::required_integer_param(&param, "backup-time")?;

    let datastore = DataStore::lookup_datastore(store)?;
    let snapshot = BackupDir::new(backup_type, backup_id, backup_time);

    let mut files = read_backup_index(&datastore, &snapshot)?;

    let info = BackupInfo::new(&datastore.base_path(), snapshot)?;

    let file_set = files.as_array().unwrap().iter().fold(HashSet::new(), |mut acc, item| {
        acc.insert(item["filename"].as_str().unwrap().to_owned());
        acc
    });

    for file in info.files {
        if file_set.contains(&file) { continue; }
        files.as_array_mut().unwrap().push(json!({ "filename": file }));
    }

    Ok(files)
}

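/// API handler: remove a single backup snapshot from the datastore.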
fn delete_snapshots(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;
    let backup_time = tools::required_integer_param(&param, "backup-time")?;

    let snapshot = BackupDir::new(backup_type, backup_id, backup_time);

    let datastore = DataStore::lookup_datastore(store)?;

    datastore.remove_backup_dir(&snapshot)?;

    Ok(Value::Null)
}

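/// API handler: list snapshots of a datastore, optionally filtered by
/// backup type and backup id; each entry includes its total size if the
/// backup index can be read.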
fn list_snapshots(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let backup_type = param["backup-type"].as_str();
    let backup_id = param["backup-id"].as_str();

    let datastore = DataStore::lookup_datastore(store)?;

    let base_path = datastore.base_path();

    let backup_list = BackupInfo::list_backups(&base_path)?;

    let mut snapshots = vec![];

    for info in backup_list {
        let group = info.backup_dir.group();
        if let Some(backup_type) = backup_type {
            if backup_type != group.backup_type() { continue; }
        }
        if let Some(backup_id) = backup_id {
            if backup_id != group.backup_id() { continue; }
        }

        let mut result_item = json!({
            "backup-type": group.backup_type(),
            "backup-id": group.backup_id(),
            "backup-time": info.backup_dir.backup_time().timestamp(),
            "files": info.files,
        });

        if let Ok(index) = read_backup_index(&datastore, &info.backup_dir) {
            let mut backup_size = 0;
            for item in index.as_array().unwrap().iter() {
                if let Some(item_size) = item["size"].as_u64() {
                    backup_size += item_size;
                }
            }
            result_item["size"] = backup_size.into();
        }

        snapshots.push(result_item);
    }

    Ok(json!(snapshots))
}

#[sortable]
const API_METHOD_STATUS: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&status),
    &ObjectSchema::new(
        "Get datastore status.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
        ]),
    )
);

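/// API handler: report total, used and available bytes of the datastore
/// file system (via `statfs64`).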
fn status(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(store)?;

    let base_path = datastore.base_path();

    let mut stat: libc::statfs64 = unsafe { std::mem::zeroed() };

    use nix::NixPath;

    let res = base_path.with_nix_path(|cstr| unsafe { libc::statfs64(cstr.as_ptr(), &mut stat) })?;
    nix::errno::Errno::result(res)?;

    let bsize = stat.f_bsize as u64;
    Ok(json!({
        "total": stat.f_blocks*bsize,
        "used": (stat.f_blocks-stat.f_bfree)*bsize,
        "avail": stat.f_bavail*bsize,
    }))
}

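/// Splice the common `keep-*` prune parameters into an `ObjectSchema`
/// property list.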
#[macro_export]
macro_rules! add_common_prune_prameters {
    ( [ $( $list1:tt )* ] ) => {
        add_common_prune_prameters!([$( $list1 )* ] , [])
    };
    ( [ $( $list1:tt )* ] , [ $( $list2:tt )* ] ) => {
        [
            $( $list1 )*
            (
                "keep-daily",
                true,
                &IntegerSchema::new("Number of daily backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-hourly",
                true,
                &IntegerSchema::new("Number of hourly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-last",
                true,
                &IntegerSchema::new("Number of backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-monthly",
                true,
                &IntegerSchema::new("Number of monthly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-weekly",
                true,
                &IntegerSchema::new("Number of weekly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-yearly",
                true,
                &IntegerSchema::new("Number of yearly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            $( $list2 )*
        ]
    }
}

const API_METHOD_PRUNE: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&prune),
    &ObjectSchema::new(
        "Prune the datastore.",
        &add_common_prune_prameters!([
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("dry-run", true, &BooleanSchema::new(
                "Just show what prune would do, but do not delete anything.")
                .schema()
            ),
        ],[
            ("store", false, &DATASTORE_SCHEMA),
        ])
    )
);

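/// API handler: prune a backup group using the given `keep-*` options.
/// Runs synchronously inside a worker task and returns its UPID.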
fn prune(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;

    let dry_run = param["dry-run"].as_bool().unwrap_or(false);

    let group = BackupGroup::new(backup_type, backup_id);

    let datastore = DataStore::lookup_datastore(store)?;

    let prune_options = PruneOptions {
        keep_last: param["keep-last"].as_u64(),
        keep_hourly: param["keep-hourly"].as_u64(),
        keep_daily: param["keep-daily"].as_u64(),
        keep_weekly: param["keep-weekly"].as_u64(),
        keep_monthly: param["keep-monthly"].as_u64(),
        keep_yearly: param["keep-yearly"].as_u64(),
    };

    let worker_id = format!("{}_{}_{}", store, backup_type, backup_id);

    // We use a WorkerTask just to have a task log, but run synchronously
    let worker = WorkerTask::new("prune", Some(worker_id), "root@pam", true)?;
    let result = try_block! {
        if !prune_options.keeps_something() {
            worker.log("No prune selection - keeping all files.");
            return Ok(());
        } else {
            worker.log(format!("retention options: {}", prune_options.cli_options_string()));
            if dry_run {
                worker.log(format!("Testing prune on store \"{}\" group \"{}/{}\"",
                    store, backup_type, backup_id));
            } else {
                worker.log(format!("Starting prune on store \"{}\" group \"{}/{}\"",
                    store, backup_type, backup_id));
            }
        }

        let list = group.list_backups(&datastore.base_path())?;

        let mut prune_info = compute_prune_info(list, &prune_options)?;

        prune_info.reverse(); // delete older snapshots first

        for (info, keep) in prune_info {
            let backup_time = info.backup_dir.backup_time();
            let timestamp = BackupDir::backup_time_to_string(backup_time);
            let group = info.backup_dir.group();

            let msg = format!(
                "{}/{}/{} {}",
                group.backup_type(),
                group.backup_id(),
                timestamp,
                if keep { "keep" } else { "remove" },
            );

            worker.log(msg);

            if !(dry_run || keep) {
                datastore.remove_backup_dir(&info.backup_dir)?;
            }
        }

        Ok(())
    };

    worker.log_result(&result);

    if let Err(err) = result {
        bail!("prune failed - {}", err);
    }

    Ok(json!(worker.to_string())) // return the UPID
}

#[sortable]
pub const API_METHOD_START_GARBAGE_COLLECTION: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&start_garbage_collection),
    &ObjectSchema::new(
        "Start garbage collection.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
        ])
    )
);

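/// API handler: start garbage collection on a datastore in a background
/// worker thread and return the worker UPID.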
fn start_garbage_collection(
    param: Value,
    _info: &ApiMethod,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap().to_string();

    let datastore = DataStore::lookup_datastore(&store)?;

    println!("Starting garbage collection on store {}", store);

    let to_stdout = rpcenv.env_type() == RpcEnvironmentType::CLI;

    let upid_str = WorkerTask::new_thread(
        "garbage_collection", Some(store.clone()), "root@pam", to_stdout, move |worker|
        {
            worker.log(format!("starting garbage collection on store {}", store));
            datastore.garbage_collection(worker)
        })?;

    Ok(json!(upid_str))
}

#[sortable]
pub const API_METHOD_GARBAGE_COLLECTION_STATUS: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&garbage_collection_status),
    &ObjectSchema::new(
        "Garbage collection status.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
        ])
    )
);

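/// API handler: return the status of the last garbage collection run
/// on a datastore.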
fn garbage_collection_status(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(&store)?;

    println!("Garbage collection status on store {}", store);

    let status = datastore.last_gc_status();

    Ok(serde_json::to_value(&status)?)
}

DM
455fn get_datastore_list(
456 _param: Value,
457 _info: &ApiMethod,
dd5495d6 458 _rpcenv: &mut dyn RpcEnvironment,
6049b71f 459) -> Result<Value, Error> {
15e9b4ed
DM
460
461 let config = datastore::config()?;
462
5a778d92 463 Ok(config.convert_to_array("store"))
15e9b4ed
DM
464}
465
#[sortable]
pub const API_METHOD_DOWNLOAD_FILE: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&download_file),
    &ObjectSchema::new(
        "Download single raw file from backup snapshot.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-time", false, &BACKUP_TIME_SCHEMA),
            ("file-name", false, &StringSchema::new("Raw file name.")
                .format(&FILENAME_FORMAT)
                .schema()
            ),
        ]),
    )
);

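/// API handler: stream a single raw file of a backup snapshot as an
/// `application/octet-stream` HTTP response.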
fn download_file(
    _parts: Parts,
    _req_body: Body,
    param: Value,
    _info: &ApiMethod,
    _rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let store = tools::required_string_param(&param, "store")?;

        let datastore = DataStore::lookup_datastore(store)?;

        let file_name = tools::required_string_param(&param, "file-name")?.to_owned();

        let backup_type = tools::required_string_param(&param, "backup-type")?;
        let backup_id = tools::required_string_param(&param, "backup-id")?;
        let backup_time = tools::required_integer_param(&param, "backup-time")?;

        println!("Download {} from {} ({}/{}/{}/{})", file_name, store,
                 backup_type, backup_id, Local.timestamp(backup_time, 0), file_name);

        let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);

        let mut path = datastore.base_path();
        path.push(backup_dir.relative_path());
        path.push(&file_name);

        let file = tokio::fs::File::open(path)
            .map_err(|err| http_err!(BAD_REQUEST, format!("File open failed: {}", err)))
            .await?;

        let payload = tokio_util::codec::FramedRead::new(file, tokio_util::codec::BytesCodec::new())
            .map_ok(|bytes| hyper::body::Bytes::from(bytes.freeze()));
        let body = Body::wrap_stream(payload);

        // fixme: set other headers ?
        Ok(Response::builder()
            .status(StatusCode::OK)
            .header(header::CONTENT_TYPE, "application/octet-stream")
            .body(body)
            .unwrap())
    }.boxed()
}

#[sortable]
pub const API_METHOD_UPLOAD_BACKUP_LOG: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&upload_backup_log),
    &ObjectSchema::new(
        "Upload the client backup log file ('client.log.blob') into a backup snapshot.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-time", false, &BACKUP_TIME_SCHEMA),
        ]),
    )
);

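/// API handler: store the uploaded client log as `client.log.blob` inside
/// an existing backup snapshot; fails if the snapshot already has a log.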
fn upload_backup_log(
    _parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiMethod,
    _rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let store = tools::required_string_param(&param, "store")?;

        let datastore = DataStore::lookup_datastore(store)?;

        let file_name = "client.log.blob";

        let backup_type = tools::required_string_param(&param, "backup-type")?;
        let backup_id = tools::required_string_param(&param, "backup-id")?;
        let backup_time = tools::required_integer_param(&param, "backup-time")?;

        let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);

        let mut path = datastore.base_path();
        path.push(backup_dir.relative_path());
        path.push(&file_name);

        if path.exists() {
            bail!("backup already contains a log.");
        }

        println!("Upload backup log to {}/{}/{}/{}/{}", store,
                 backup_type, backup_id, BackupDir::backup_time_to_string(backup_dir.backup_time()), file_name);

        let data = req_body
            .map_err(Error::from)
            .try_fold(Vec::new(), |mut acc, chunk| {
                acc.extend_from_slice(&*chunk);
                future::ok::<_, Error>(acc)
            })
            .await?;

        let blob = DataBlob::from_raw(data)?;
        // always verify CRC at server side
        blob.verify_crc()?;
        let raw_data = blob.raw_data();
        replace_file(&path, raw_data, CreateOptions::new())?;

        // fixme: use correct formatter
        Ok(crate::server::formatter::json_response(Ok(Value::Null)))
    }.boxed()
}

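// Sub-directory routes available below a single datastore.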
#[sortable]
const DATASTORE_INFO_SUBDIRS: SubdirMap = &[
    (
        "download",
        &Router::new()
            .download(&API_METHOD_DOWNLOAD_FILE)
    ),
    (
        "files",
        &Router::new()
            .get(
                &ApiMethod::new(
                    &ApiHandler::Sync(&list_snapshot_files),
                    &ObjectSchema::new(
                        "List snapshot files.",
                        &sorted!([
                            ("store", false, &DATASTORE_SCHEMA),
                            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
                            ("backup-id", false, &BACKUP_ID_SCHEMA),
                            ("backup-time", false, &BACKUP_TIME_SCHEMA),
                        ]),
                    )
                )
            )
    ),
    (
        "gc",
        &Router::new()
            .get(&API_METHOD_GARBAGE_COLLECTION_STATUS)
            .post(&API_METHOD_START_GARBAGE_COLLECTION)
    ),
    (
        "groups",
        &Router::new()
            .get(
                &ApiMethod::new(
                    &ApiHandler::Sync(&list_groups),
                    &ObjectSchema::new(
                        "List backup groups.",
                        &sorted!([ ("store", false, &DATASTORE_SCHEMA) ]),
                    )
                )
            )
    ),
    (
        "prune",
        &Router::new()
            .post(&API_METHOD_PRUNE)
    ),
    (
        "snapshots",
        &Router::new()
            .get(
                &ApiMethod::new(
                    &ApiHandler::Sync(&list_snapshots),
                    &ObjectSchema::new(
                        "List backup snapshots.",
                        &sorted!([
                            ("store", false, &DATASTORE_SCHEMA),
                            ("backup-type", true, &BACKUP_TYPE_SCHEMA),
                            ("backup-id", true, &BACKUP_ID_SCHEMA),
                        ]),
                    )
                )
            )
            .delete(
                &ApiMethod::new(
                    &ApiHandler::Sync(&delete_snapshots),
                    &ObjectSchema::new(
                        "Delete backup snapshot.",
                        &sorted!([
                            ("store", false, &DATASTORE_SCHEMA),
                            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
                            ("backup-id", false, &BACKUP_ID_SCHEMA),
                            ("backup-time", false, &BACKUP_TIME_SCHEMA),
                        ]),
                    )
                )
            )
    ),
    (
        "status",
        &Router::new()
            .get(&API_METHOD_STATUS)
    ),
    (
        "upload-backup-log",
        &Router::new()
            .upload(&API_METHOD_UPLOAD_BACKUP_LOG)
    ),
];

const DATASTORE_INFO_ROUTER: Router = Router::new()
    .get(&list_subdirs_api_method!(DATASTORE_INFO_SUBDIRS))
    .subdirs(DATASTORE_INFO_SUBDIRS);


pub const ROUTER: Router = Router::new()
    .get(
        &ApiMethod::new(
            &ApiHandler::Sync(&get_datastore_list),
            &ObjectSchema::new("Directory index.", &[])
        )
    )
    .match_all("store", &DATASTORE_INFO_ROUTER);