]> git.proxmox.com Git - proxmox-backup.git/blame - src/api2/admin/datastore.rs
src/api2/types.rs: define and use new api type SnapshotListItem
[proxmox-backup.git] / src / api2 / admin / datastore.rs
CommitLineData
cad540e9 1use std::collections::{HashSet, HashMap};
cad540e9 2
aeeac29b 3use chrono::{TimeZone, Local};
15e9b4ed 4use failure::*;
9e47c0a5 5use futures::*;
cad540e9
WB
6use hyper::http::request::Parts;
7use hyper::{header, Body, Response, StatusCode};
15e9b4ed
DM
8use serde_json::{json, Value};
9
552c2259 10use proxmox::{sortable, identity};
fc189b19 11use proxmox::api::api;
cad540e9 12use proxmox::api::{http_err, list_subdirs_api_method};
bb084b9c 13use proxmox::api::{ApiResponseFuture, ApiHandler, ApiMethod, Router, RpcEnvironment, RpcEnvironmentType};
cad540e9
WB
14use proxmox::api::router::SubdirMap;
15use proxmox::api::schema::*;
feaa1ad3
WB
16use proxmox::tools::try_block;
17use proxmox::tools::fs::{file_get_contents, replace_file, CreateOptions};
e18a6c9e 18
cad540e9 19use crate::api2::types::*;
e5064ba6 20use crate::backup::*;
cad540e9 21use crate::config::datastore;
0f778e06 22use crate::server::WorkerTask;
cad540e9 23use crate::tools;
1629d2ad 24
/// Read and decode `index.json.blob` for the given snapshot directory.
///
/// Returns the JSON array stored under the index's `files` property, with an
/// extra synthetic entry appended for `index.json.blob` itself (so the index
/// file shows up in file listings with its own size).
///
/// Fails if the blob cannot be read/decoded or if the decoded JSON has no
/// `files` property.
fn read_backup_index(store: &DataStore, backup_dir: &BackupDir) -> Result<Value, Error> {

    let mut path = store.base_path();
    path.push(backup_dir.relative_path());
    path.push("index.json.blob");

    let raw_data = file_get_contents(&path)?;
    // decode(None): no CryptConfig — NOTE(review): presumably the index blob is
    // stored unencrypted; verify against the writer side.
    let data = DataBlob::from_raw(raw_data)?.decode(None)?;
    let index_size = data.len();
    let mut result: Value = serde_json::from_reader(&mut &data[..])?;

    // Shadowing is deliberate: from here on we only care about the
    // `files` array, moved out of the full index object.
    let mut result = result["files"].take();

    if result == Value::Null {
        bail!("missing 'files' property in backup index {:?}", path);
    }

    // Make the index file itself visible in the returned file list.
    result.as_array_mut().unwrap().push(json!({
        "filename": "index.json.blob",
        "size": index_size,
    }));

    Ok(result)
}
49
8f579717
DM
50fn group_backups(backup_list: Vec<BackupInfo>) -> HashMap<String, Vec<BackupInfo>> {
51
52 let mut group_hash = HashMap::new();
53
54 for info in backup_list {
9b492eb2 55 let group_id = info.backup_dir.group().group_path().to_str().unwrap().to_owned();
8f579717
DM
56 let time_list = group_hash.entry(group_id).or_insert(vec![]);
57 time_list.push(info);
58 }
59
60 group_hash
61}
62
ad20d198 63fn list_groups(
812c6f87
DM
64 param: Value,
65 _info: &ApiMethod,
dd5495d6 66 _rpcenv: &mut dyn RpcEnvironment,
812c6f87
DM
67) -> Result<Value, Error> {
68
69 let store = param["store"].as_str().unwrap();
70
71 let datastore = DataStore::lookup_datastore(store)?;
72
c0977501 73 let backup_list = BackupInfo::list_backups(&datastore.base_path())?;
812c6f87
DM
74
75 let group_hash = group_backups(backup_list);
76
77 let mut groups = vec![];
78
79 for (_group_id, mut list) in group_hash {
80
2b01a225 81 BackupInfo::sort_list(&mut list, false);
812c6f87
DM
82
83 let info = &list[0];
9b492eb2 84 let group = info.backup_dir.group();
812c6f87
DM
85
86 groups.push(json!({
1e9a94e5
DM
87 "backup-type": group.backup_type(),
88 "backup-id": group.backup_id(),
9b492eb2 89 "last-backup": info.backup_dir.backup_time().timestamp(),
ad20d198
DM
90 "backup-count": list.len() as u64,
91 "files": info.files,
812c6f87
DM
92 }));
93 }
94
95 Ok(json!(groups))
96}
8f579717 97
01a13423
DM
98fn list_snapshot_files (
99 param: Value,
100 _info: &ApiMethod,
dd5495d6 101 _rpcenv: &mut dyn RpcEnvironment,
01a13423
DM
102) -> Result<Value, Error> {
103
104 let store = tools::required_string_param(&param, "store")?;
105 let backup_type = tools::required_string_param(&param, "backup-type")?;
106 let backup_id = tools::required_string_param(&param, "backup-id")?;
107 let backup_time = tools::required_integer_param(&param, "backup-time")?;
108
d7c24397 109 let datastore = DataStore::lookup_datastore(store)?;
01a13423
DM
110 let snapshot = BackupDir::new(backup_type, backup_id, backup_time);
111
d7c24397
DM
112 let mut files = read_backup_index(&datastore, &snapshot)?;
113
114 let info = BackupInfo::new(&datastore.base_path(), snapshot)?;
01a13423 115
d7c24397
DM
116 let file_set = files.as_array().unwrap().iter().fold(HashSet::new(), |mut acc, item| {
117 acc.insert(item["filename"].as_str().unwrap().to_owned());
118 acc
119 });
120
121 for file in info.files {
122 if file_set.contains(&file) { continue; }
123 files.as_array_mut().unwrap().push(json!({ "filename": file }));
124 }
01a13423 125
8c70e3eb 126 Ok(files)
01a13423
DM
127}
128
6f62c924
DM
129fn delete_snapshots (
130 param: Value,
131 _info: &ApiMethod,
dd5495d6 132 _rpcenv: &mut dyn RpcEnvironment,
6f62c924
DM
133) -> Result<Value, Error> {
134
135 let store = tools::required_string_param(&param, "store")?;
136 let backup_type = tools::required_string_param(&param, "backup-type")?;
137 let backup_id = tools::required_string_param(&param, "backup-id")?;
138 let backup_time = tools::required_integer_param(&param, "backup-time")?;
6f62c924 139
391d3107 140 let snapshot = BackupDir::new(backup_type, backup_id, backup_time);
6f62c924
DM
141
142 let datastore = DataStore::lookup_datastore(store)?;
143
144 datastore.remove_backup_dir(&snapshot)?;
145
146 Ok(Value::Null)
147}
148
#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                optional: true,
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                optional: true,
                schema: BACKUP_ID_SCHEMA,
            },
        },
    },
    returns: {
        type: Array,
        description: "Returns the list of snapshots.",
        items: {
            type: SnapshotListItem,
        }
    },
)]
/// List backup snapshots.
fn list_snapshots (
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Vec<SnapshotListItem>, Error> {

    let store = tools::required_string_param(&param, "store")?;
    // Optional filters: when present, restrict the listing to one
    // backup type and/or one backup id.
    let backup_type = param["backup-type"].as_str();
    let backup_id = param["backup-id"].as_str();

    let datastore = DataStore::lookup_datastore(store)?;

    let base_path = datastore.base_path();

    let backup_list = BackupInfo::list_backups(&base_path)?;

    let mut snapshots = vec![];

    for info in backup_list {
        let group = info.backup_dir.group();
        // Skip entries not matching the optional filters.
        if let Some(backup_type) = backup_type {
            if backup_type != group.backup_type() { continue; }
        }
        if let Some(backup_id) = backup_id {
            if backup_id != group.backup_id() { continue; }
        }

        let mut result_item = SnapshotListItem {
            backup_type: group.backup_type().to_string(),
            backup_id: group.backup_id().to_string(),
            backup_time: info.backup_dir.backup_time().timestamp(),
            files: info.files,
            size: None,
        };

        // Best effort: sum the per-file sizes from the snapshot index.
        // A snapshot without a readable index simply reports size: None.
        if let Ok(index) = read_backup_index(&datastore, &info.backup_dir) {
            let mut backup_size = 0;
            for item in index.as_array().unwrap().iter() {
                if let Some(item_size) = item["size"].as_u64() {
                    backup_size += item_size;
                }
            }
            result_item.size = Some(backup_size);
        }

        snapshots.push(result_item);
    }

    Ok(snapshots)
}
/// API method descriptor for [`status`]: report datastore disk usage.
#[sortable]
const API_METHOD_STATUS: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&status),
    &ObjectSchema::new(
        "Get datastore status.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
        ]),
    )
);
/// Report filesystem usage of the datastore's base path as
/// `{ total, used, avail }`, all in bytes.
fn status(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(store)?;

    let base_path = datastore.base_path();

    // SAFETY: libc::statfs64 is a plain-old-data struct; all-zero is a
    // valid (if meaningless) value, and statfs64() overwrites it below.
    let mut stat: libc::statfs64 = unsafe { std::mem::zeroed() };

    use nix::NixPath;

    // SAFETY: `cstr` is a valid NUL-terminated path for the duration of the
    // call and `stat` is a live, properly sized statfs64 out-buffer.
    let res = base_path.with_nix_path(|cstr| unsafe { libc::statfs64(cstr.as_ptr(), &mut stat) })?;
    // Convert a -1 return value into the corresponding errno error.
    nix::errno::Errno::result(res)?;

    // Block counts are reported in units of f_bsize; convert to bytes.
    let bsize = stat.f_bsize as u64;
    Ok(json!({
        "total": stat.f_blocks*bsize,
        "used": (stat.f_blocks-stat.f_bfree)*bsize,
        "avail": stat.f_bavail*bsize,
    }))
}
/// Build a schema-property array containing the common `keep-*` prune
/// parameters, optionally spliced between two caller-provided property lists.
///
/// The first bracketed list is emitted before the keep options (callers pass
/// properties sorting *before* "keep-daily"), the optional second list after
/// them — this keeps the resulting array alphabetically sorted as required by
/// `ObjectSchema`.
///
/// NOTE(review): "prameters" is a typo, but the macro is `#[macro_export]`ed,
/// so renaming it would break external callers; kept as-is.
#[macro_export]
macro_rules! add_common_prune_prameters {
    ( [ $( $list1:tt )* ] ) => {
        // Single-list form: delegate with an empty trailing list.
        add_common_prune_prameters!([$( $list1 )* ] , [])
    };
    ( [ $( $list1:tt )* ] , [ $( $list2:tt )* ] ) => {
        [
            $( $list1 )*
            (
                "keep-daily",
                true,
                &IntegerSchema::new("Number of daily backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-hourly",
                true,
                &IntegerSchema::new("Number of hourly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-last",
                true,
                &IntegerSchema::new("Number of backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-monthly",
                true,
                &IntegerSchema::new("Number of monthly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-weekly",
                true,
                &IntegerSchema::new("Number of weekly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-yearly",
                true,
                &IntegerSchema::new("Number of yearly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            $( $list2 )*
        ]
    }
}
/// API method descriptor for [`prune`].
///
/// No `#[sortable]` here: the property array must already be alphabetically
/// sorted by hand — the macro splices the caller lists around the `keep-*`
/// options to preserve that order ("store" sorts after "keep-yearly").
const API_METHOD_PRUNE: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&prune),
    &ObjectSchema::new(
        "Prune the datastore.",
        &add_common_prune_prameters!([
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("dry-run", true, &BooleanSchema::new(
                "Just show what prune would do, but do not delete anything.")
                .schema()
            ),
        ],[
            ("store", false, &DATASTORE_SCHEMA),
        ])
    )
);
/// Prune a single backup group according to the `keep-*` retention options.
///
/// Runs synchronously inside a `WorkerTask` (used only for its task log) and
/// returns the worker's UPID string on success. With `dry-run` the decisions
/// are logged but nothing is deleted.
fn prune(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;

    let dry_run = param["dry-run"].as_bool().unwrap_or(false);

    let group = BackupGroup::new(backup_type, backup_id);

    let datastore = DataStore::lookup_datastore(store)?;

    // Absent parameters stay None, meaning "no limit for this interval".
    let prune_options = PruneOptions {
        keep_last: param["keep-last"].as_u64(),
        keep_hourly: param["keep-hourly"].as_u64(),
        keep_daily: param["keep-daily"].as_u64(),
        keep_weekly: param["keep-weekly"].as_u64(),
        keep_monthly: param["keep-monthly"].as_u64(),
        keep_yearly: param["keep-yearly"].as_u64(),
    };

    let worker_id = format!("{}_{}_{}", store, backup_type, backup_id);

    // We use a WorkerTask just to have a task log, but run synchronously
    let worker = WorkerTask::new("prune", Some(worker_id), "root@pam", true)?;
    // try_block! lets `?`/`return` abort only this block, so the worker
    // result is always logged below before any error is propagated.
    let result = try_block! {
        if !prune_options.keeps_something() {
            // No keep-* option given: keeping everything is the safe default.
            worker.log("No prune selection - keeping all files.");
            return Ok(());
        } else {
            worker.log(format!("retention options: {}", prune_options.cli_options_string()));
            if dry_run {
                worker.log(format!("Testing prune on store \"{}\" group \"{}/{}\"",
                    store, backup_type, backup_id));
            } else {
                worker.log(format!("Starting prune on store \"{}\" group \"{}/{}\"",
                    store, backup_type, backup_id));
            }
        }

        let list = group.list_backups(&datastore.base_path())?;

        // Pairs of (snapshot, keep-flag) as decided by the retention rules.
        let mut prune_info = compute_prune_info(list, &prune_options)?;

        prune_info.reverse(); // delete older snapshots first

        for (info, keep) in prune_info {
            let backup_time = info.backup_dir.backup_time();
            let timestamp = BackupDir::backup_time_to_string(backup_time);
            let group = info.backup_dir.group();

            // Log every decision, kept or removed, for the task log.
            let msg = format!(
                "{}/{}/{} {}",
                group.backup_type(),
                group.backup_id(),
                timestamp,
                if keep { "keep" } else { "remove" },
            );

            worker.log(msg);

            // Only actually delete when neither dry-run nor marked to keep.
            if !(dry_run || keep) {
                datastore.remove_backup_dir(&info.backup_dir)?;
            }
        }

        Ok(())
    };

    worker.log_result(&result);

    if let Err(err) = result {
        bail!("prune failed - {}", err);
    }

    Ok(json!(worker.to_string())) // return the UPID
}
/// API method descriptor for [`start_garbage_collection`].
#[sortable]
pub const API_METHOD_START_GARBAGE_COLLECTION: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&start_garbage_collection),
    &ObjectSchema::new(
        "Start garbage collection.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
        ])
    )
);
6049b71f
DM
429fn start_garbage_collection(
430 param: Value,
431 _info: &ApiMethod,
dd5495d6 432 rpcenv: &mut dyn RpcEnvironment,
6049b71f 433) -> Result<Value, Error> {
15e9b4ed 434
3e6a7dee 435 let store = param["store"].as_str().unwrap().to_string();
15e9b4ed 436
3e6a7dee 437 let datastore = DataStore::lookup_datastore(&store)?;
15e9b4ed 438
5a778d92 439 println!("Starting garbage collection on store {}", store);
15e9b4ed 440
0f778e06 441 let to_stdout = if rpcenv.env_type() == RpcEnvironmentType::CLI { true } else { false };
15e9b4ed 442
0f778e06
DM
443 let upid_str = WorkerTask::new_thread(
444 "garbage_collection", Some(store.clone()), "root@pam", to_stdout, move |worker|
445 {
446 worker.log(format!("starting garbage collection on store {}", store));
d4b59ae0 447 datastore.garbage_collection(worker)
0f778e06
DM
448 })?;
449
450 Ok(json!(upid_str))
15e9b4ed
DM
451}
452
/// API method descriptor for [`garbage_collection_status`].
#[sortable]
pub const API_METHOD_GARBAGE_COLLECTION_STATUS: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&garbage_collection_status),
    &ObjectSchema::new(
        "Garbage collection status.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
        ])
    )
);
6049b71f
DM
464fn garbage_collection_status(
465 param: Value,
466 _info: &ApiMethod,
dd5495d6 467 _rpcenv: &mut dyn RpcEnvironment,
6049b71f 468) -> Result<Value, Error> {
691c89a0 469
5a778d92 470 let store = param["store"].as_str().unwrap();
691c89a0 471
f2b99c34
DM
472 let datastore = DataStore::lookup_datastore(&store)?;
473
5a778d92 474 println!("Garbage collection status on store {}", store);
691c89a0 475
f2b99c34 476 let status = datastore.last_gc_status();
691c89a0 477
f2b99c34 478 Ok(serde_json::to_value(&status)?)
691c89a0
DM
479}
480
691c89a0 481
6049b71f
DM
482fn get_datastore_list(
483 _param: Value,
484 _info: &ApiMethod,
dd5495d6 485 _rpcenv: &mut dyn RpcEnvironment,
6049b71f 486) -> Result<Value, Error> {
15e9b4ed
DM
487
488 let config = datastore::config()?;
489
5a778d92 490 Ok(config.convert_to_array("store"))
15e9b4ed
DM
491}
492
/// API method descriptor for [`download_file`].
#[sortable]
pub const API_METHOD_DOWNLOAD_FILE: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&download_file),
    &ObjectSchema::new(
        "Download single raw file from backup snapshot.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-time", false, &BACKUP_TIME_SCHEMA),
            ("file-name", false, &StringSchema::new("Raw file name.")
                .format(&FILENAME_FORMAT)
                .schema()
            ),
        ]),
    )
);
/// Stream a single raw file out of a backup snapshot as an HTTP response.
///
/// The file is served as `application/octet-stream`; a failure to open it
/// is mapped to HTTP 400.
fn download_file(
    _parts: Parts,
    _req_body: Body,
    param: Value,
    _info: &ApiMethod,
    _rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    // `async move` captures `param`; boxed at the end to fit ApiResponseFuture.
    async move {
        let store = tools::required_string_param(&param, "store")?;

        let datastore = DataStore::lookup_datastore(store)?;

        let file_name = tools::required_string_param(&param, "file-name")?.to_owned();

        let backup_type = tools::required_string_param(&param, "backup-type")?;
        let backup_id = tools::required_string_param(&param, "backup-id")?;
        let backup_time = tools::required_integer_param(&param, "backup-time")?;

        println!("Download {} from {} ({}/{}/{}/{})", file_name, store,
                 backup_type, backup_id, Local.timestamp(backup_time, 0), file_name);

        let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);

        // <base>/<type>/<id>/<time>/<file-name>
        let mut path = datastore.base_path();
        path.push(backup_dir.relative_path());
        path.push(&file_name);

        let file = tokio::fs::File::open(path)
            .map_err(|err| http_err!(BAD_REQUEST, format!("File open failed: {}", err)))
            .await?;

        // Stream the file in chunks instead of buffering it in memory.
        let payload = tokio_util::codec::FramedRead::new(file, tokio_util::codec::BytesCodec::new())
            .map_ok(|bytes| hyper::body::Bytes::from(bytes.freeze()));
        let body = Body::wrap_stream(payload);

        // fixme: set other headers ?
        Ok(Response::builder()
            .status(StatusCode::OK)
            .header(header::CONTENT_TYPE, "application/octet-stream")
            .body(body)
            .unwrap())
    }.boxed()
}
552c2259 556#[sortable]
0ab08ac9
DM
557pub const API_METHOD_UPLOAD_BACKUP_LOG: ApiMethod = ApiMethod::new(
558 &ApiHandler::AsyncHttp(&upload_backup_log),
255f378a
DM
559 &ObjectSchema::new(
560 "Download single raw file from backup snapshot.",
552c2259 561 &sorted!([
66c49c21 562 ("store", false, &DATASTORE_SCHEMA),
255f378a 563 ("backup-type", false, &BACKUP_TYPE_SCHEMA),
0ab08ac9 564 ("backup-id", false, &BACKUP_ID_SCHEMA),
255f378a 565 ("backup-time", false, &BACKUP_TIME_SCHEMA),
552c2259 566 ]),
9e47c0a5 567 )
255f378a 568);
9e47c0a5 569
/// Store an uploaded client log blob (`client.log.blob`) into an existing
/// backup snapshot.
///
/// Refuses to overwrite: fails if the snapshot already contains a log.
/// The uploaded blob's CRC is verified before the raw bytes are written.
fn upload_backup_log(
    _parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiMethod,
    _rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let store = tools::required_string_param(&param, "store")?;

        let datastore = DataStore::lookup_datastore(store)?;

        // Fixed target name inside the snapshot directory.
        let file_name = "client.log.blob";

        let backup_type = tools::required_string_param(&param, "backup-type")?;
        let backup_id = tools::required_string_param(&param, "backup-id")?;
        let backup_time = tools::required_integer_param(&param, "backup-time")?;

        let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);

        let mut path = datastore.base_path();
        path.push(backup_dir.relative_path());
        path.push(&file_name);

        // A log may only be uploaded once per snapshot.
        if path.exists() {
            bail!("backup already contains a log.");
        }

        println!("Upload backup log to {}/{}/{}/{}/{}", store,
                 backup_type, backup_id, BackupDir::backup_time_to_string(backup_dir.backup_time()), file_name);

        // Collect the full request body into one Vec before validating.
        let data = req_body
            .map_err(Error::from)
            .try_fold(Vec::new(), |mut acc, chunk| {
                acc.extend_from_slice(&*chunk);
                future::ok::<_, Error>(acc)
            })
            .await?;

        let blob = DataBlob::from_raw(data)?;
        // always verify CRC at server side
        blob.verify_crc()?;
        let raw_data = blob.raw_data();
        // Atomic replace-style write of the raw blob bytes.
        replace_file(&path, raw_data, CreateOptions::new())?;

        // fixme: use correct formatter
        Ok(crate::server::formatter::json_response(Ok(Value::Null)))
    }.boxed()
}
/// Sub-routes available below a concrete datastore
/// (`/admin/datastore/{store}/...`).
///
/// Entries must stay sorted alphabetically by path segment — the
/// `list_subdirs_api_method!` index and subdir lookup rely on it.
#[sortable]
const DATASTORE_INFO_SUBDIRS: SubdirMap = &[
    (
        "download",
        &Router::new()
            .download(&API_METHOD_DOWNLOAD_FILE)
    ),
    (
        "files",
        &Router::new()
            .get(
                &ApiMethod::new(
                    &ApiHandler::Sync(&list_snapshot_files),
                    &ObjectSchema::new(
                        "List snapshot files.",
                        &sorted!([
                            ("store", false, &DATASTORE_SCHEMA),
                            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
                            ("backup-id", false, &BACKUP_ID_SCHEMA),
                            ("backup-time", false, &BACKUP_TIME_SCHEMA),
                        ]),
                    )
                )
            )
    ),
    (
        "gc",
        &Router::new()
            .get(&API_METHOD_GARBAGE_COLLECTION_STATUS)
            .post(&API_METHOD_START_GARBAGE_COLLECTION)
    ),
    (
        "groups",
        &Router::new()
            .get(
                &ApiMethod::new(
                    &ApiHandler::Sync(&list_groups),
                    &ObjectSchema::new(
                        "List backup groups.",
                        &sorted!([ ("store", false, &DATASTORE_SCHEMA) ]),
                    )
                )
            )
    ),
    (
        "prune",
        &Router::new()
            .post(&API_METHOD_PRUNE)
    ),
    (
        "snapshots",
        &Router::new()
            // GET descriptor is generated by the #[api] macro on list_snapshots.
            .get(&API_METHOD_LIST_SNAPSHOTS)
            .delete(
                &ApiMethod::new(
                    &ApiHandler::Sync(&delete_snapshots),
                    &ObjectSchema::new(
                        "Delete backup snapshot.",
                        &sorted!([
                            ("store", false, &DATASTORE_SCHEMA),
                            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
                            ("backup-id", false, &BACKUP_ID_SCHEMA),
                            ("backup-time", false, &BACKUP_TIME_SCHEMA),
                        ]),
                    )
                )
            )
    ),
    (
        "status",
        &Router::new()
            .get(&API_METHOD_STATUS)
    ),
    (
        "upload-backup-log",
        &Router::new()
            .upload(&API_METHOD_UPLOAD_BACKUP_LOG)
    ),
];
/// Router for a single datastore: GET lists the subdirectories,
/// everything else is dispatched into `DATASTORE_INFO_SUBDIRS`.
const DATASTORE_INFO_ROUTER: Router = Router::new()
    .get(&list_subdirs_api_method!(DATASTORE_INFO_SUBDIRS))
    .subdirs(DATASTORE_INFO_SUBDIRS);
/// Top-level router: GET lists the configured datastores; the `{store}`
/// path parameter selects a concrete datastore's sub-router.
pub const ROUTER: Router = Router::new()
    .get(
        &ApiMethod::new(
            &ApiHandler::Sync(&get_datastore_list),
            &ObjectSchema::new("Directory index.", &[])
        )
    )
    .match_all("store", &DATASTORE_INFO_ROUTER);