// proxmox-backup: src/api2/admin/datastore.rs
use std::collections::{HashSet, HashMap};

use chrono::{TimeZone, Local};
use failure::*;
use futures::*;
use hyper::http::request::Parts;
use hyper::{header, Body, Response, StatusCode};
use serde_json::{json, Value};

use proxmox::api::api;
use proxmox::api::{ApiResponseFuture, ApiHandler, ApiMethod, Router, RpcEnvironment, RpcEnvironmentType};
use proxmox::api::router::SubdirMap;
use proxmox::api::schema::*;
use proxmox::tools::fs::{file_get_contents, replace_file, CreateOptions};
use proxmox::try_block;
use proxmox::{http_err, identity, list_subdirs_api_method, sortable};

use crate::api2::types::*;
use crate::backup::*;
use crate::config::datastore;
use crate::server::WorkerTask;
use crate::tools;

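// Read a snapshot's "index.json.blob", decode it and return the contained
// "files" array, with an entry for the index blob itself appended.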
fn read_backup_index(store: &DataStore, backup_dir: &BackupDir) -> Result<Value, Error> {

    let mut path = store.base_path();
    path.push(backup_dir.relative_path());
    path.push("index.json.blob");

    let raw_data = file_get_contents(&path)?;
    let data = DataBlob::from_raw(raw_data)?.decode(None)?;
    let index_size = data.len();
    let mut result: Value = serde_json::from_reader(&mut &data[..])?;

    let mut result = result["files"].take();

    if result == Value::Null {
        bail!("missing 'files' property in backup index {:?}", path);
    }

    result.as_array_mut().unwrap().push(json!({
        "filename": "index.json.blob",
        "size": index_size,
    }));

    Ok(result)
}

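// Group a flat list of backups by their group path ("<backup-type>/<backup-id>").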
fn group_backups(backup_list: Vec<BackupInfo>) -> HashMap<String, Vec<BackupInfo>> {

    let mut group_hash = HashMap::new();

    for info in backup_list {
        let group_id = info.backup_dir.group().group_path().to_str().unwrap().to_owned();
        let time_list = group_hash.entry(group_id).or_insert(vec![]);
        time_list.push(info);
    }

    group_hash
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
        },
    },
    returns: {
        type: Array,
        description: "Returns the list of backup groups.",
        items: {
            type: GroupListItem,
        }
    },
)]
/// List backup groups.
fn list_groups(
    store: String,
) -> Result<Vec<GroupListItem>, Error> {

    let datastore = DataStore::lookup_datastore(&store)?;

    let backup_list = BackupInfo::list_backups(&datastore.base_path())?;

    let group_hash = group_backups(backup_list);

    let mut groups = Vec::new();

    for (_group_id, mut list) in group_hash {

        BackupInfo::sort_list(&mut list, false);

        let info = &list[0];
        let group = info.backup_dir.group();

        let result_item = GroupListItem {
            backup_type: group.backup_type().to_string(),
            backup_id: group.backup_id().to_string(),
            last_backup: info.backup_dir.backup_time().timestamp(),
            backup_count: list.len() as u64,
            files: info.files.clone(),
        };
        groups.push(result_item);
    }

    Ok(groups)
}

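// List the files of a single snapshot: everything referenced by the backup index,
// plus any additional files found for the snapshot that are not part of the index.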
fn list_snapshot_files (
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;
    let backup_time = tools::required_integer_param(&param, "backup-time")?;

    let datastore = DataStore::lookup_datastore(store)?;
    let snapshot = BackupDir::new(backup_type, backup_id, backup_time);

    let mut files = read_backup_index(&datastore, &snapshot)?;

    let info = BackupInfo::new(&datastore.base_path(), snapshot)?;

    let file_set = files.as_array().unwrap().iter().fold(HashSet::new(), |mut acc, item| {
        acc.insert(item["filename"].as_str().unwrap().to_owned());
        acc
    });

    for file in info.files {
        if file_set.contains(&file) { continue; }
        files.as_array_mut().unwrap().push(json!({ "filename": file }));
    }

    Ok(files)
}

#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                schema: BACKUP_ID_SCHEMA,
            },
            "backup-time": {
                schema: BACKUP_TIME_SCHEMA,
            },
        },
    },
)]
/// Delete backup snapshot.
fn delete_snapshot(
    store: String,
    backup_type: String,
    backup_id: String,
    backup_time: i64,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let snapshot = BackupDir::new(backup_type, backup_id, backup_time);

    let datastore = DataStore::lookup_datastore(&store)?;

    datastore.remove_backup_dir(&snapshot)?;

    Ok(Value::Null)
}

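// List snapshots, optionally filtered by backup type and/or backup id. For each
// snapshot, the total size is summed up from the backup index when the index can
// be read.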
#[api(
    input: {
        properties: {
            store: {
                schema: DATASTORE_SCHEMA,
            },
            "backup-type": {
                optional: true,
                schema: BACKUP_TYPE_SCHEMA,
            },
            "backup-id": {
                optional: true,
                schema: BACKUP_ID_SCHEMA,
            },
        },
    },
    returns: {
        type: Array,
        description: "Returns the list of snapshots.",
        items: {
            type: SnapshotListItem,
        }
    },
)]
/// List backup snapshots.
fn list_snapshots (
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Vec<SnapshotListItem>, Error> {

    let store = tools::required_string_param(&param, "store")?;
    let backup_type = param["backup-type"].as_str();
    let backup_id = param["backup-id"].as_str();

    let datastore = DataStore::lookup_datastore(store)?;

    let base_path = datastore.base_path();

    let backup_list = BackupInfo::list_backups(&base_path)?;

    let mut snapshots = vec![];

    for info in backup_list {
        let group = info.backup_dir.group();
        if let Some(backup_type) = backup_type {
            if backup_type != group.backup_type() { continue; }
        }
        if let Some(backup_id) = backup_id {
            if backup_id != group.backup_id() { continue; }
        }

        let mut result_item = SnapshotListItem {
            backup_type: group.backup_type().to_string(),
            backup_id: group.backup_id().to_string(),
            backup_time: info.backup_dir.backup_time().timestamp(),
            files: info.files,
            size: None,
        };

        if let Ok(index) = read_backup_index(&datastore, &info.backup_dir) {
            let mut backup_size = 0;
            for item in index.as_array().unwrap().iter() {
                if let Some(item_size) = item["size"].as_u64() {
                    backup_size += item_size;
                }
            }
            result_item.size = Some(backup_size);
        }

        snapshots.push(result_item);
    }

    Ok(snapshots)
}

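// Datastore usage (total/used/avail), obtained via statfs64() on the datastore
// base path.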
#[sortable]
const API_METHOD_STATUS: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&status),
    &ObjectSchema::new(
        "Get datastore status.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
        ]),
    )
);

fn status(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(store)?;

    let base_path = datastore.base_path();

    let mut stat: libc::statfs64 = unsafe { std::mem::zeroed() };

    use nix::NixPath;

    let res = base_path.with_nix_path(|cstr| unsafe { libc::statfs64(cstr.as_ptr(), &mut stat) })?;
    nix::errno::Errno::result(res)?;

    let bsize = stat.f_bsize as u64;
    Ok(json!({
        "total": stat.f_blocks*bsize,
        "used": (stat.f_blocks-stat.f_bfree)*bsize,
        "avail": stat.f_bavail*bsize,
    }))
}

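// Helper macro: expands to a schema property array containing the common `keep-*`
// prune options; entries from the first bracketed list are spliced in before them
// and entries from the optional second list after them.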
#[macro_export]
macro_rules! add_common_prune_prameters {
    ( [ $( $list1:tt )* ] ) => {
        add_common_prune_prameters!([$( $list1 )* ] , [])
    };
    ( [ $( $list1:tt )* ] , [ $( $list2:tt )* ] ) => {
        [
            $( $list1 )*
            (
                "keep-daily",
                true,
                &IntegerSchema::new("Number of daily backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-hourly",
                true,
                &IntegerSchema::new("Number of hourly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-last",
                true,
                &IntegerSchema::new("Number of backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-monthly",
                true,
                &IntegerSchema::new("Number of monthly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-weekly",
                true,
                &IntegerSchema::new("Number of weekly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            (
                "keep-yearly",
                true,
                &IntegerSchema::new("Number of yearly backups to keep.")
                    .minimum(1)
                    .schema()
            ),
            $( $list2 )*
        ]
    }
}

const API_METHOD_PRUNE: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&prune),
    &ObjectSchema::new(
        "Prune the datastore.",
        &add_common_prune_prameters!([
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("dry-run", true, &BooleanSchema::new(
                "Just show what prune would do, but do not delete anything.")
                .schema()
            ),
        ],[
            ("store", false, &DATASTORE_SCHEMA),
        ])
    )
);

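// Prune a single backup group. The work itself runs synchronously, but it is
// wrapped in a WorkerTask so the output ends up in the task log; the worker UPID
// is returned to the caller.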
fn prune(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let backup_type = tools::required_string_param(&param, "backup-type")?;
    let backup_id = tools::required_string_param(&param, "backup-id")?;

    let dry_run = param["dry-run"].as_bool().unwrap_or(false);

    let group = BackupGroup::new(backup_type, backup_id);

    let datastore = DataStore::lookup_datastore(store)?;

    let prune_options = PruneOptions {
        keep_last: param["keep-last"].as_u64(),
        keep_hourly: param["keep-hourly"].as_u64(),
        keep_daily: param["keep-daily"].as_u64(),
        keep_weekly: param["keep-weekly"].as_u64(),
        keep_monthly: param["keep-monthly"].as_u64(),
        keep_yearly: param["keep-yearly"].as_u64(),
    };

    let worker_id = format!("{}_{}_{}", store, backup_type, backup_id);

    // We use a WorkerTask just to have a task log, but run synchronously
    let worker = WorkerTask::new("prune", Some(worker_id), "root@pam", true)?;
    let result = try_block! {
        if !prune_options.keeps_something() {
            worker.log("No prune selection - keeping all files.");
            return Ok(());
        } else {
            worker.log(format!("retention options: {}", prune_options.cli_options_string()));
            if dry_run {
                worker.log(format!("Testing prune on store \"{}\" group \"{}/{}\"",
                    store, backup_type, backup_id));
            } else {
                worker.log(format!("Starting prune on store \"{}\" group \"{}/{}\"",
                    store, backup_type, backup_id));
            }
        }

        let list = group.list_backups(&datastore.base_path())?;

        let mut prune_info = compute_prune_info(list, &prune_options)?;

        prune_info.reverse(); // delete older snapshots first

        for (info, keep) in prune_info {
            let backup_time = info.backup_dir.backup_time();
            let timestamp = BackupDir::backup_time_to_string(backup_time);
            let group = info.backup_dir.group();

            let msg = format!(
                "{}/{}/{} {}",
                group.backup_type(),
                group.backup_id(),
                timestamp,
                if keep { "keep" } else { "remove" },
            );

            worker.log(msg);

            if !(dry_run || keep) {
                datastore.remove_backup_dir(&info.backup_dir)?;
            }
        }

        Ok(())
    };

    worker.log_result(&result);

    if let Err(err) = result {
        bail!("prune failed - {}", err);
    }

    Ok(json!(worker.to_string())) // return the UPID
}

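// Garbage collection runs in its own worker thread; the task UPID is returned so
// the client can follow the task log.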
#[sortable]
pub const API_METHOD_START_GARBAGE_COLLECTION: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&start_garbage_collection),
    &ObjectSchema::new(
        "Start garbage collection.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
        ])
    )
);

fn start_garbage_collection(
    param: Value,
    _info: &ApiMethod,
    rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap().to_string();

    let datastore = DataStore::lookup_datastore(&store)?;

    println!("Starting garbage collection on store {}", store);

    let to_stdout = rpcenv.env_type() == RpcEnvironmentType::CLI;

    let upid_str = WorkerTask::new_thread(
        "garbage_collection", Some(store.clone()), "root@pam", to_stdout, move |worker|
        {
            worker.log(format!("starting garbage collection on store {}", store));
            datastore.garbage_collection(worker)
        })?;

    Ok(json!(upid_str))
}

#[sortable]
pub const API_METHOD_GARBAGE_COLLECTION_STATUS: ApiMethod = ApiMethod::new(
    &ApiHandler::Sync(&garbage_collection_status),
    &ObjectSchema::new(
        "Garbage collection status.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
        ])
    )
);

fn garbage_collection_status(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let store = param["store"].as_str().unwrap();

    let datastore = DataStore::lookup_datastore(&store)?;

    println!("Garbage collection status on store {}", store);

    let status = datastore.last_gc_status();

    Ok(serde_json::to_value(&status)?)
}

fn get_datastore_list(
    _param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {

    let (config, _digest) = datastore::config()?;

    Ok(config.convert_to_array("store", None))
}

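// Download a single raw file from a backup snapshot; the file is streamed back
// as "application/octet-stream".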
#[sortable]
pub const API_METHOD_DOWNLOAD_FILE: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&download_file),
    &ObjectSchema::new(
        "Download single raw file from backup snapshot.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-time", false, &BACKUP_TIME_SCHEMA),
            ("file-name", false, &StringSchema::new("Raw file name.")
                .format(&FILENAME_FORMAT)
                .schema()
            ),
        ]),
    )
);

fn download_file(
    _parts: Parts,
    _req_body: Body,
    param: Value,
    _info: &ApiMethod,
    _rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let store = tools::required_string_param(&param, "store")?;

        let datastore = DataStore::lookup_datastore(store)?;

        let file_name = tools::required_string_param(&param, "file-name")?.to_owned();

        let backup_type = tools::required_string_param(&param, "backup-type")?;
        let backup_id = tools::required_string_param(&param, "backup-id")?;
        let backup_time = tools::required_integer_param(&param, "backup-time")?;

        println!("Download {} from {} ({}/{}/{}/{})", file_name, store,
                 backup_type, backup_id, Local.timestamp(backup_time, 0), file_name);

        let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);

        let mut path = datastore.base_path();
        path.push(backup_dir.relative_path());
        path.push(&file_name);

        let file = tokio::fs::File::open(path)
            .map_err(|err| http_err!(BAD_REQUEST, format!("File open failed: {}", err)))
            .await?;

        let payload = tokio_util::codec::FramedRead::new(file, tokio_util::codec::BytesCodec::new())
            .map_ok(|bytes| hyper::body::Bytes::from(bytes.freeze()));
        let body = Body::wrap_stream(payload);

        // fixme: set other headers ?
        Ok(Response::builder()
            .status(StatusCode::OK)
            .header(header::CONTENT_TYPE, "application/octet-stream")
            .body(body)
            .unwrap())
    }.boxed()
}

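// Upload the client log ("client.log.blob") for an existing backup snapshot. The
// blob's CRC is verified on the server side, and an already existing log is never
// overwritten.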
#[sortable]
pub const API_METHOD_UPLOAD_BACKUP_LOG: ApiMethod = ApiMethod::new(
    &ApiHandler::AsyncHttp(&upload_backup_log),
    &ObjectSchema::new(
        "Upload the client backup log file into a backup snapshot.",
        &sorted!([
            ("store", false, &DATASTORE_SCHEMA),
            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
            ("backup-id", false, &BACKUP_ID_SCHEMA),
            ("backup-time", false, &BACKUP_TIME_SCHEMA),
        ]),
    )
);

fn upload_backup_log(
    _parts: Parts,
    req_body: Body,
    param: Value,
    _info: &ApiMethod,
    _rpcenv: Box<dyn RpcEnvironment>,
) -> ApiResponseFuture {

    async move {
        let store = tools::required_string_param(&param, "store")?;

        let datastore = DataStore::lookup_datastore(store)?;

        let file_name = "client.log.blob";

        let backup_type = tools::required_string_param(&param, "backup-type")?;
        let backup_id = tools::required_string_param(&param, "backup-id")?;
        let backup_time = tools::required_integer_param(&param, "backup-time")?;

        let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);

        let mut path = datastore.base_path();
        path.push(backup_dir.relative_path());
        path.push(&file_name);

        if path.exists() {
            bail!("backup already contains a log.");
        }

        println!("Upload backup log to {}/{}/{}/{}/{}", store,
                 backup_type, backup_id, BackupDir::backup_time_to_string(backup_dir.backup_time()), file_name);

        let data = req_body
            .map_err(Error::from)
            .try_fold(Vec::new(), |mut acc, chunk| {
                acc.extend_from_slice(&*chunk);
                future::ok::<_, Error>(acc)
            })
            .await?;

        let blob = DataBlob::from_raw(data)?;
        // always verify CRC at server side
        blob.verify_crc()?;
        let raw_data = blob.raw_data();
        replace_file(&path, raw_data, CreateOptions::new())?;

        // fixme: use correct formatter
        Ok(crate::server::formatter::json_response(Ok(Value::Null)))
    }.boxed()
}

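// Sub-routes available below a single datastore (matched as "{store}" by the
// top-level router at the bottom of this file).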
#[sortable]
const DATASTORE_INFO_SUBDIRS: SubdirMap = &[
    (
        "download",
        &Router::new()
            .download(&API_METHOD_DOWNLOAD_FILE)
    ),
    (
        "files",
        &Router::new()
            .get(
                &ApiMethod::new(
                    &ApiHandler::Sync(&list_snapshot_files),
                    &ObjectSchema::new(
                        "List snapshot files.",
                        &sorted!([
                            ("store", false, &DATASTORE_SCHEMA),
                            ("backup-type", false, &BACKUP_TYPE_SCHEMA),
                            ("backup-id", false, &BACKUP_ID_SCHEMA),
                            ("backup-time", false, &BACKUP_TIME_SCHEMA),
                        ]),
                    )
                )
            )
    ),
    (
        "gc",
        &Router::new()
            .get(&API_METHOD_GARBAGE_COLLECTION_STATUS)
            .post(&API_METHOD_START_GARBAGE_COLLECTION)
    ),
    (
        "groups",
        &Router::new()
            .get(&API_METHOD_LIST_GROUPS)
    ),
    (
        "prune",
        &Router::new()
            .post(&API_METHOD_PRUNE)
    ),
    (
        "snapshots",
        &Router::new()
            .get(&API_METHOD_LIST_SNAPSHOTS)
            .delete(&API_METHOD_DELETE_SNAPSHOT)
    ),
    (
        "status",
        &Router::new()
            .get(&API_METHOD_STATUS)
    ),
    (
        "upload-backup-log",
        &Router::new()
            .upload(&API_METHOD_UPLOAD_BACKUP_LOG)
    ),
];

const DATASTORE_INFO_ROUTER: Router = Router::new()
    .get(&list_subdirs_api_method!(DATASTORE_INFO_SUBDIRS))
    .subdirs(DATASTORE_INFO_SUBDIRS);


pub const ROUTER: Router = Router::new()
    .get(
        &ApiMethod::new(
            &ApiHandler::Sync(&get_datastore_list),
            &ObjectSchema::new("Directory index.", &[])
        )
    )
    .match_all("store", &DATASTORE_INFO_ROUTER);
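
// Resulting API layout (as wired above):
//   GET    /                          -> get_datastore_list (directory index)
//   GET    /{store}/groups            -> list_groups
//   GET    /{store}/snapshots         -> list_snapshots
//   DELETE /{store}/snapshots         -> delete_snapshot
//   GET    /{store}/files             -> list_snapshot_files
//   POST   /{store}/prune             -> prune
//   GET    /{store}/gc                -> garbage_collection_status
//   POST   /{store}/gc                -> start_garbage_collection
//   GET    /{store}/status            -> status
//          /{store}/download          -> download_file (download handler)
//          /{store}/upload-backup-log -> upload_backup_log (upload handler)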