src/catar/decoder.rs: simplify public restore API
diff --git a/src/api2/admin/datastore/catar.rs b/src/api2/admin/datastore/catar.rs
index 45c6f44a1613ee85eb23b74c987a753c32764083..6e8e4b7d715a87eba68e85341e48db5588771302 100644
--- a/src/api2/admin/datastore/catar.rs
+++ b/src/api2/admin/datastore/catar.rs
@@ -2,18 +2,17 @@ use failure::*;
 
 use crate::tools;
 use crate::tools::wrapped_reader_stream::*;
-use crate::backup::datastore::*;
-use crate::backup::dynamic_index::*;
+use crate::backup::*;
 //use crate::server::rest::*;
-use crate::api::schema::*;
-use crate::api::router::*;
+use crate::api_schema::*;
+use crate::api_schema::router::*;
 
-use chrono::{Utc, TimeZone};
+use chrono::{Local, TimeZone};
 
 use serde_json::Value;
 use std::io::Write;
 use futures::*;
-use std::path::PathBuf;
+//use std::path::PathBuf;
 use std::sync::Arc;
 
 use hyper::Body;
@@ -34,7 +33,7 @@ impl Future for UploadCaTar {
             match try_ready!(self.stream.poll()) {
                 Some(chunk) => {
                     self.count += chunk.len();
-                    if let Err(err) = self.index.write(&chunk) {
+                    if let Err(err) = self.index.write_all(&chunk) {
                         bail!("writing chunk failed - {}", err);
                     }
                 }
@@ -56,14 +55,19 @@ fn upload_catar(
 ) -> Result<BoxFut, Error> {
 
     let store = tools::required_string_param(&param, "store")?;
-    let archive_name = tools::required_string_param(&param, "archive_name")?;
+    let mut archive_name = String::from(tools::required_string_param(&param, "archive-name")?);
 
-    let backup_type = tools::required_string_param(&param, "type")?;
-    let backup_id = tools::required_string_param(&param, "id")?;
-    let backup_time = tools::required_integer_param(&param, "time")?;
+    if !archive_name.ends_with(".catar") {
+        bail!("got wront file extension (expected '.catar')");
+    }
+
+    archive_name.push_str(".didx");
 
-    println!("Upload {}.catar to {} ({}/{}/{}/{}.didx)", archive_name, store,
-             backup_type, backup_id, backup_time, archive_name);
+    let backup_type = tools::required_string_param(&param, "backup-type")?;
+    let backup_id = tools::required_string_param(&param, "backup-id")?;
+    let backup_time = tools::required_integer_param(&param, "backup-time")?;
+
+    println!("Upload {}/{}/{}/{}/{}", store, backup_type, backup_id, backup_time, archive_name);
 
     let content_type = parts.headers.get(http::header::CONTENT_TYPE)
         .ok_or(format_err!("missing content-type header"))?;
@@ -72,18 +76,17 @@ fn upload_catar(
         bail!("got wrong content-type for catar archive upload");
     }
 
-    let chunk_size = 4*1024*1024;
+    let chunk_size = param["chunk-size"].as_u64().unwrap_or(4096*1024);
+    verify_chunk_size(chunk_size)?;
 
     let datastore = DataStore::lookup_datastore(store)?;
+    let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);
 
-    let mut path = datastore.create_backup_dir(backup_type, backup_id, backup_time)?;
-
-    let mut full_archive_name = PathBuf::from(archive_name);
-    full_archive_name.set_extension("didx");
+    let (mut path, _new) = datastore.create_backup_dir(&backup_dir)?;
 
-    path.push(full_archive_name);
+    path.push(archive_name);
 
-    let index = datastore.create_dynamic_writer(path, chunk_size)?;
+    let index = datastore.create_dynamic_writer(path, chunk_size as usize)?;
 
     let upload = UploadCaTar { stream: req_body, index, count: 0};
 
@@ -105,13 +108,19 @@ pub fn api_method_upload_catar() -> ApiAsyncMethod {
         upload_catar,
         ObjectSchema::new("Upload .catar backup file.")
             .required("store", StringSchema::new("Datastore name."))
-            .required("archive_name", StringSchema::new("Backup archive name."))
-            .required("type", StringSchema::new("Backup type.")
+            .required("archive-name", StringSchema::new("Backup archive name."))
+            .required("backup-type", StringSchema::new("Backup type.")
                       .format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
-            .required("id", StringSchema::new("Backup ID."))
-            .required("time", IntegerSchema::new("Backup time (Unix epoch.)")
+            .required("backup-id", StringSchema::new("Backup ID."))
+            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
                       .minimum(1547797308))
-
+            .optional(
+                "chunk-size",
+                IntegerSchema::new("Chunk size in bytes. Must be a power of 2.")
+                    .minimum(64*1024)
+                    .maximum(4096*1024)
+                    .default(4096*1024)
+            )
     )
 }
 
@@ -124,24 +133,27 @@ fn download_catar(
 ) -> Result<BoxFut, Error> {
 
     let store = tools::required_string_param(&param, "store")?;
-    let archive_name = tools::required_string_param(&param, "archive_name")?;
+    let mut archive_name = tools::required_string_param(&param, "archive-name")?.to_owned();
+
+    if !archive_name.ends_with(".catar") {
+        bail!("wrong archive extension");
+    } else {
+        archive_name.push_str(".didx");
+    }
 
-    let backup_type = tools::required_string_param(&param, "type")?;
-    let backup_id = tools::required_string_param(&param, "id")?;
-    let backup_time = tools::required_integer_param(&param, "time")?;
-    let backup_time = Utc.timestamp(backup_time, 0);
+    let backup_type = tools::required_string_param(&param, "backup-type")?;
+    let backup_id = tools::required_string_param(&param, "backup-id")?;
+    let backup_time = tools::required_integer_param(&param, "backup-time")?;
 
-    println!("Download {}.catar from {} ({}/{}/{}/{}.didx)", archive_name, store,
-             backup_type, backup_id, backup_time, archive_name);
+    println!("Download {} from {} ({}/{}/{}/{})", archive_name, store,
+             backup_type, backup_id, Local.timestamp(backup_time, 0), archive_name);
 
     let datastore = DataStore::lookup_datastore(store)?;
 
-    let mut path = datastore.get_backup_dir(backup_type, backup_id, backup_time);
-
-    let mut full_archive_name = PathBuf::from(archive_name);
-    full_archive_name.set_extension("didx");
+    let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);
 
-    path.push(full_archive_name);
+    let mut path = backup_dir.relative_path();
+    path.push(archive_name);
 
     let index = datastore.open_dynamic_reader(path)?;
     let reader = BufferedDynamicReader::new(index);
@@ -160,11 +172,11 @@ pub fn api_method_download_catar() -> ApiAsyncMethod {
         download_catar,
         ObjectSchema::new("Download .catar backup file.")
             .required("store", StringSchema::new("Datastore name."))
-            .required("archive_name", StringSchema::new("Backup archive name."))
-            .required("type", StringSchema::new("Backup type.")
+            .required("archive-name", StringSchema::new("Backup archive name."))
+            .required("backup-type", StringSchema::new("Backup type.")
                       .format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
-            .required("id", StringSchema::new("Backup ID."))
-            .required("time", IntegerSchema::new("Backup time (Unix epoch.)")
+            .required("backup-id", StringSchema::new("Backup ID."))
+            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
                       .minimum(1547797308))
 
     )