use crate::tools;
use crate::tools::wrapped_reader_stream::*;
-use crate::backup::datastore::*;
-use crate::backup::dynamic_index::*;
+use crate::backup::*;
//use crate::server::rest::*;
-use crate::api::schema::*;
-use crate::api::router::*;
+use crate::api_schema::*;
+use crate::api_schema::router::*;
-use chrono::{Utc, TimeZone};
+use chrono::{Local, TimeZone};
use serde_json::Value;
use std::io::Write;
use futures::*;
-use std::path::PathBuf;
+//use std::path::PathBuf;
use std::sync::Arc;
use hyper::Body;
match try_ready!(self.stream.poll()) {
Some(chunk) => {
self.count += chunk.len();
- if let Err(err) = self.index.write(&chunk) {
+ if let Err(err) = self.index.write_all(&chunk) {
bail!("writing chunk failed - {}", err);
}
}
) -> Result<BoxFut, Error> {
let store = tools::required_string_param(¶m, "store")?;
- let archive_name = tools::required_string_param(¶m, "archive_name")?;
+ let mut archive_name = String::from(tools::required_string_param(¶m, "archive-name")?);
- let backup_type = tools::required_string_param(¶m, "type")?;
- let backup_id = tools::required_string_param(¶m, "id")?;
- let backup_time = tools::required_integer_param(¶m, "time")?;
+ if !archive_name.ends_with(".catar") {
+ bail!("got wrong file extension (expected '.catar')");
+ }
+
+ archive_name.push_str(".didx");
- println!("Upload {}.catar to {} ({}/{}/{}/{}.didx)", archive_name, store,
- backup_type, backup_id, backup_time, archive_name);
+ let backup_type = tools::required_string_param(¶m, "backup-type")?;
+ let backup_id = tools::required_string_param(¶m, "backup-id")?;
+ let backup_time = tools::required_integer_param(¶m, "backup-time")?;
+
+ println!("Upload {}/{}/{}/{}/{}", store, backup_type, backup_id, backup_time, archive_name);
let content_type = parts.headers.get(http::header::CONTENT_TYPE)
.ok_or(format_err!("missing content-type header"))?;
bail!("got wrong content-type for catar archive upload");
}
- let chunk_size = 4*1024*1024;
+ let chunk_size = param["chunk-size"].as_u64().unwrap_or(4096*1024);
+ verify_chunk_size(chunk_size)?;
let datastore = DataStore::lookup_datastore(store)?;
+ let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);
- let mut path = datastore.create_backup_dir(backup_type, backup_id, backup_time)?;
-
- let mut full_archive_name = PathBuf::from(archive_name);
- full_archive_name.set_extension("didx");
+ let (mut path, _new) = datastore.create_backup_dir(&backup_dir)?;
- path.push(full_archive_name);
+ path.push(archive_name);
- let index = datastore.create_dynamic_writer(path, chunk_size)?;
+ let index = datastore.create_dynamic_writer(path, chunk_size as usize)?;
let upload = UploadCaTar { stream: req_body, index, count: 0};
upload_catar,
ObjectSchema::new("Upload .catar backup file.")
.required("store", StringSchema::new("Datastore name."))
- .required("archive_name", StringSchema::new("Backup archive name."))
- .required("type", StringSchema::new("Backup type.")
+ .required("archive-name", StringSchema::new("Backup archive name."))
+ .required("backup-type", StringSchema::new("Backup type.")
.format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
- .required("id", StringSchema::new("Backup ID."))
- .required("time", IntegerSchema::new("Backup time (Unix epoch.)")
+ .required("backup-id", StringSchema::new("Backup ID."))
+ .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
.minimum(1547797308))
-
+ .optional(
+ "chunk-size",
+ IntegerSchema::new("Chunk size in bytes. Must be a power of 2.")
+ .minimum(64*1024)
+ .maximum(4096*1024)
+ .default(4096*1024)
+ )
)
}
) -> Result<BoxFut, Error> {
let store = tools::required_string_param(¶m, "store")?;
- let archive_name = tools::required_string_param(¶m, "archive_name")?;
+ let mut archive_name = tools::required_string_param(¶m, "archive-name")?.to_owned();
+
+ if !archive_name.ends_with(".catar") {
+ bail!("wrong archive extension");
+ } else {
+ archive_name.push_str(".didx");
+ }
- let backup_type = tools::required_string_param(¶m, "type")?;
- let backup_id = tools::required_string_param(¶m, "id")?;
- let backup_time = tools::required_integer_param(¶m, "time")?;
- let backup_time = Utc.timestamp(backup_time, 0);
+ let backup_type = tools::required_string_param(¶m, "backup-type")?;
+ let backup_id = tools::required_string_param(¶m, "backup-id")?;
+ let backup_time = tools::required_integer_param(¶m, "backup-time")?;
- println!("Download {}.catar from {} ({}/{}/{}/{}.didx)", archive_name, store,
- backup_type, backup_id, backup_time, archive_name);
+ println!("Download {} from {} ({}/{}/{}/{})", archive_name, store,
+ backup_type, backup_id, Local.timestamp(backup_time, 0), archive_name);
let datastore = DataStore::lookup_datastore(store)?;
- let mut path = datastore.get_backup_dir(backup_type, backup_id, backup_time);
-
- let mut full_archive_name = PathBuf::from(archive_name);
- full_archive_name.set_extension("didx");
+ let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);
- path.push(full_archive_name);
+ let mut path = backup_dir.relative_path();
+ path.push(archive_name);
let index = datastore.open_dynamic_reader(path)?;
let reader = BufferedDynamicReader::new(index);
download_catar,
ObjectSchema::new("Download .catar backup file.")
.required("store", StringSchema::new("Datastore name."))
- .required("archive_name", StringSchema::new("Backup archive name."))
- .required("type", StringSchema::new("Backup type.")
+ .required("archive-name", StringSchema::new("Backup archive name."))
+ .required("backup-type", StringSchema::new("Backup type.")
.format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
- .required("id", StringSchema::new("Backup ID."))
- .required("time", IntegerSchema::new("Backup time (Unix epoch.)")
+ .required("backup-id", StringSchema::new("Backup ID."))
+ .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
.minimum(1547797308))
)