use serde_json::Value;
use std::io::Write;
use futures::*;
-use std::path::PathBuf;
+//use std::path::PathBuf;
use std::sync::Arc;
use hyper::Body;
archive_name.push_str(".didx");
- let backup_type = tools::required_string_param(&param, "type")?;
- let backup_id = tools::required_string_param(&param, "id")?;
- let backup_time = tools::required_integer_param(&param, "time")?;
+ let backup_type = tools::required_string_param(&param, "backup-type")?;
+ let backup_id = tools::required_string_param(&param, "backup-id")?;
+ let backup_time = tools::required_integer_param(&param, "backup-time")?;
println!("Upload {}/{}/{}/{}/{}", store, backup_type, backup_id, backup_time, archive_name);
verify_chunk_size(chunk_size)?;
let datastore = DataStore::lookup_datastore(store)?;
+ let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);
- let (mut path, _new) = datastore.create_backup_dir(
- backup_type, backup_id, Local.timestamp(backup_time, 0))?;
+ let (mut path, _new) = datastore.create_backup_dir(&backup_dir)?;
path.push(archive_name);
ObjectSchema::new("Upload .catar backup file.")
.required("store", StringSchema::new("Datastore name."))
.required("archive-name", StringSchema::new("Backup archive name."))
- .required("type", StringSchema::new("Backup type.")
+ .required("backup-type", StringSchema::new("Backup type.")
.format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
- .required("id", StringSchema::new("Backup ID."))
- .required("time", IntegerSchema::new("Backup time (Unix epoch.)")
+ .required("backup-id", StringSchema::new("Backup ID."))
+ .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
.minimum(1547797308))
.optional(
"chunk-size",
) -> Result<BoxFut, Error> {
let store = tools::required_string_param(&param, "store")?;
- let archive_name = tools::required_string_param(¶m, "archive-name")?;
+ let mut archive_name = tools::required_string_param(¶m, "archive-name")?.to_owned();
- let backup_type = tools::required_string_param(&param, "type")?;
- let backup_id = tools::required_string_param(&param, "id")?;
- let backup_time = tools::required_integer_param(&param, "time")?;
- let backup_time = Local.timestamp(backup_time, 0);
+ if !archive_name.ends_with(".catar") {
+ bail!("wrong archive extension");
+ } else {
+ archive_name.push_str(".didx");
+ }
+
+ let backup_type = tools::required_string_param(&param, "backup-type")?;
+ let backup_id = tools::required_string_param(&param, "backup-id")?;
+ let backup_time = tools::required_integer_param(&param, "backup-time")?;
- println!("Download {}.catar from {} ({}/{}/{}/{}.didx)", archive_name, store,
- backup_type, backup_id, backup_time, archive_name);
+ println!("Download {} from {} ({}/{}/{}/{})", archive_name, store,
+ backup_type, backup_id, Local.timestamp(backup_time, 0), archive_name);
let datastore = DataStore::lookup_datastore(store)?;
- let backup_dir = BackupDir {
- group: BackupGroup {
- backup_type: backup_type.to_string(),
- backup_id: backup_id.to_string(),
- },
- backup_time,
- };
+ let backup_dir = BackupDir::new(backup_type, backup_id, backup_time);
let mut path = backup_dir.relative_path();
-
- let mut full_archive_name = PathBuf::from(archive_name);
- full_archive_name.set_extension("didx");
-
- path.push(full_archive_name);
+ path.push(archive_name);
let index = datastore.open_dynamic_reader(path)?;
let reader = BufferedDynamicReader::new(index);
ObjectSchema::new("Download .catar backup file.")
.required("store", StringSchema::new("Datastore name."))
.required("archive-name", StringSchema::new("Backup archive name."))
- .required("type", StringSchema::new("Backup type.")
+ .required("backup-type", StringSchema::new("Backup type.")
.format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
- .required("id", StringSchema::new("Backup ID."))
- .required("time", IntegerSchema::new("Backup time (Unix epoch.)")
+ .required("backup-id", StringSchema::new("Backup ID."))
+ .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
.minimum(1547797308))
)