bail!("got wrong content-type for catar archive upload");
}
- let chunk_size = 4*1024*1024;
+ let chunk_size = param["chunk-size"].as_u64().unwrap_or(4096*1024);
+ verify_chunk_size(chunk_size)?;
let datastore = DataStore::lookup_datastore(store)?;
path.push(archive_name);
- let index = datastore.create_dynamic_writer(path, chunk_size)?;
+ let index = datastore.create_dynamic_writer(path, chunk_size as usize)?;
let upload = UploadCaTar { stream: req_body, index, count: 0};
.required("id", StringSchema::new("Backup ID."))
.required("time", IntegerSchema::new("Backup time (Unix epoch.)")
.minimum(1547797308))
-
+ .optional(
+ "chunk-size",
+ IntegerSchema::new("Chunk size in bytes. Must be a power of 2.")
+ .minimum(64*1024)
+ .maximum(4096*1024)
+ .default(4096*1024)
+ )
)
}
// TODO: what about sysctl setting vm.vfs_cache_pressure (0 - 100) ?
+/// Validate an upload chunk size requested by a client.
+///
+/// Accepts exactly the sizes listed in `SIZES`: the powers of two from
+/// 64*1024 up to 4096*1024 bytes — the same range the `chunk-size`
+/// API schema advertises (minimum 64 KiB, maximum 4 MiB).
+///
+/// # Errors
+///
+/// Bails with an "unsupported chunk size" error when `size` is not one
+/// of the supported values.
+pub fn verify_chunk_size(size: u64) -> Result<(), Error> {
+
+    // Whitelist of supported sizes: every power of two in [64 KiB, 4 MiB].
+    static SIZES: [u64; 7] = [64*1024, 128*1024, 256*1024, 512*1024, 1024*1024, 2048*1024, 4096*1024];
+
+    if !SIZES.contains(&size) {
+        bail!("Got unsupported chunk size '{}'", size);
+    }
+    Ok(())
+}
+
fn digest_to_prefix(digest: &[u8]) -> PathBuf {
let mut buf = Vec::<u8>::with_capacity(2+1+2+1);
use proxmox_backup::api_schema::*;
use proxmox_backup::api_schema::router::*;
use proxmox_backup::client::*;
-//use proxmox_backup::backup::chunk_store::*;
+use proxmox_backup::backup::*;
//use proxmox_backup::backup::image_index::*;
//use proxmox_backup::config::datastore;
//use proxmox_backup::catar::encoder::*;
use hyper::Body;
use std::sync::Arc;
-fn backup_directory(repo: &BackupRepository, body: Body, archive_name: &str) -> Result<(), Error> {
+fn backup_directory(
+ repo: &BackupRepository,
+ body: Body,
+ archive_name: &str,
+ chunk_size: Option<u64>,
+) -> Result<(), Error> {
let client = HttpClient::new(&repo.host, &repo.user);
let epoch = std::time::SystemTime::now().duration_since(
std::time::SystemTime::UNIX_EPOCH)?.as_secs();
- let query = url::form_urlencoded::Serializer::new(String::new())
+ let mut query = url::form_urlencoded::Serializer::new(String::new());
+
+ query
.append_pair("archive_name", archive_name)
.append_pair("type", "host")
.append_pair("id", &tools::nodename())
- .append_pair("time", &epoch.to_string())
- .finish();
+ .append_pair("time", &epoch.to_string());
+
+ if let Some(size) = chunk_size {
+ query.append_pair("chunk-size", &size.to_string());
+ }
+
+ let query = query.finish();
let path = format!("api2/json/admin/datastore/{}/catar?{}", repo.store, query);
let repo = BackupRepository::parse(repo_url)?;
- let mut _chunk_size = 4*1024*1024;
-
- if let Some(size) = param["chunk-size"].as_u64() {
- static SIZES: [u64; 7] = [64, 128, 256, 512, 1024, 2048, 4096];
+ let chunk_size_opt = param["chunk-size"].as_u64().map(|v| v*1024);
- if SIZES.contains(&size) {
- _chunk_size = (size as usize) * 1024;
- } else {
- bail!("Got unsupported chunk size '{}'", size);
- }
+ if let Some(size) = chunk_size_opt {
+ verify_chunk_size(size)?;
}
let stat = match nix::sys::stat::stat(filename) {
let body = Body::wrap_stream(stream);
- backup_directory(&repo, body, target)?;
+ backup_directory(&repo, body, target, chunk_size_opt)?;
} else if (stat.st_mode & (libc::S_IFREG|libc::S_IFBLK)) != 0 {
println!("Backup image '{}' to '{:?}'", filename, repo);