path.push(&file_name);
env.log(format!("download {:?}", path.clone()));
+
+ // Open an index reader matching the archive type so that every chunk the
+ // archive references can be registered as downloadable for this session.
+ let index: Option<Box<dyn IndexFile + Send>> = match archive_type(&file_name)? {
+ ArchiveType::FixedIndex => {
+ let index = env.datastore.open_fixed_reader(&path)?;
+ Some(Box::new(index))
+ }
+ ArchiveType::DynamicIndex => {
+ let index = env.datastore.open_dynamic_reader(&path)?;
+ Some(Box::new(index))
+ }
+ // non-index files carry no chunk list, so there is nothing to register
+ _ => { None }
+ };
+
+ if let Some(index) = index {
+ env.log(format!("register chunks in '{}' as downloadable.", file_name));
+
+ for pos in 0..index.index_count() {
+ // NOTE(review): unwrap assumes chunk_info() is Some for every
+ // pos < index_count() — verify against the IndexFile contract.
+ let info = index.chunk_info(pos).unwrap();
+ env.register_chunk(info.digest);
+ }
+ }
helpers::create_download_response(path).await
}.boxed()
let digest_str = tools::required_string_param(¶m, "digest")?;
let digest = proxmox::tools::hex_to_digest(digest_str)?;
+ if !env.check_chunk_access(digest) {
+ env.log(format!("attempted to download chunk {} which is not in registered chunk list", digest_str));
+ return Err(http_err!(UNAUTHORIZED, "download chunk {} not allowed", digest_str));
+ }
+
let (path, _) = env.datastore.chunk_path(&digest);
let path2 = path.clone();
-//use anyhow::{bail, format_err, Error};
-use std::sync::Arc;
+use std::sync::{Arc,RwLock};
+use std::collections::HashSet;
use serde_json::{json, Value};
pub worker: Arc<WorkerTask>,
pub datastore: Arc<DataStore>,
pub backup_dir: BackupDir,
- // state: Arc<Mutex<SharedBackupState>>
+ // Digests of chunks this reader session is allowed to download; filled
+ // in when an index file is downloaded, queried on every chunk request.
+ allowed_chunks: Arc<RwLock<HashSet<[u8;32]>>>,
}
impl ReaderEnvironment {
debug: false,
formatter: &JSON_FORMATTER,
backup_dir,
- //state: Arc::new(Mutex::new(state)),
+ allowed_chunks: Arc::new(RwLock::new(HashSet::new())),
}
}
if self.debug { self.worker.log(msg); }
}
+
+ /// Record `digest` in the session's allowed-chunk set, making that
+ /// chunk downloadable via the chunk download endpoint.
+ pub fn register_chunk(&self, digest: [u8;32]) {
+ let mut allowed_chunks = self.allowed_chunks.write().unwrap();
+ allowed_chunks.insert(digest);
+ }
+
+ /// Return true if `digest` was previously registered via `register_chunk`.
+ pub fn check_chunk_access(&self, digest: [u8;32]) -> bool {
+ self.allowed_chunks.read().unwrap().contains(&digest)
+ }
}
impl RpcEnvironment for ReaderEnvironment {