client: add test for keyfile_parameters
diff --git a/src/bin/proxmox-backup-client.rs b/src/bin/proxmox-backup-client.rs
index 80b1ff22547ede39ef2cff9e6bd1976bf240f8a9..4a783abe66ef02b385624d41da86634640c751c6 100644
@@ -12,6 +12,7 @@ use futures::future::FutureExt;
 use futures::stream::{StreamExt, TryStreamExt};
 use serde_json::{json, Value};
 use tokio::sync::mpsc;
+use tokio_stream::wrappers::ReceiverStream;
 use xdg::BaseDirectories;
 
 use pathpatterns::{MatchEntry, MatchType, PatternFlag};
@@ -40,6 +41,7 @@ use proxmox_backup::pxar::catalog::*;
 use proxmox_backup::backup::{
     archive_type,
     decrypt_key,
+    rsa_encrypt_key_config,
     verify_chunk_size,
     ArchiveType,
     AsyncReadChunk,
@@ -54,8 +56,10 @@ use proxmox_backup::backup::{
     CryptConfig,
     CryptMode,
     DynamicIndexReader,
+    ENCRYPTED_KEY_BLOB_NAME,
     FixedChunkStream,
     FixedIndexReader,
+    KeyConfig,
     IndexFile,
     MANIFEST_BLOB_NAME,
     Shell,
@@ -207,34 +211,11 @@ fn connect_do(server: &str, port: u16, auth_id: &Authid) -> Result<HttpClient, E
         Err(NotPresent) => None,
     };
 
-    let options = HttpClientOptions::new()
-        .prefix(Some("proxmox-backup".to_string()))
-        .password(password)
-        .interactive(true)
-        .fingerprint(fingerprint)
-        .fingerprint_cache(true)
-        .ticket_cache(true);
+    let options = HttpClientOptions::new_interactive(password, fingerprint);
 
     HttpClient::new(server, port, auth_id, options)
 }
 
-async fn view_task_result(
-    client: HttpClient,
-    result: Value,
-    output_format: &str,
-) -> Result<(), Error> {
-    let data = &result["data"];
-    if output_format == "text" {
-        if let Some(upid) = data.as_str() {
-            display_task_log(client, upid, true).await?;
-        }
-    } else {
-        format_and_print_result(&data, &output_format);
-    }
-
-    Ok(())
-}
-
 async fn api_datastore_list_snapshots(
     client: &HttpClient,
     store: &str,
@@ -276,34 +257,24 @@ pub async fn api_datastore_latest_snapshot(
 
 async fn backup_directory<P: AsRef<Path>>(
     client: &BackupWriter,
-    previous_manifest: Option<Arc<BackupManifest>>,
     dir_path: P,
     archive_name: &str,
     chunk_size: Option<usize>,
-    device_set: Option<HashSet<u64>>,
-    verbose: bool,
-    skip_lost_and_found: bool,
     catalog: Arc<Mutex<CatalogWriter<crate::tools::StdChannelWriter>>>,
-    exclude_pattern: Vec<MatchEntry>,
-    entries_max: usize,
-    compress: bool,
-    encrypt: bool,
+    pxar_create_options: proxmox_backup::pxar::PxarCreateOptions,
+    upload_options: UploadOptions,
 ) -> Result<BackupStats, Error> {
 
     let pxar_stream = PxarBackupStream::open(
         dir_path.as_ref(),
-        device_set,
-        verbose,
-        skip_lost_and_found,
         catalog,
-        exclude_pattern,
-        entries_max,
+        pxar_create_options,
     )?;
     let mut chunk_stream = ChunkStream::new(pxar_stream, chunk_size);
 
-    let (mut tx, rx) = mpsc::channel(10); // allow to buffer 10 chunks
+    let (tx, rx) = mpsc::channel(10); // allow to buffer 10 chunks
 
-    let stream = rx
+    let stream = ReceiverStream::new(rx)
         .map_err(Error::from);
 
     // spawn chunker inside a separate task so that it can run in parallel
@@ -313,8 +284,12 @@ async fn backup_directory<P: AsRef<Path>>(
         }
     });
 
+    if upload_options.fixed_size.is_some() {
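+        // directory backups always produce a dynamically chunked stream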
+        bail!("cannot backup directory with fixed chunk size!");
+    }
+
     let stats = client
-        .upload_stream(previous_manifest, archive_name, stream, "dynamic", None, compress, encrypt)
+        .upload_stream(archive_name, stream, upload_options)
         .await?;
 
     Ok(stats)
@@ -322,14 +297,10 @@ async fn backup_directory<P: AsRef<Path>>(
 
 async fn backup_image<P: AsRef<Path>>(
     client: &BackupWriter,
-    previous_manifest: Option<Arc<BackupManifest>>,
     image_path: P,
     archive_name: &str,
-    image_size: u64,
     chunk_size: Option<usize>,
-    compress: bool,
-    encrypt: bool,
-    _verbose: bool,
+    upload_options: UploadOptions,
 ) -> Result<BackupStats, Error> {
 
     let path = image_path.as_ref().to_owned();
@@ -341,8 +312,12 @@ async fn backup_image<P: AsRef<Path>>(
 
     let stream = FixedChunkStream::new(stream, chunk_size.unwrap_or(4*1024*1024));
 
+    if upload_options.fixed_size.is_none() {
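+        // image backups always use fixed-size chunks, so the image size must be known up front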
+        bail!("cannot backup image with dynamic chunk size!");
+    }
+
     let stats = client
-        .upload_stream(previous_manifest, archive_name, stream, "fixed", Some(image_size), compress, encrypt)
+        .upload_stream(archive_name, stream, upload_options)
         .await?;
 
     Ok(stats)
@@ -409,9 +384,9 @@ async fn list_backup_groups(param: Value) -> Result<Value, Error> {
 
     let mut data: Value = result["data"].take();
 
-    let info = &proxmox_backup::api2::admin::datastore::API_RETURN_SCHEMA_LIST_GROUPS;
+    let return_type = &proxmox_backup::api2::admin::datastore::API_METHOD_LIST_GROUPS.returns;
 
-    format_and_print_result_full(&mut data, info, &output_format, &options);
+    format_and_print_result_full(&mut data, return_type, &output_format, &options);
 
     Ok(Value::Null)
 }
@@ -577,32 +552,38 @@ async fn start_garbage_collection(param: Value) -> Result<Value, Error> {
 
     record_repository(&repo);
 
-    view_task_result(client, result, &output_format).await?;
+    view_task_result(&mut client, result, &output_format).await?;
 
     Ok(Value::Null)
 }
 
+struct CatalogUploadResult {
+    catalog_writer: Arc<Mutex<CatalogWriter<crate::tools::StdChannelWriter>>>,
+    result: tokio::sync::oneshot::Receiver<Result<BackupStats, Error>>,
+}
+
 fn spawn_catalog_upload(
     client: Arc<BackupWriter>,
     encrypt: bool,
-) -> Result<
-        (
-            Arc<Mutex<CatalogWriter<crate::tools::StdChannelWriter>>>,
-            tokio::sync::oneshot::Receiver<Result<BackupStats, Error>>
-        ), Error>
-{
+) -> Result<CatalogUploadResult, Error> {
     let (catalog_tx, catalog_rx) = std::sync::mpsc::sync_channel(10); // allow to buffer 10 writes
     let catalog_stream = crate::tools::StdChannelStream(catalog_rx);
     let catalog_chunk_size = 512*1024;
     let catalog_chunk_stream = ChunkStream::new(catalog_stream, Some(catalog_chunk_size));
 
-    let catalog = Arc::new(Mutex::new(CatalogWriter::new(crate::tools::StdChannelWriter::new(catalog_tx))?));
+    let catalog_writer = Arc::new(Mutex::new(CatalogWriter::new(crate::tools::StdChannelWriter::new(catalog_tx))?));
 
     let (catalog_result_tx, catalog_result_rx) = tokio::sync::oneshot::channel();
 
+    let upload_options = UploadOptions {
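+        // the catalog is always compressed; whether it is encrypted is decided by the caller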
+        encrypt,
+        compress: true,
+        ..UploadOptions::default()
+    };
+
     tokio::spawn(async move {
         let catalog_upload_result = client
-            .upload_stream(None, CATALOG_NAME, catalog_chunk_stream, "dynamic", None, true, encrypt)
+            .upload_stream(CATALOG_NAME, catalog_chunk_stream, upload_options)
             .await;
 
         if let Err(ref err) = catalog_upload_result {
@@ -613,7 +594,7 @@ fn spawn_catalog_upload(
         let _ = catalog_result_tx.send(catalog_upload_result);
     });
 
-    Ok((catalog, catalog_result_rx))
+    Ok(CatalogUploadResult { catalog_writer, result: catalog_result_rx })
 }
 
 fn keyfile_parameters(param: &Value) -> Result<(Option<Vec<u8>>, CryptMode), Error> {
@@ -644,7 +625,7 @@ fn keyfile_parameters(param: &Value) -> Result<(Option<Vec<u8>>, CryptMode), Err
         (None, None) => None,
         (Some(_), Some(_)) => bail!("--keyfile and --keyfd are mutually exclusive"),
         (Some(keyfile), None) => {
-            println!("Using encryption key file: {}", keyfile);
+            eprintln!("Using encryption key file: {}", keyfile);
             Some(file_get_contents(keyfile)?)
         },
         (None, Some(fd)) => {
@@ -654,7 +635,7 @@ fn keyfile_parameters(param: &Value) -> Result<(Option<Vec<u8>>, CryptMode), Err
                 .map_err(|err| {
                     format_err!("error reading encryption key from fd {}: {}", fd, err)
                 })?;
-            println!("Using encryption key from file descriptor");
+            eprintln!("Using encryption key from file descriptor");
             Some(data)
         }
     };
@@ -663,7 +644,7 @@ fn keyfile_parameters(param: &Value) -> Result<(Option<Vec<u8>>, CryptMode), Err
         // no parameters:
         (None, None) => match key::read_optional_default_encryption_key()? {
             Some(key) => {
-                println!("Encrypting with default encryption key!");
+                eprintln!("Encrypting with default encryption key!");
                 (Some(key), CryptMode::Encrypt)
             },
             None => (None, CryptMode::None),
@@ -676,7 +657,7 @@ fn keyfile_parameters(param: &Value) -> Result<(Option<Vec<u8>>, CryptMode), Err
         (None, Some(crypt_mode)) => match key::read_optional_default_encryption_key()? {
             None => bail!("--crypt-mode without --keyfile and no default key file available"),
             Some(key) => {
-                println!("Encrypting with default encryption key!");
+                eprintln!("Encrypting with default encryption key!");
                 (Some(key), crypt_mode)
             },
         }
@@ -694,6 +675,129 @@ fn keyfile_parameters(param: &Value) -> Result<(Option<Vec<u8>>, CryptMode), Err
     })
 }
 
+#[test]
+// WARNING: there must only be one test for keyfile_parameters as the default key handling is not
+// safe w.r.t. concurrency
+fn test_keyfile_parameters_handling() -> Result<(), Error> {
+    let some_key = Some(vec![1;1]);
+    let default_key = Some(vec![2;1]);
+
+    let no_key_res: (Option<Vec<u8>>, CryptMode) = (None, CryptMode::None);
+    let some_key_res = (some_key.clone(), CryptMode::Encrypt);
+    let some_key_sign_res = (some_key.clone(), CryptMode::SignOnly);
+    let default_key_res = (default_key.clone(), CryptMode::Encrypt);
+    let default_key_sign_res = (default_key.clone(), CryptMode::SignOnly);
+
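+    // write the test key out so the keyfile-based cases below can read it back;
+    // the "invalid" path is intentionally never created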
+    let keypath = "./tests/keyfile.test";
+    replace_file(&keypath, some_key.as_ref().unwrap(), CreateOptions::default())?;
+    let invalid_keypath = "./tests/invalid_keyfile.test";
+
+    // no params, no default key == no key
+    let res = keyfile_parameters(&json!({}));
+    assert_eq!(res.unwrap(), no_key_res);
+
+    // keyfile param == key from keyfile
+    let res = keyfile_parameters(&json!({"keyfile": keypath}));
+    assert_eq!(res.unwrap(), some_key_res);
+
+    // crypt mode none == no key
+    let res = keyfile_parameters(&json!({"crypt-mode": "none"}));
+    assert_eq!(res.unwrap(), no_key_res);
+
+    // crypt mode encrypt/sign-only, no keyfile, no default key == Error
+    assert!(keyfile_parameters(&json!({"crypt-mode": "sign-only"})).is_err());
+    assert!(keyfile_parameters(&json!({"crypt-mode": "encrypt"})).is_err());
+
+    // crypt mode none with explicit key == Error
+    assert!(keyfile_parameters(&json!({"crypt-mode": "none", "keyfile": keypath})).is_err());
+
+    // crypt mode sign-only/encrypt with keyfile == key from keyfile with correct mode
+    let res = keyfile_parameters(&json!({"crypt-mode": "sign-only", "keyfile": keypath}));
+    assert_eq!(res.unwrap(), some_key_sign_res);
+    let res = keyfile_parameters(&json!({"crypt-mode": "encrypt", "keyfile": keypath}));
+    assert_eq!(res.unwrap(), some_key_res);
+
+    // invalid keyfile parameter always errors
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath})).is_err());
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath, "crypt-mode": "none"})).is_err());
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath, "crypt-mode": "sign-only"})).is_err());
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath, "crypt-mode": "encrypt"})).is_err());
+
+    // now set a default key
+    unsafe { key::set_test_encryption_key(Ok(default_key.clone())); }
+
+    // and repeat
+
+    // no params but default key == default key
+    let res = keyfile_parameters(&json!({}));
+    assert_eq!(res.unwrap(), default_key_res);
+
+    // keyfile param == key from keyfile
+    let res = keyfile_parameters(&json!({"keyfile": keypath}));
+    assert_eq!(res.unwrap(), some_key_res);
+
+    // crypt mode none == no key
+    let res = keyfile_parameters(&json!({"crypt-mode": "none"}));
+    assert_eq!(res.unwrap(), no_key_res);
+
+    // crypt mode encrypt/sign-only, no keyfile, default key == default key with correct mode
+    let res = keyfile_parameters(&json!({"crypt-mode": "sign-only"}));
+    assert_eq!(res.unwrap(), default_key_sign_res);
+    let res = keyfile_parameters(&json!({"crypt-mode": "encrypt"}));
+    assert_eq!(res.unwrap(), default_key_res);
+
+    // crypt mode none with explicit key == Error
+    assert!(keyfile_parameters(&json!({"crypt-mode": "none", "keyfile": keypath})).is_err());
+
+    // crypt mode sign-only/encrypt with keyfile == key from keyfile with correct mode
+    let res = keyfile_parameters(&json!({"crypt-mode": "sign-only", "keyfile": keypath}));
+    assert_eq!(res.unwrap(), some_key_sign_res);
+    let res = keyfile_parameters(&json!({"crypt-mode": "encrypt", "keyfile": keypath}));
+    assert_eq!(res.unwrap(), some_key_res);
+
+    // invalid keyfile parameter always errors
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath})).is_err());
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath, "crypt-mode": "none"})).is_err());
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath, "crypt-mode": "sign-only"})).is_err());
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath, "crypt-mode": "encrypt"})).is_err());
+
+    // now make the default key retrieval fail
+    unsafe { key::set_test_encryption_key(Err(format_err!("test error"))); }
+
+    // and repeat
+
+    // no params, default key retrieval errors == Error
+    assert!(keyfile_parameters(&json!({})).is_err());
+
+    // keyfile param == key from keyfile
+    let res = keyfile_parameters(&json!({"keyfile": keypath}));
+    assert_eq!(res.unwrap(), some_key_res);
+
+    // crypt mode none == no key
+    let res = keyfile_parameters(&json!({"crypt-mode": "none"}));
+    assert_eq!(res.unwrap(), no_key_res);
+
+    // crypt mode encrypt/sign-only, no keyfile, default key error == Error
+    assert!(keyfile_parameters(&json!({"crypt-mode": "sign-only"})).is_err());
+    assert!(keyfile_parameters(&json!({"crypt-mode": "encrypt"})).is_err());
+
+    // crypt mode none with explicit key == Error
+    assert!(keyfile_parameters(&json!({"crypt-mode": "none", "keyfile": keypath})).is_err());
+
+    // crypt mode sign-only/encrypt with keyfile == key from keyfile with correct mode
+    let res = keyfile_parameters(&json!({"crypt-mode": "sign-only", "keyfile": keypath}));
+    assert_eq!(res.unwrap(), some_key_sign_res);
+    let res = keyfile_parameters(&json!({"crypt-mode": "encrypt", "keyfile": keypath}));
+    assert_eq!(res.unwrap(), some_key_res);
+
+    // invalid keyfile parameter always errors
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath})).is_err());
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath, "crypt-mode": "none"})).is_err());
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath, "crypt-mode": "sign-only"})).is_err());
+    assert!(keyfile_parameters(&json!({"keyfile": invalid_keypath, "crypt-mode": "encrypt"})).is_err());
+    Ok(())
+}
+
 #[api(
    input: {
        properties: {
@@ -894,7 +998,7 @@ async fn create_backup(
         }
     }
 
-    let backup_time = backup_time_opt.unwrap_or_else(|| epoch_i64());
+    let backup_time = backup_time_opt.unwrap_or_else(epoch_i64);
 
     let client = connect(&repo)?;
     record_repository(&repo);
@@ -915,11 +1019,15 @@ async fn create_backup(
 
             let crypt_config = CryptConfig::new(key)?;
 
-            match key::find_master_pubkey()? {
+            match key::find_default_master_pubkey()? {
                 Some(ref path) if path.exists() => {
                     let pem_data = file_get_contents(path)?;
                     let rsa = openssl::rsa::Rsa::public_key_from_pem(&pem_data)?;
-                    let enc_key = crypt_config.generate_rsa_encoded_key(rsa, created)?;
+
+                    let mut key_config = KeyConfig::without_password(key)?;
+                    key_config.created = created; // keep original value
+
+                    let enc_key = rsa_encrypt_key_config(rsa, &key_config)?;
                     println!("Master key '{:?}'", path);
 
                     (Some(Arc::new(crypt_config)), Some(enc_key))
@@ -982,65 +1090,91 @@ async fn create_backup(
     let mut manifest = BackupManifest::new(snapshot);
 
     let mut catalog = None;
-    let mut catalog_result_tx = None;
+    let mut catalog_result_rx = None;
 
     for (backup_type, filename, target, size) in upload_list {
         match backup_type {
             BackupSpecificationType::CONFIG => {
+                let upload_options = UploadOptions {
+                    compress: true,
+                    encrypt: crypt_mode == CryptMode::Encrypt,
+                    ..UploadOptions::default()
+                };
+
                 println!("Upload config file '{}' to '{}' as {}", filename, repo, target);
                 let stats = client
-                    .upload_blob_from_file(&filename, &target, true, crypt_mode == CryptMode::Encrypt)
+                    .upload_blob_from_file(&filename, &target, upload_options)
                     .await?;
                 manifest.add_file(target, stats.size, stats.csum, crypt_mode)?;
             }
             BackupSpecificationType::LOGFILE => { // fixme: remove - not needed anymore ?
+                let upload_options = UploadOptions {
+                    compress: true,
+                    encrypt: crypt_mode == CryptMode::Encrypt,
+                    ..UploadOptions::default()
+                };
+
                 println!("Upload log file '{}' to '{}' as {}", filename, repo, target);
                 let stats = client
-                    .upload_blob_from_file(&filename, &target, true, crypt_mode == CryptMode::Encrypt)
+                    .upload_blob_from_file(&filename, &target, upload_options)
                     .await?;
                 manifest.add_file(target, stats.size, stats.csum, crypt_mode)?;
             }
             BackupSpecificationType::PXAR => {
                 // start catalog upload on first use
                 if catalog.is_none() {
-                    let (cat, res) = spawn_catalog_upload(client.clone(), crypt_mode == CryptMode::Encrypt)?;
-                    catalog = Some(cat);
-                    catalog_result_tx = Some(res);
+                    let catalog_upload_res = spawn_catalog_upload(client.clone(), crypt_mode == CryptMode::Encrypt)?;
+                    catalog = Some(catalog_upload_res.catalog_writer);
+                    catalog_result_rx = Some(catalog_upload_res.result);
                 }
                 let catalog = catalog.as_ref().unwrap();
 
                 println!("Upload directory '{}' to '{}' as {}", filename, repo, target);
                 catalog.lock().unwrap().start_directory(std::ffi::CString::new(target.as_str())?.as_c_str())?;
+
+                let pxar_options = proxmox_backup::pxar::PxarCreateOptions {
+                    device_set: devices.clone(),
+                    patterns: pattern_list.clone(),
+                    entries_max: entries_max as usize,
+                    skip_lost_and_found,
+                    verbose,
+                };
+
+                let upload_options = UploadOptions {
+                    previous_manifest: previous_manifest.clone(),
+                    compress: true,
+                    encrypt: crypt_mode == CryptMode::Encrypt,
+                    ..UploadOptions::default()
+                };
+
                 let stats = backup_directory(
                     &client,
-                    previous_manifest.clone(),
                     &filename,
                     &target,
                     chunk_size_opt,
-                    devices.clone(),
-                    verbose,
-                    skip_lost_and_found,
                     catalog.clone(),
-                    pattern_list.clone(),
-                    entries_max as usize,
-                    true,
-                    crypt_mode == CryptMode::Encrypt,
+                    pxar_options,
+                    upload_options,
                 ).await?;
                 manifest.add_file(target, stats.size, stats.csum, crypt_mode)?;
                 catalog.lock().unwrap().end_directory()?;
             }
             BackupSpecificationType::IMAGE => {
                 println!("Upload image '{}' to '{:?}' as {}", filename, repo, target);
+
+                let upload_options = UploadOptions {
+                    previous_manifest: previous_manifest.clone(),
+                    fixed_size: Some(size),
+                    compress: true,
+                    encrypt: crypt_mode == CryptMode::Encrypt,
+                };
+
                 let stats = backup_image(
                     &client,
-                    previous_manifest.clone(),
-                     &filename,
+                    &filename,
                     &target,
-                    size,
                     chunk_size_opt,
-                    true,
-                    crypt_mode == CryptMode::Encrypt,
-                    verbose,
+                    upload_options,
                 ).await?;
                 manifest.add_file(target, stats.size, stats.csum, crypt_mode)?;
             }
@@ -1057,28 +1191,21 @@ async fn create_backup(
 
         drop(catalog); // close upload stream
 
-        if let Some(catalog_result_rx) = catalog_result_tx {
+        if let Some(catalog_result_rx) = catalog_result_rx {
             let stats = catalog_result_rx.await??;
             manifest.add_file(CATALOG_NAME.to_owned(), stats.size, stats.csum, crypt_mode)?;
         }
     }
 
     if let Some(rsa_encrypted_key) = rsa_encrypted_key {
-        let target = "rsa-encrypted.key.blob";
+        let target = ENCRYPTED_KEY_BLOB_NAME;
         println!("Upload RSA encoded key to '{:?}' as {}", repo, target);
+        let options = UploadOptions { compress: false, encrypt: false, ..UploadOptions::default() };
         let stats = client
-            .upload_blob_from_data(rsa_encrypted_key, target, false, false)
+            .upload_blob_from_data(rsa_encrypted_key, target, options)
             .await?;
         manifest.add_file(target.to_string(), stats.size, stats.csum, crypt_mode)?;
 
-        // openssl rsautl -decrypt -inkey master-private.pem -in rsa-encrypted.key -out t
-        /*
-        let mut buffer2 = vec![0u8; rsa.size() as usize];
-        let pem_data = file_get_contents("master-private.pem")?;
-        let rsa = openssl::rsa::Rsa::private_key_from_pem(&pem_data)?;
-        let len = rsa.private_decrypt(&buffer, &mut buffer2, openssl::rsa::Padding::PKCS1)?;
-        println!("TEST {} {:?}", len, buffer2);
-         */
     }
     // create manifest (index.json)
     // manifests are never encrypted, but include a signature
@@ -1087,8 +1214,9 @@ async fn create_backup(
 
 
     if verbose { println!("Upload index.json to '{}'", repo) };
+    let options = UploadOptions { compress: true, encrypt: false, ..UploadOptions::default() };
     client
-        .upload_blob_from_data(manifest.into_bytes(), MANIFEST_BLOB_NAME, true, false)
+        .upload_blob_from_data(manifest.into_bytes(), MANIFEST_BLOB_NAME, options)
         .await?;
 
     client.finish().await?;
@@ -1257,7 +1385,7 @@ async fn restore(param: Value) -> Result<Value, Error> {
         None => None,
         Some(key) => {
             let (key, _, fingerprint) = decrypt_key(&key, &key::get_encryption_key_password)?;
-            println!("Encryption key fingerprint: '{}'", fingerprint);
+            eprintln!("Encryption key fingerprint: '{}'", fingerprint);
             Some(Arc::new(CryptConfig::new(key)?))
         }
     };
@@ -1272,10 +1400,15 @@ async fn restore(param: Value) -> Result<Value, Error> {
         true,
     ).await?;
 
+    let (archive_name, archive_type) = parse_archive_type(archive_name);
+
     let (manifest, backup_index_data) = client.download_manifest().await?;
-    manifest.check_fingerprint(crypt_config.as_ref().map(Arc::as_ref))?;
 
-    let (archive_name, archive_type) = parse_archive_type(archive_name);
+    if archive_name == ENCRYPTED_KEY_BLOB_NAME && crypt_config.is_none() {
+        eprintln!("Restoring encrypted key blob without original key - skipping manifest fingerprint check!")
+    } else {
+        manifest.check_fingerprint(crypt_config.as_ref().map(Arc::as_ref))?;
+    }
 
     if archive_name == MANIFEST_BLOB_NAME {
         if let Some(target) = target {
@@ -1321,20 +1454,24 @@ async fn restore(param: Value) -> Result<Value, Error> {
 
         let mut reader = BufferedDynamicReader::new(index, chunk_reader);
 
+        let options = proxmox_backup::pxar::PxarExtractOptions {
+            match_list: &[],
+            extract_match_default: true,
+            allow_existing_dirs,
+            on_error: None,
+        };
+
         if let Some(target) = target {
             proxmox_backup::pxar::extract_archive(
                 pxar::decoder::Decoder::from_std(reader)?,
                 Path::new(target),
-                &[],
-                true,
                 proxmox_backup::pxar::Flags::DEFAULT,
-                allow_existing_dirs,
                 |path| {
                     if verbose {
                         println!("{:?}", path);
                     }
                 },
-                None,
+                options,
             )
             .map_err(|err| format_err!("error extracting archive - {}", err))?;
         } else {
@@ -1451,18 +1588,18 @@ async fn prune_async(mut param: Value) -> Result<Value, Error> {
         .column(ColumnConfig::new("keep").renderer(render_prune_action).header("action"))
         ;
 
-    let info = &proxmox_backup::api2::admin::datastore::API_RETURN_SCHEMA_PRUNE;
+    let return_type = &proxmox_backup::api2::admin::datastore::API_METHOD_PRUNE.returns;
 
     let mut data = result["data"].take();
 
     if quiet {
         let list: Vec<Value> = data.as_array().unwrap().iter().filter(|item| {
             item["keep"].as_bool() == Some(false)
-        }).map(|v| v.clone()).collect();
+        }).cloned().collect();
         data = list.into();
     }
 
-    format_and_print_result_full(&mut data, info, &output_format, &options);
+    format_and_print_result_full(&mut data, return_type, &output_format, &options);
 
     Ok(Value::Null)
 }
@@ -1515,9 +1652,9 @@ async fn status(param: Value) -> Result<Value, Error> {
         .column(ColumnConfig::new("used").renderer(render_total_percentage))
         .column(ColumnConfig::new("avail").renderer(render_total_percentage));
 
-    let schema = &API_RETURN_SCHEMA_STATUS;
+    let return_type = &API_METHOD_STATUS.returns;
 
-    format_and_print_result_full(&mut data, schema, &output_format, &options);
+    format_and_print_result_full(&mut data, return_type, &output_format, &options);
 
     Ok(Value::Null)
 }
@@ -1528,13 +1665,9 @@ async fn try_get(repo: &BackupRepository, url: &str) -> Value {
     let fingerprint = std::env::var(ENV_VAR_PBS_FINGERPRINT).ok();
     let password = std::env::var(ENV_VAR_PBS_PASSWORD).ok();
 
-    let options = HttpClientOptions::new()
-        .prefix(Some("proxmox-backup".to_string()))
-        .password(password)
-        .interactive(false)
-        .fingerprint(fingerprint)
-        .fingerprint_cache(true)
-        .ticket_cache(true);
+    // ticket cache, but no questions asked
+    let options = HttpClientOptions::new_interactive(password, fingerprint)
+        .interactive(false);
 
     let client = match HttpClient::new(repo.host(), repo.port(), repo.auth_id(), options) {
         Ok(v) => v,