Ok(query.finish())
}
+
+pub fn required_string_param<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
+ match param[name].as_str() {
+ Some(s) => Ok(s),
+ None => bail!("missing parameter '{}'", name),
+ }
+}
+
+pub fn required_string_property<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
+ match param[name].as_str() {
+ Some(s) => Ok(s),
+ None => bail!("missing property '{}'", name),
+ }
+}
+
+pub fn required_integer_param(param: &Value, name: &str) -> Result<i64, Error> {
+ match param[name].as_i64() {
+ Some(s) => Ok(s),
+ None => bail!("missing parameter '{}'", name),
+ }
+}
+
+pub fn required_integer_property(param: &Value, name: &str) -> Result<i64, Error> {
+ match param[name].as_i64() {
+ Some(s) => Ok(s),
+ None => bail!("missing property '{}'", name),
+ }
+}
+
+pub fn required_array_param<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
+ match param[name].as_array() {
+ Some(s) => Ok(&s),
+ None => bail!("missing parameter '{}'", name),
+ }
+}
+
+pub fn required_array_property<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
+ match param[name].as_array() {
+ Some(s) => Ok(&s),
+ None => bail!("missing property '{}'", name),
+ }
+}
use pxar::EntryKind;
use pbs_client::pxar::create_zip;
+use pbs_tools::json::{required_integer_param, required_string_param};
use crate::api2::types::*;
use crate::api2::node::rrd::create_value_from_rrd;
use crate::config::cached_user_info::CachedUserInfo;
use crate::server::{jobstate::Job, WorkerTask};
-use crate::tools::{
- self,
- AsyncChannelWriter, AsyncReaderStream, WrappedReaderStream,
-};
+use crate::tools::{AsyncChannelWriter, AsyncReaderStream, WrappedReaderStream};
use crate::config::acl::{
PRIV_DATASTORE_AUDIT,
) -> ApiResponseFuture {
async move {
- let store = tools::required_string_param(&param, "store")?;
+ let store = required_string_param(&param, "store")?;
let datastore = DataStore::lookup_datastore(store)?;
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
- let file_name = tools::required_string_param(&param, "file-name")?.to_owned();
+ let file_name = required_string_param(&param, "file-name")?.to_owned();
- let backup_type = tools::required_string_param(&param, "backup-type")?;
- let backup_id = tools::required_string_param(&param, "backup-id")?;
- let backup_time = tools::required_integer_param(&param, "backup-time")?;
+ let backup_type = required_string_param(&param, "backup-type")?;
+ let backup_id = required_string_param(&param, "backup-id")?;
+ let backup_time = required_integer_param(&param, "backup-time")?;
let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;
) -> ApiResponseFuture {
async move {
- let store = tools::required_string_param(&param, "store")?;
+ let store = required_string_param(&param, "store")?;
let datastore = DataStore::lookup_datastore(store)?;
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
- let file_name = tools::required_string_param(&param, "file-name")?.to_owned();
+ let file_name = required_string_param(&param, "file-name")?.to_owned();
- let backup_type = tools::required_string_param(&param, "backup-type")?;
- let backup_id = tools::required_string_param(&param, "backup-id")?;
- let backup_time = tools::required_integer_param(&param, "backup-time")?;
+ let backup_type = required_string_param(&param, "backup-type")?;
+ let backup_id = required_string_param(&param, "backup-id")?;
+ let backup_time = required_integer_param(&param, "backup-time")?;
let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;
) -> ApiResponseFuture {
async move {
- let store = tools::required_string_param(&param, "store")?;
+ let store = required_string_param(&param, "store")?;
let datastore = DataStore::lookup_datastore(store)?;
let file_name = CLIENT_LOG_BLOB_NAME;
- let backup_type = tools::required_string_param(&param, "backup-type")?;
- let backup_id = tools::required_string_param(&param, "backup-id")?;
- let backup_time = tools::required_integer_param(&param, "backup-time")?;
+ let backup_type = required_string_param(&param, "backup-type")?;
+ let backup_id = required_string_param(&param, "backup-id")?;
+ let backup_time = required_integer_param(&param, "backup-time")?;
let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;
) -> ApiResponseFuture {
async move {
- let store = tools::required_string_param(&param, "store")?;
+ let store = required_string_param(&param, "store")?;
let datastore = DataStore::lookup_datastore(&store)?;
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
- let filepath = tools::required_string_param(&param, "filepath")?.to_owned();
+ let filepath = required_string_param(&param, "filepath")?.to_owned();
- let backup_type = tools::required_string_param(&param, "backup-type")?;
- let backup_id = tools::required_string_param(&param, "backup-id")?;
- let backup_time = tools::required_integer_param(&param, "backup-time")?;
+ let backup_type = required_string_param(&param, "backup-type")?;
+ let backup_id = required_string_param(&param, "backup-id")?;
+ let backup_time = required_integer_param(&param, "backup-time")?;
let backup_dir = BackupDir::new(backup_type, backup_id, backup_time)?;
use proxmox::api::schema::*;
use pbs_tools::fs::lock_dir_noblock_shared;
+use pbs_tools::json::{required_array_param, required_integer_param, required_string_param};
use pbs_datastore::PROXMOX_BACKUP_PROTOCOL_ID_V1;
-use crate::tools;
use crate::server::{WorkerTask, H2Service};
use crate::backup::*;
use crate::api2::types::*;
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
- let store = tools::required_string_param(&param, "store")?.to_owned();
+ let store = required_string_param(&param, "store")?.to_owned();
let user_info = CachedUserInfo::new()?;
user_info.check_privs(&auth_id, &["datastore", &store], PRIV_DATASTORE_BACKUP, false)?;
let datastore = DataStore::lookup_datastore(&store)?;
- let backup_type = tools::required_string_param(&param, "backup-type")?;
- let backup_id = tools::required_string_param(&param, "backup-id")?;
- let backup_time = tools::required_integer_param(&param, "backup-time")?;
+ let backup_type = required_string_param(&param, "backup-type")?;
+ let backup_id = required_string_param(&param, "backup-id")?;
+ let backup_time = required_integer_param(&param, "backup-time")?;
let protocols = parts
.headers
let env: &BackupEnvironment = rpcenv.as_ref();
- let name = tools::required_string_param(&param, "archive-name")?.to_owned();
+ let name = required_string_param(&param, "archive-name")?.to_owned();
let archive_name = name.clone();
if !archive_name.ends_with(".didx") {
let env: &BackupEnvironment = rpcenv.as_ref();
- let name = tools::required_string_param(&param, "archive-name")?.to_owned();
- let size = tools::required_integer_param(&param, "size")? as usize;
+ let name = required_string_param(&param, "archive-name")?.to_owned();
+ let size = required_integer_param(&param, "size")? as usize;
let reuse_csum = param["reuse-csum"].as_str();
let archive_name = name.clone();
rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {
- let wid = tools::required_integer_param(&param, "wid")? as usize;
- let digest_list = tools::required_array_param(&param, "digest-list")?;
- let offset_list = tools::required_array_param(&param, "offset-list")?;
+ let wid = required_integer_param(&param, "wid")? as usize;
+ let digest_list = required_array_param(&param, "digest-list")?;
+ let offset_list = required_array_param(&param, "offset-list")?;
if offset_list.len() != digest_list.len() {
bail!("offset list has wrong length ({} != {})", offset_list.len(), digest_list.len());
rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {
- let wid = tools::required_integer_param(&param, "wid")? as usize;
- let digest_list = tools::required_array_param(&param, "digest-list")?;
- let offset_list = tools::required_array_param(&param, "offset-list")?;
+ let wid = required_integer_param(&param, "wid")? as usize;
+ let digest_list = required_array_param(&param, "digest-list")?;
+ let offset_list = required_array_param(&param, "offset-list")?;
if offset_list.len() != digest_list.len() {
bail!("offset list has wrong length ({} != {})", offset_list.len(), digest_list.len());
rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {
- let wid = tools::required_integer_param(&param, "wid")? as usize;
- let chunk_count = tools::required_integer_param(&param, "chunk-count")? as u64;
- let size = tools::required_integer_param(&param, "size")? as u64;
- let csum_str = tools::required_string_param(&param, "csum")?;
+ let wid = required_integer_param(&param, "wid")? as usize;
+ let chunk_count = required_integer_param(&param, "chunk-count")? as u64;
+ let size = required_integer_param(&param, "size")? as u64;
+ let csum_str = required_string_param(&param, "csum")?;
let csum = proxmox::tools::hex_to_digest(csum_str)?;
let env: &BackupEnvironment = rpcenv.as_ref();
rpcenv: &mut dyn RpcEnvironment,
) -> Result<Value, Error> {
- let wid = tools::required_integer_param(&param, "wid")? as usize;
- let chunk_count = tools::required_integer_param(&param, "chunk-count")? as u64;
- let size = tools::required_integer_param(&param, "size")? as u64;
- let csum_str = tools::required_string_param(&param, "csum")?;
+ let wid = required_integer_param(&param, "wid")? as usize;
+ let chunk_count = required_integer_param(&param, "chunk-count")? as u64;
+ let size = required_integer_param(&param, "size")? as u64;
+ let csum_str = required_string_param(&param, "csum")?;
let csum = proxmox::tools::hex_to_digest(csum_str)?;
let env: &BackupEnvironment = rpcenv.as_ref();
async move {
let env: &BackupEnvironment = rpcenv.as_ref();
- let archive_name = tools::required_string_param(&param, "archive-name")?.to_owned();
+ let archive_name = required_string_param(&param, "archive-name")?.to_owned();
let last_backup = match &env.last_backup {
Some(info) => info,
use proxmox::api::{ApiResponseFuture, ApiHandler, ApiMethod, RpcEnvironment};
use proxmox::api::schema::*;
+use pbs_tools::json::{required_integer_param, required_string_param};
+
use crate::api2::types::*;
use crate::backup::*;
-use crate::tools;
use super::environment::*;
) -> ApiResponseFuture {
async move {
- let wid = tools::required_integer_param(&param, "wid")? as usize;
- let size = tools::required_integer_param(&param, "size")? as u32;
- let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;
+ let wid = required_integer_param(&param, "wid")? as usize;
+ let size = required_integer_param(&param, "size")? as u32;
+ let encoded_size = required_integer_param(&param, "encoded-size")? as u32;
- let digest_str = tools::required_string_param(&param, "digest")?;
+ let digest_str = required_string_param(&param, "digest")?;
let digest = proxmox::tools::hex_to_digest(digest_str)?;
let env: &BackupEnvironment = rpcenv.as_ref();
) -> ApiResponseFuture {
async move {
- let wid = tools::required_integer_param(&param, "wid")? as usize;
- let size = tools::required_integer_param(&param, "size")? as u32;
- let encoded_size = tools::required_integer_param(&param, "encoded-size")? as u32;
+ let wid = required_integer_param(&param, "wid")? as usize;
+ let size = required_integer_param(&param, "size")? as u32;
+ let encoded_size = required_integer_param(&param, "encoded-size")? as u32;
- let digest_str = tools::required_string_param(&param, "digest")?;
+ let digest_str = required_string_param(&param, "digest")?;
let digest = proxmox::tools::hex_to_digest(digest_str)?;
let env: &BackupEnvironment = rpcenv.as_ref();
) -> ApiResponseFuture {
async move {
- let file_name = tools::required_string_param(&param, "file-name")?.to_owned();
- let encoded_size = tools::required_integer_param(&param, "encoded-size")? as usize;
+ let file_name = required_string_param(&param, "file-name")?.to_owned();
+ let encoded_size = required_integer_param(&param, "encoded-size")? as usize;
let env: &BackupEnvironment = rpcenv.as_ref();
param: Value,
) -> Result<Value, Error> {
- let name = crate::tools::required_string_param(&param, "name")?.to_owned();
+ let name = pbs_tools::json::required_string_param(&param, "name")?.to_owned();
let version = param["version"].as_str();
let pkg_info = apt::list_installed_apt_packages(|data| {
}
let userid = auth_id.user();
- let ticket = tools::required_string_param(&param, "vncticket")?;
- let port: u16 = tools::required_integer_param(&param, "port")? as u16;
+ let ticket = pbs_tools::json::required_string_param(&param, "vncticket")?;
+ let port: u16 = pbs_tools::json::required_integer_param(&param, "port")? as u16;
// will be checked again by termproxy
Ticket::<Empty>::parse(ticket)?
param: Value,
) -> Result<(), Error> {
- let interface_type = crate::tools::required_string_param(&param, "type")?;
+ let interface_type = pbs_tools::json::required_string_param(&param, "type")?;
let interface_type: NetworkInterfaceType = serde_json::from_value(interface_type.into())?;
let _lock = open_file_locked(network::NETWORK_LOCKFILE, std::time::Duration::new(10, 0), true)?;
use proxmox::api::router::SubdirMap;
use proxmox::{identity, list_subdirs_api_method, sortable};
-use crate::tools;
-
use crate::api2::types::*;
use crate::api2::pull::check_pull_privs;
fn extract_upid(param: &Value) -> Result<UPID, Error> {
- let upid_str = tools::required_string_param(&param, "upid")?;
+ let upid_str = pbs_tools::json::required_string_param(&param, "upid")?;
upid_str.parse::<UPID>()
}
};
use pbs_tools::fs::lock_dir_noblock_shared;
+use pbs_tools::json::{required_integer_param, required_string_param};
use pbs_datastore::PROXMOX_BACKUP_READER_PROTOCOL_ID_V1;
use crate::{
WorkerTask,
H2Service,
},
- tools,
config::{
acl::{
PRIV_DATASTORE_READ,
let debug = param["debug"].as_bool().unwrap_or(false);
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
- let store = tools::required_string_param(&param, "store")?.to_owned();
+ let store = required_string_param(&param, "store")?.to_owned();
let user_info = CachedUserInfo::new()?;
let privs = user_info.lookup_privs(&auth_id, &["datastore", &store]);
let datastore = DataStore::lookup_datastore(&store)?;
- let backup_type = tools::required_string_param(&param, "backup-type")?;
- let backup_id = tools::required_string_param(&param, "backup-id")?;
- let backup_time = tools::required_integer_param(&param, "backup-time")?;
+ let backup_type = required_string_param(&param, "backup-type")?;
+ let backup_id = required_string_param(&param, "backup-id")?;
+ let backup_time = required_integer_param(&param, "backup-time")?;
let protocols = parts
.headers
async move {
let env: &ReaderEnvironment = rpcenv.as_ref();
- let file_name = tools::required_string_param(&param, "file-name")?.to_owned();
+ let file_name = required_string_param(&param, "file-name")?.to_owned();
let mut path = env.datastore.base_path();
path.push(env.backup_dir.relative_path());
async move {
let env: &ReaderEnvironment = rpcenv.as_ref();
- let digest_str = tools::required_string_param(&param, "digest")?;
+ let digest_str = required_string_param(&param, "digest")?;
let digest = proxmox::tools::hex_to_digest(digest_str)?;
if !env.check_chunk_access(digest) {
let env: &ReaderEnvironment = rpcenv.as_ref();
let env2 = env.clone();
- let digest_str = tools::required_string_param(&param, "digest")?;
+ let digest_str = required_string_param(&param, "digest")?;
let digest = proxmox::tools::hex_to_digest(digest_str)?;
let (path, _) = env.datastore.chunk_path(&digest);
use pbs_datastore::prune::PruneOptions;
use pbs_tools::sync::StdChannelWriter;
use pbs_tools::tokio::TokioWriterAdapter;
+use pbs_tools::json;
use proxmox_backup::backup::{
BufferedDynamicReader,
encrypt: bool,
) -> Result<CatalogUploadResult, Error> {
let (catalog_tx, catalog_rx) = std::sync::mpsc::sync_channel(10); // allow to buffer 10 writes
- let catalog_stream = crate::tools::StdChannelStream(catalog_rx);
+ let catalog_stream = tools::StdChannelStream(catalog_rx);
let catalog_chunk_size = 512*1024;
let catalog_chunk_stream = ChunkStream::new(catalog_stream, Some(catalog_chunk_size));
let repo = extract_repository_from_value(&param)?;
- let backupspec_list = tools::required_array_param(&param, "backupspec")?;
+ let backupspec_list = json::required_array_param(&param, "backupspec")?;
let all_file_systems = param["all-file-systems"].as_bool().unwrap_or(false);
let allow_existing_dirs = param["allow-existing-dirs"].as_bool().unwrap_or(false);
- let archive_name = tools::required_string_param(&param, "archive-name")?;
+ let archive_name = json::required_string_param(&param, "archive-name")?;
let client = connect(&repo)?;
record_repository(&repo);
- let path = tools::required_string_param(&param, "snapshot")?;
+ let path = json::required_string_param(&param, "snapshot")?;
let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 {
let group: BackupGroup = path.parse()?;
(snapshot.group().backup_type().to_owned(), snapshot.group().backup_id().to_owned(), snapshot.backup_time())
};
- let target = tools::required_string_param(&param, "target")?;
+ let target = json::required_string_param(&param, "target")?;
let target = if target == "-" { None } else { Some(target) };
let crypto = crypto_parameters(&param)?;
use pbs_client::{connect_to_localhost, display_task_log, view_task_result};
use pbs_tools::percent_encoding::percent_encode_component;
+use pbs_tools::json::required_string_param;
-use proxmox_backup::tools;
use proxmox_backup::config;
use proxmox_backup::api2::{self, types::* };
use proxmox_backup::server::wait_for_local_worker;
let output_format = get_output_format(&param);
- let store = tools::required_string_param(&param, "store")?;
+ let store = required_string_param(&param, "store")?;
let mut client = connect_to_localhost()?;
let output_format = get_output_format(&param);
- let store = tools::required_string_param(&param, "store")?;
+ let store = required_string_param(&param, "store")?;
let client = connect_to_localhost()?;
/// Display the task log.
async fn task_log(param: Value) -> Result<Value, Error> {
- let upid = tools::required_string_param(&param, "upid")?;
+ let upid = required_string_param(&param, "upid")?;
let mut client = connect_to_localhost()?;
/// Try to stop a specific task.
async fn task_stop(param: Value) -> Result<Value, Error> {
- let upid_str = tools::required_string_param(&param, "upid")?;
+ let upid_str = required_string_param(&param, "upid")?;
let mut client = connect_to_localhost()?;
use pbs_client::tools::key_source::get_encryption_key_password;
use pbs_client::{BackupReader, RemoteChunkReader};
-
-use proxmox_backup::tools;
+use pbs_tools::json::required_string_param;
use crate::{
REPO_URL_SCHEMA,
let repo = extract_repository_from_value(&param)?;
- let path = tools::required_string_param(&param, "snapshot")?;
+ let path = required_string_param(&param, "snapshot")?;
let snapshot: BackupDir = path.parse()?;
let crypto = crypto_parameters(&param)?;
async fn catalog_shell(param: Value) -> Result<(), Error> {
let repo = extract_repository_from_value(&param)?;
let client = connect(&repo)?;
- let path = tools::required_string_param(&param, "snapshot")?;
- let archive_name = tools::required_string_param(&param, "archive-name")?;
+ let path = required_string_param(&param, "snapshot")?;
+ let archive_name = required_string_param(&param, "archive-name")?;
let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 {
let group: BackupGroup = path.parse()?;
use pbs_client::tools::key_source::get_encryption_key_password;
use pbs_client::{BackupReader, RemoteChunkReader};
+use pbs_tools::json::required_string_param;
use proxmox_backup::tools;
use proxmox_backup::backup::{
async fn mount_do(param: Value, pipe: Option<Fd>) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?;
- let archive_name = tools::required_string_param(&param, "archive-name")?;
+ let archive_name = required_string_param(&param, "archive-name")?;
let client = connect(&repo)?;
let target = param["target"].as_str();
record_repository(&repo);
- let path = tools::required_string_param(&param, "snapshot")?;
+ let path = required_string_param(&param, "snapshot")?;
let (backup_type, backup_id, backup_time) = if path.matches('/').count() == 1 {
let group: BackupGroup = path.parse()?;
api_datastore_latest_snapshot(&client, repo.store(), group).await?
};
use pbs_client::tools::key_source::get_encryption_key_password;
+use pbs_tools::json::required_string_param;
use proxmox_backup::{
- tools,
api2::types::*,
backup::{
CryptMode,
let repo = extract_repository_from_value(&param)?;
- let path = tools::required_string_param(&param, "snapshot")?;
+ let path = required_string_param(&param, "snapshot")?;
let snapshot: BackupDir = path.parse()?;
let output_format = get_output_format(&param);
let repo = extract_repository_from_value(&param)?;
- let path = tools::required_string_param(&param, "snapshot")?;
+ let path = required_string_param(&param, "snapshot")?;
let snapshot: BackupDir = path.parse()?;
let mut client = connect(&repo)?;
/// Upload backup log file.
async fn upload_log(param: Value) -> Result<Value, Error> {
- let logfile = tools::required_string_param(&param, "logfile")?;
+ let logfile = required_string_param(&param, "logfile")?;
let repo = extract_repository_from_value(&param)?;
- let snapshot = tools::required_string_param(&param, "snapshot")?;
+ let snapshot = required_string_param(&param, "snapshot")?;
let snapshot: BackupDir = snapshot.parse()?;
let mut client = connect(&repo)?;
/// Show notes
async fn show_notes(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?;
- let path = tools::required_string_param(&param, "snapshot")?;
+ let path = required_string_param(&param, "snapshot")?;
let snapshot: BackupDir = path.parse()?;
let client = connect(&repo)?;
/// Update Notes
async fn update_notes(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?;
- let path = tools::required_string_param(&param, "snapshot")?;
- let notes = tools::required_string_param(&param, "notes")?;
+ let path = required_string_param(&param, "snapshot")?;
+ let notes = required_string_param(&param, "notes")?;
let snapshot: BackupDir = path.parse()?;
let mut client = connect(&repo)?;
use proxmox::api::{api, cli::*};
-use pbs_tools::percent_encoding::percent_encode_component;
use pbs_client::display_task_log;
-
-use proxmox_backup::tools;
+use pbs_tools::percent_encoding::percent_encode_component;
+use pbs_tools::json::required_string_param;
use proxmox_backup::api2::types::UPID_SCHEMA;
async fn task_log(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?;
- let upid = tools::required_string_param(&param, "upid")?;
+ let upid = required_string_param(&param, "upid")?;
let mut client = connect(&repo)?;
async fn task_stop(param: Value) -> Result<Value, Error> {
let repo = extract_repository_from_value(&param)?;
- let upid_str = tools::required_string_param(&param, "upid")?;
+ let upid_str = required_string_param(&param, "upid")?;
let mut client = connect(&repo)?;
use pbs_client::pxar::{create_archive, Flags, PxarCreateOptions, ENCODER_MAX_ENTRIES};
use pbs_tools::fs::read_subdir;
+use pbs_tools::json::required_string_param;
use pbs_tools::zip::zip_directory;
use proxmox_backup::api2::types::*;
use proxmox_backup::backup::DirEntryAttribute;
-use proxmox_backup::tools;
use pxar::encoder::aio::TokioWriter;
Err(_) => bail!("maximum concurrent download limit reached, please wait for another restore to finish before attempting a new one"),
};
- let path = tools::required_string_param(&param, "path")?;
+ let path = required_string_param(&param, "path")?;
let mut path = base64::decode(path)?;
if let Some(b'/') = path.last() {
path.pop();
use std::os::unix::io::RawFd;
use anyhow::{bail, format_err, Error};
-use serde_json::Value;
use openssl::hash::{hash, DigestBytes, MessageDigest};
pub use proxmox::tools::fd::Fd;
fn buffered_read(&mut self, offset: u64) -> Result<&[u8], Error>;
}
-pub fn required_string_param<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
- match param[name].as_str() {
- Some(s) => Ok(s),
- None => bail!("missing parameter '{}'", name),
- }
-}
-
-pub fn required_string_property<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
- match param[name].as_str() {
- Some(s) => Ok(s),
- None => bail!("missing property '{}'", name),
- }
-}
-
-pub fn required_integer_param(param: &Value, name: &str) -> Result<i64, Error> {
- match param[name].as_i64() {
- Some(s) => Ok(s),
- None => bail!("missing parameter '{}'", name),
- }
-}
-
-pub fn required_integer_property(param: &Value, name: &str) -> Result<i64, Error> {
- match param[name].as_i64() {
- Some(s) => Ok(s),
- None => bail!("missing property '{}'", name),
- }
-}
-
-pub fn required_array_param<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
- match param[name].as_array() {
- Some(s) => Ok(&s),
- None => bail!("missing parameter '{}'", name),
- }
-}
-
-pub fn required_array_property<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
- match param[name].as_array() {
- Some(s) => Ok(&s),
- None => bail!("missing property '{}'", name),
- }
-}
-
/// Shortcut for md5 sums.
pub fn md5sum(data: &[u8]) -> Result<DigestBytes, Error> {
hash(MessageDigest::md5(), data).map_err(Error::from)