//!
//! This is a collection of small and useful tools.
use std::any::Any;
+use std::borrow::Borrow;
use std::collections::HashMap;
use std::hash::BuildHasher;
use std::fs::File;
-use std::io::{self, BufRead, ErrorKind, Read, Seek, SeekFrom};
+use std::io::{self, BufRead, Read, Seek, SeekFrom};
use std::os::unix::io::RawFd;
use std::path::Path;
use anyhow::{bail, format_err, Error};
use serde_json::Value;
use openssl::hash::{hash, DigestBytes, MessageDigest};
-use percent_encoding::AsciiSet;
-
-use proxmox::tools::vec;
+use percent_encoding::{utf8_percent_encode, AsciiSet};
pub use proxmox::tools::fd::Fd;
pub mod acl;
+pub mod apt;
pub mod async_io;
pub mod borrow;
pub mod cert;
pub mod daemon;
pub mod disks;
-pub mod fs;
pub mod format;
+pub mod fs;
+pub mod fuse_loop;
+pub mod http;
+pub mod json;
+pub mod logrotate;
+pub mod loopdev;
pub mod lru_cache;
+pub mod nom;
pub mod runtime;
-pub mod ticket;
+pub mod serde_filter;
+pub mod socket;
pub mod statistics;
+pub mod subscription;
pub mod systemd;
-pub mod nom;
-pub mod logrotate;
-pub mod loopdev;
-pub mod fuse_loop;
-pub mod socket;
+pub mod ticket;
+pub mod xattr;
pub mod zip;
+pub mod sgutils2;
+pub mod paperkey;
-mod parallel_handler;
-pub use parallel_handler::*;
+pub mod parallel_handler;
+pub use parallel_handler::ParallelHandler;
mod wrapped_reader_stream;
-pub use wrapped_reader_stream::*;
+pub use wrapped_reader_stream::{AsyncReaderStream, StdChannelStream, WrappedReaderStream};
mod async_channel_writer;
-pub use async_channel_writer::*;
-
+pub use async_channel_writer::AsyncChannelWriter;
mod std_channel_writer;
-pub use std_channel_writer::*;
+pub use std_channel_writer::StdChannelWriter;
-pub mod xattr;
+mod tokio_writer_adapter;
+pub use tokio_writer_adapter::TokioWriterAdapter;
mod process_locker;
-pub use process_locker::*;
+pub use process_locker::{ProcessLocker, ProcessLockExclusiveGuard, ProcessLockSharedGuard};
mod file_logger;
-pub use file_logger::*;
+pub use file_logger::{FileLogger, FileLogOptions};
mod broadcast_future;
-pub use broadcast_future::*;
+pub use broadcast_future::{BroadcastData, BroadcastFuture};
/// The `BufferedRead` trait provides a single function
/// `buffered_read`. It returns a reference to an internal buffer. The
fn buffered_read(&mut self, offset: u64) -> Result<&[u8], Error>;
}
-/// Split a file into equal sized chunks. The last chunk may be
-/// smaller. Note: We cannot implement an `Iterator`, because iterators
-/// cannot return a borrowed buffer ref (we want zero-copy)
-pub fn file_chunker<C, R>(mut file: R, chunk_size: usize, mut chunk_cb: C) -> Result<(), Error>
-where
- C: FnMut(usize, &[u8]) -> Result<bool, Error>,
- R: Read,
-{
- const READ_BUFFER_SIZE: usize = 4 * 1024 * 1024; // 4M
-
- if chunk_size > READ_BUFFER_SIZE {
- bail!("chunk size too large!");
- }
-
- let mut buf = vec::undefined(READ_BUFFER_SIZE);
-
- let mut pos = 0;
- let mut file_pos = 0;
- loop {
- let mut eof = false;
- let mut tmp = &mut buf[..];
- // try to read large portions, at least chunk_size
- while pos < chunk_size {
- match file.read(tmp) {
- Ok(0) => {
- eof = true;
- break;
- }
- Ok(n) => {
- pos += n;
- if pos > chunk_size {
- break;
- }
- tmp = &mut tmp[n..];
- }
- Err(ref e) if e.kind() == ErrorKind::Interrupted => { /* try again */ }
- Err(e) => bail!("read chunk failed - {}", e.to_string()),
- }
- }
- let mut start = 0;
- while start + chunk_size <= pos {
- if !(chunk_cb)(file_pos, &buf[start..start + chunk_size])? {
- break;
- }
- file_pos += chunk_size;
- start += chunk_size;
- }
- if eof {
- if start < pos {
- (chunk_cb)(file_pos, &buf[start..pos])?;
- //file_pos += pos - start;
- }
- break;
- } else {
- let rest = pos - start;
- if rest > 0 {
- let ptr = buf.as_mut_ptr();
- unsafe {
- std::ptr::copy_nonoverlapping(ptr.add(start), ptr, rest);
- }
- pos = rest;
- } else {
- pos = 0;
- }
- }
- }
-
- Ok(())
-}
-
pub fn json_object_to_query(data: Value) -> Result<String, Error> {
let mut query = url::form_urlencoded::Serializer::new(String::new());
}
}
-pub fn required_integer_param<'a>(param: &'a Value, name: &str) -> Result<i64, Error> {
+pub fn required_integer_param(param: &Value, name: &str) -> Result<i64, Error> {
match param[name].as_i64() {
Some(s) => Ok(s),
None => bail!("missing parameter '{}'", name),
}
}
-pub fn required_integer_property<'a>(param: &'a Value, name: &str) -> Result<i64, Error> {
+pub fn required_integer_property(param: &Value, name: &str) -> Result<i64, Error> {
match param[name].as_i64() {
Some(s) => Ok(s),
None => bail!("missing property '{}'", name),
}
}
-pub fn required_array_param<'a>(param: &'a Value, name: &str) -> Result<Vec<Value>, Error> {
+pub fn required_array_param<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
match param[name].as_array() {
- Some(s) => Ok(s.to_vec()),
+ Some(s) => Ok(&s),
None => bail!("missing parameter '{}'", name),
}
}
-pub fn required_array_property<'a>(param: &'a Value, name: &str) -> Result<Vec<Value>, Error> {
+pub fn required_array_property<'a>(param: &'a Value, name: &str) -> Result<&'a [Value], Error> {
match param[name].as_array() {
- Some(s) => Ok(s.to_vec()),
+ Some(s) => Ok(&s),
None => bail!("missing property '{}'", name),
}
}
-pub fn complete_file_name<S: BuildHasher>(arg: &str, _param: &HashMap<String, String, S>) -> Vec<String> {
+pub fn complete_file_name<S>(arg: &str, _param: &HashMap<String, String, S>) -> Vec<String>
+where
+ S: BuildHasher,
+{
let mut result = vec![];
use nix::fcntl::AtFlags;
pub fn get_hardware_address() -> Result<String, Error> {
static FILENAME: &str = "/etc/ssh/ssh_host_rsa_key.pub";
- let contents = proxmox::tools::fs::file_get_contents(FILENAME)?;
- let digest = md5sum(&contents)?;
+ let contents = proxmox::tools::fs::file_get_contents(FILENAME)
+ .map_err(|e| format_err!("Error getting host key - {}", e))?;
+ let digest = md5sum(&contents)
+ .map_err(|e| format_err!("Error digesting host key - {}", e))?;
- Ok(proxmox::tools::bin_to_hex(&digest))
+ Ok(proxmox::tools::bin_to_hex(&digest).to_uppercase())
}
pub fn assert_if_modified(digest1: &str, digest2: &str) -> Result<(), Error> {
None
}
-pub fn join(data: &Vec<String>, sep: char) -> String {
+/// percent encode a url component
+pub fn percent_encode_component(comp: &str) -> String {
+ utf8_percent_encode(comp, percent_encoding::NON_ALPHANUMERIC).to_string()
+}
+
/// Join the items of `data` into a single `String`, separated by `sep`.
///
/// Generic over anything borrowable as `str` (`String`, `&str`, ...).
/// An empty slice yields an empty string; no trailing separator is
/// produced.
pub fn join<S: Borrow<str>>(data: &[S], sep: char) -> String {
    let mut list = String::new();
    for item in data {
        if !list.is_empty() {
            list.push(sep);
        }
        list.push_str(item.borrow());
    }
    list
}
/// Compare two 32-byte configuration digests (e.g. SHA-256 checksums).
///
/// This function fails with a reasonable error message if checksums do not match.
pub fn detect_modified_configuration_file(digest1: &[u8;32], digest2: &[u8;32]) -> Result<(), Error> {
    if digest1 != digest2 {
        bail!("detected modified configuration - file changed by other user? Try again.");
    }
    Ok(())
}
let output = command.output()
.map_err(|err| format_err!("failed to execute {:?} - {}", command, err))?;
- let output = crate::tools::command_output_as_string(output, exit_code_check)
+ let output = command_output_as_string(output, exit_code_check)
.map_err(|err| format_err!("command {:?} failed - {}", command, err))?;
Ok(output)
loop {
let count = match file.read(&mut buffer) {
+ Ok(0) => break,
Ok(count) => count,
Err(ref err) if err.kind() == std::io::ErrorKind::Interrupted => {
continue;
}
Err(err) => return Err(err.into()),
};
- if count == 0 {
- break;
- }
size += count as u64;
hasher.update(&buffer[..count]);
}
Ok((csum, size))
}
+
+/// Create the base run-directory.
+///
+/// This exists to fixate the permissions for the run *base* directory while allowing intermediate
+/// directories after it to have different permissions.
+pub fn create_run_dir() -> Result<(), Error> {
+ let _: bool = proxmox::tools::fs::create_path(PROXMOX_BACKUP_RUN_DIR_M!(), None, None)?;
+ Ok(())
+}