//! Tools and utilities
//!
//! This is a collection of small and useful tools.
use std::any::Any;
use std::collections::HashMap;
use std::hash::BuildHasher;
use std::fs::File;
use std::io::{self, BufRead, ErrorKind, Read, Seek, SeekFrom};
use std::os::unix::io::RawFd;
use std::path::Path;

use anyhow::{bail, format_err, Error};
use serde_json::Value;
use openssl::hash::{hash, DigestBytes, MessageDigest};
use percent_encoding::AsciiSet;

use proxmox::tools::vec;

pub use proxmox::tools::fd::Fd;

pub mod acl;
pub mod async_io;
pub mod borrow;
pub mod cert;
pub mod daemon;
pub mod disks;
pub mod fs;
pub mod format;
pub mod lru_cache;
pub mod runtime;
pub mod ticket;
pub mod statistics;
pub mod systemd;
pub mod nom;
pub mod logrotate;

mod parallel_handler;
pub use parallel_handler::*;

mod wrapped_reader_stream;
pub use wrapped_reader_stream::*;

mod std_channel_writer;
pub use std_channel_writer::*;

pub mod xattr;

mod process_locker;
pub use process_locker::*;

mod file_logger;
pub use file_logger::*;

mod broadcast_future;
pub use broadcast_future::*;

/// The `BufferedRead` trait provides a single function
/// `buffered_read`. It returns a reference to an internal buffer. The
/// purpose of this trait is to avoid unnecessary data copies.
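///
/// A minimal usage sketch (illustrative only; `reader` stands for any type
/// implementing `BufferedRead`):
///
/// ```ignore
/// let mut offset = 0u64;
/// loop {
///     let data = reader.buffered_read(offset)?;
///     if data.is_empty() {
///         break; // `offset` points to the end of the file
///     }
///     offset += data.len() as u64;
///     // process `data` directly, without copying it out of the internal buffer
/// }
/// ```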
pub trait BufferedRead {
    /// This function tries to fill the internal buffers, then
    /// returns a reference to the available data. It returns an empty
    /// buffer if `offset` points to the end of the file.
    fn buffered_read(&mut self, offset: u64) -> Result<&[u8], Error>;
}

/// Split a file into equal sized chunks. The last chunk may be
/// smaller. Note: We cannot implement an `Iterator`, because iterators
/// cannot return a borrowed buffer ref (we want zero-copy).
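///
/// Usage sketch (illustrative only; the input and chunk size are arbitrary, and
/// any `Read` implementation works the same way):
///
/// ```ignore
/// let data = vec![0u8; 150 * 1024];
/// file_chunker(&data[..], 64 * 1024, |pos, chunk| {
///     println!("chunk at offset {}, {} bytes", pos, chunk.len());
///     Ok(true) // return Ok(false) to stop early
/// })?;
/// ```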
pub fn file_chunker<C, R>(mut file: R, chunk_size: usize, mut chunk_cb: C) -> Result<(), Error>
where
    C: FnMut(usize, &[u8]) -> Result<bool, Error>,
    R: Read,
{
    const READ_BUFFER_SIZE: usize = 4 * 1024 * 1024; // 4M

    if chunk_size > READ_BUFFER_SIZE {
        bail!("chunk size too large!");
    }

    let mut buf = vec::undefined(READ_BUFFER_SIZE);

    let mut pos = 0;
    let mut file_pos = 0;
    loop {
        let mut eof = false;
        // continue filling the buffer after any leftover bytes kept from the
        // previous iteration (slicing from the start would overwrite them)
        let mut tmp = &mut buf[pos..];
        // try to read large portions, at least chunk_size
        while pos < chunk_size {
            match file.read(tmp) {
                Ok(0) => {
                    eof = true;
                    break;
                }
                Ok(n) => {
                    pos += n;
                    if pos > chunk_size {
                        break;
                    }
                    tmp = &mut tmp[n..];
                }
                Err(ref e) if e.kind() == ErrorKind::Interrupted => { /* try again */ }
                Err(e) => bail!("read chunk failed - {}", e.to_string()),
            }
        }
        let mut start = 0;
        while start + chunk_size <= pos {
            if !(chunk_cb)(file_pos, &buf[start..start + chunk_size])? {
                break;
            }
            file_pos += chunk_size;
            start += chunk_size;
        }
        if eof {
            if start < pos {
                (chunk_cb)(file_pos, &buf[start..pos])?;
                //file_pos += pos - start;
            }
            break;
        } else {
            let rest = pos - start;
            if rest > 0 {
                let ptr = buf.as_mut_ptr();
                unsafe {
                    std::ptr::copy_nonoverlapping(ptr.add(start), ptr, rest);
                }
                pos = rest;
            } else {
                pos = 0;
            }
        }
    }

    Ok(())
}

pub fn json_object_to_query(data: Value) -> Result<String, Error> {
    let mut query = url::form_urlencoded::Serializer::new(String::new());

    let object = data.as_object().ok_or_else(|| {
        format_err!("json_object_to_query: got wrong data type (expected object).")
    })?;

    for (key, value) in object {
        match value {
            Value::Bool(b) => {
                query.append_pair(key, &b.to_string());
            }
            Value::Number(n) => {
                query.append_pair(key, &n.to_string());
            }
            Value::String(s) => {
                query.append_pair(key, &s);
            }
            Value::Array(arr) => {
                for element in arr {
                    match element {
                        Value::Bool(b) => {
                            query.append_pair(key, &b.to_string());
                        }
                        Value::Number(n) => {
                            query.append_pair(key, &n.to_string());
                        }
                        Value::String(s) => {
                            query.append_pair(key, &s);
                        }
                        _ => bail!(
                            "json_object_to_query: unable to handle complex array data types."
                        ),
                    }
                }
            }
            _ => bail!("json_object_to_query: unable to handle complex data types."),
        }
    }

    Ok(query.finish())
}

pub fn required_string_param<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
    match param[name].as_str() {
        Some(s) => Ok(s),
        None => bail!("missing parameter '{}'", name),
    }
}

pub fn required_string_property<'a>(param: &'a Value, name: &str) -> Result<&'a str, Error> {
    match param[name].as_str() {
        Some(s) => Ok(s),
        None => bail!("missing property '{}'", name),
    }
}

pub fn required_integer_param(param: &Value, name: &str) -> Result<i64, Error> {
    match param[name].as_i64() {
        Some(s) => Ok(s),
        None => bail!("missing parameter '{}'", name),
    }
}

pub fn required_integer_property(param: &Value, name: &str) -> Result<i64, Error> {
    match param[name].as_i64() {
        Some(s) => Ok(s),
        None => bail!("missing property '{}'", name),
    }
}

pub fn required_array_param(param: &Value, name: &str) -> Result<Vec<Value>, Error> {
    match param[name].as_array() {
        Some(s) => Ok(s.to_vec()),
        None => bail!("missing parameter '{}'", name),
    }
}

pub fn required_array_property(param: &Value, name: &str) -> Result<Vec<Value>, Error> {
    match param[name].as_array() {
        Some(s) => Ok(s.to_vec()),
        None => bail!("missing property '{}'", name),
    }
}

pub fn complete_file_name<S: BuildHasher>(arg: &str, _param: &HashMap<String, String, S>) -> Vec<String> {
    let mut result = vec![];

    use nix::fcntl::AtFlags;
    use nix::fcntl::OFlag;
    use nix::sys::stat::Mode;

    let mut dirname = std::path::PathBuf::from(if arg.is_empty() { "./" } else { arg });

    let is_dir = match nix::sys::stat::fstatat(libc::AT_FDCWD, &dirname, AtFlags::empty()) {
        Ok(stat) => (stat.st_mode & libc::S_IFMT) == libc::S_IFDIR,
        Err(_) => false,
    };

    if !is_dir {
        if let Some(parent) = dirname.parent() {
            dirname = parent.to_owned();
        }
    }

    let mut dir =
        match nix::dir::Dir::openat(libc::AT_FDCWD, &dirname, OFlag::O_DIRECTORY, Mode::empty()) {
            Ok(d) => d,
            Err(_) => return result,
        };

    for item in dir.iter() {
        if let Ok(entry) = item {
            if let Ok(name) = entry.file_name().to_str() {
                if name == "." || name == ".." {
                    continue;
                }
                let mut newpath = dirname.clone();
                newpath.push(name);

                if let Ok(stat) =
                    nix::sys::stat::fstatat(libc::AT_FDCWD, &newpath, AtFlags::empty())
                {
                    if (stat.st_mode & libc::S_IFMT) == libc::S_IFDIR {
                        newpath.push("");
                        if let Some(newpath) = newpath.to_str() {
                            result.push(newpath.to_owned());
                        }
                        continue;
                    }
                }
                if let Some(newpath) = newpath.to_str() {
                    result.push(newpath.to_owned());
                }
            }
        }
    }

    result
}

/// Scan directory for matching file names.
///
/// Scan through all directory entries and call `callback()` function
/// if the entry name matches the regular expression. This function
/// uses the unix `openat()` call, so you can pass absolute or relative file
/// names. This function simply skips non-UTF8 encoded names.
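///
/// Usage sketch (illustrative only; the directory path and the pattern are
/// placeholders):
///
/// ```ignore
/// let regex = regex::Regex::new(r"\.conf$")?;
/// scandir(libc::AT_FDCWD, "/etc/some-dir", &regex, |_dirfd, name, file_type| {
///     if file_type == nix::dir::Type::File {
///         println!("found {}", name);
///     }
///     Ok(())
/// })?;
/// ```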
pub fn scandir<P, F>(
    dirfd: RawFd,
    path: &P,
    regex: &regex::Regex,
    mut callback: F,
) -> Result<(), Error>
where
    F: FnMut(RawFd, &str, nix::dir::Type) -> Result<(), Error>,
    P: ?Sized + nix::NixPath,
{
    for entry in self::fs::scan_subdir(dirfd, path, regex)? {
        let entry = entry?;
        let file_type = match entry.file_type() {
            Some(file_type) => file_type,
            None => bail!("unable to detect file type"),
        };

        callback(
            entry.parent_fd(),
            unsafe { entry.file_name_utf8_unchecked() },
            file_type,
        )?;
    }
    Ok(())
}

/// Shortcut for md5 sums.
pub fn md5sum(data: &[u8]) -> Result<DigestBytes, Error> {
    hash(MessageDigest::md5(), data).map_err(Error::from)
}

pub fn get_hardware_address() -> Result<String, Error> {
    static FILENAME: &str = "/etc/ssh/ssh_host_rsa_key.pub";

    let contents = proxmox::tools::fs::file_get_contents(FILENAME)?;
    let digest = md5sum(&contents)?;

    Ok(proxmox::tools::bin_to_hex(&digest))
}

pub fn assert_if_modified(digest1: &str, digest2: &str) -> Result<(), Error> {
    if digest1 != digest2 {
        bail!("detected modified configuration - file changed by other user? Try again.");
    }
    Ok(())
}

/// Extract a specific cookie from a cookie header.
/// We assume `cookie_name` is already URL-encoded.
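///
/// Usage sketch (illustrative only; the header value is made up):
///
/// ```ignore
/// let header = "PBSAuthCookie=user%40pbs; other=x";
/// let value = extract_cookie(header, "PBSAuthCookie");
/// assert_eq!(value.as_deref(), Some("user@pbs"));
/// ```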
pub fn extract_cookie(cookie: &str, cookie_name: &str) -> Option<String> {
    for pair in cookie.split(';') {
        let (name, value) = match pair.find('=') {
            Some(i) => (pair[..i].trim(), pair[(i + 1)..].trim()),
            None => return None, // Cookie format error
        };

        if name == cookie_name {
            use percent_encoding::percent_decode;
            if let Ok(value) = percent_decode(value.as_bytes()).decode_utf8() {
                return Some(value.into());
            } else {
                return None; // Cookie format error
            }
        }
    }

    None
}

pub fn join(data: &[String], sep: char) -> String {
    let mut list = String::new();

    for item in data {
        if !list.is_empty() {
            list.push(sep);
        }
        list.push_str(item);
    }

    list
}

/// Detect modified configuration files
///
/// This function fails with a reasonable error message if checksums do not match.
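///
/// Usage sketch (illustrative only; `stored_digest` and `client_digest` are
/// placeholders for the digest of the configuration currently on disk and the
/// digest the client read earlier):
///
/// ```ignore
/// let stored_digest: [u8; 32] = [0u8; 32];
/// let client_digest: [u8; 32] = [0u8; 32];
/// detect_modified_configuration_file(&client_digest, &stored_digest)?;
/// ```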
pub fn detect_modified_configuration_file(digest1: &[u8;32], digest2: &[u8;32]) -> Result<(), Error> {
    if digest1 != digest2 {
        bail!("detected modified configuration - file changed by other user? Try again.");
    }
    Ok(())
}

/// Normalize a URI path
///
/// Do not allow ".", "..", or hidden files ".XXXX".
/// Also remove empty path components.
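///
/// Example (illustrative only):
///
/// ```ignore
/// let (path, components) = normalize_uri_path("/api2//json/admin/datastore")?;
/// assert_eq!(path, "/api2/json/admin/datastore");
/// assert_eq!(components, vec!["api2", "json", "admin", "datastore"]);
/// ```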
pub fn normalize_uri_path(path: &str) -> Result<(String, Vec<&str>), Error> {
    let items = path.split('/');

    let mut path = String::new();
    let mut components = vec![];

    for name in items {
        if name.is_empty() {
            continue;
        }
        if name.starts_with('.') {
            bail!("Path contains illegal components.");
        }
        path.push('/');
        path.push_str(name);
        components.push(name);
    }

    Ok((path, components))
}

/// Helper to check the result of a `std::process::Command` output
///
/// The `exit_code_check()` function should return true if the exit code
/// is considered successful.
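///
/// Usage sketch (illustrative only; the command and the accepted exit codes are
/// arbitrary), using `run_command()` below, which feeds its output through this
/// check:
///
/// ```ignore
/// let mut command = std::process::Command::new("systemctl");
/// command.args(&["is-active", "some-unit.service"]);
/// // accept exit code 3 ("inactive") as a valid, non-fatal result
/// let stdout = run_command(command, Some(|code| code == 0 || code == 3))?;
/// ```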
pub fn command_output(
    output: std::process::Output,
    exit_code_check: Option<fn(i32) -> bool>,
) -> Result<String, Error> {

    if !output.status.success() {
        match output.status.code() {
            Some(code) => {
                let is_ok = match exit_code_check {
                    Some(check_fn) => check_fn(code),
                    None => code == 0,
                };
                if !is_ok {
                    let msg = String::from_utf8(output.stderr)
                        .map(|m| if m.is_empty() { String::from("no error message") } else { m })
                        .unwrap_or_else(|_| String::from("non utf8 error message (suppressed)"));

                    bail!("status code: {} - {}", code, msg);
                }
            }
            None => bail!("terminated by signal"),
        }
    }

    let output = String::from_utf8(output.stdout)?;

    Ok(output)
}

pub fn run_command(
    mut command: std::process::Command,
    exit_code_check: Option<fn(i32) -> bool>,
) -> Result<String, Error> {

    let output = command.output()
        .map_err(|err| format_err!("failed to execute {:?} - {}", command, err))?;

    let output = crate::tools::command_output(output, exit_code_check)
        .map_err(|err| format_err!("command {:?} failed - {}", command, err))?;

    Ok(output)
}

pub fn fd_change_cloexec(fd: RawFd, on: bool) -> Result<(), Error> {
    use nix::fcntl::{fcntl, FdFlag, F_GETFD, F_SETFD};
    let mut flags = FdFlag::from_bits(fcntl(fd, F_GETFD)?)
        .ok_or_else(|| format_err!("unhandled file flags"))?; // nix crate is stupid this way...
    flags.set(FdFlag::FD_CLOEXEC, on);
    fcntl(fd, F_SETFD(flags))?;
    Ok(())
}

static mut SHUTDOWN_REQUESTED: bool = false;

pub fn request_shutdown() {
    unsafe {
        SHUTDOWN_REQUESTED = true;
    }
    crate::server::server_shutdown();
}

#[inline(always)]
pub fn shutdown_requested() -> bool {
    unsafe { SHUTDOWN_REQUESTED }
}

pub fn fail_on_shutdown() -> Result<(), Error> {
    if shutdown_requested() {
        bail!("Server shutdown requested - aborting task");
    }
    Ok(())
}

/// safe wrapper for `nix::unistd::pipe2` defaulting to `O_CLOEXEC` and guarding the file
/// descriptors.
pub fn pipe() -> Result<(Fd, Fd), Error> {
    let (pin, pout) = nix::unistd::pipe2(nix::fcntl::OFlag::O_CLOEXEC)?;
    Ok((Fd(pin), Fd(pout)))
}

/// safe wrapper for `nix::sys::socket::socketpair` defaulting to `O_CLOEXEC` and guarding the file
/// descriptors.
pub fn socketpair() -> Result<(Fd, Fd), Error> {
    use nix::sys::socket;
    let (pa, pb) = socket::socketpair(
        socket::AddressFamily::Unix,
        socket::SockType::Stream,
        None,
        socket::SockFlag::SOCK_CLOEXEC,
    )?;
    Ok((Fd(pa), Fd(pb)))
}


/// An easy way to convert types to Any
///
/// Mostly useful to downcast trait objects (see RpcEnvironment).
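///
/// Usage sketch (illustrative only; `SomeStruct` is a placeholder type):
///
/// ```ignore
/// let value = SomeStruct::default();
/// let any_ref: &dyn Any = value.as_any();
/// assert!(any_ref.downcast_ref::<SomeStruct>().is_some());
/// ```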
pub trait AsAny {
    fn as_any(&self) -> &dyn Any;
}

impl<T: Any> AsAny for T {
    fn as_any(&self) -> &dyn Any {
        self
    }
}

/// This used to be: `SIMPLE_ENCODE_SET` plus space, `"`, `#`, `<`, `>`, backtick, `?`, `{`, `}`
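///
/// Usage sketch (illustrative only):
///
/// ```ignore
/// use percent_encoding::utf8_percent_encode;
///
/// let encoded: String = utf8_percent_encode("backup dir #1", DEFAULT_ENCODE_SET).collect();
/// assert_eq!(encoded, "backup%20dir%20%231");
/// ```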
pub const DEFAULT_ENCODE_SET: &AsciiSet = &percent_encoding::CONTROLS // 0x00..=0x1f and 0x7f (DEL)
    // the old SIMPLE_ENCODE_SET covered the C0 controls and everything above 0x7e;
    // space and 0x7f are added explicitly here (0x7f is already part of CONTROLS)
    .add(0x20)
    .add(0x7f)
    // the DEFAULT_ENCODE_SET added:
    .add(b' ')
    .add(b'"')
    .add(b'#')
    .add(b'<')
    .add(b'>')
    .add(b'`')
    .add(b'?')
    .add(b'{')
    .add(b'}');

/// Get an iterator over lines of a file, skipping empty lines and comments (lines starting with a
/// `#`).
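///
/// Usage sketch (illustrative only; the path is a placeholder):
///
/// ```ignore
/// for line in file_get_non_comment_lines("/etc/some.conf")? {
///     let line = line?;
///     println!("config line: {}", line);
/// }
/// ```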
pub fn file_get_non_comment_lines<P: AsRef<Path>>(
    path: P,
) -> Result<impl Iterator<Item = io::Result<String>>, Error> {
    let path = path.as_ref();

    Ok(io::BufReader::new(
        File::open(path).map_err(|err| format_err!("error opening {:?}: {}", path, err))?,
    )
    .lines()
    .filter_map(|line| match line {
        Ok(line) => {
            let line = line.trim();
            if line.is_empty() || line.starts_with('#') {
                None
            } else {
                Some(Ok(line.to_string()))
            }
        }
        Err(err) => Some(Err(err)),
    }))
}

pub fn setup_safe_path_env() {
    std::env::set_var("PATH", "/sbin:/bin:/usr/sbin:/usr/bin");
    // Make %ENV safer - as suggested by https://perldoc.perl.org/perlsec.html
    for name in &["IFS", "CDPATH", "ENV", "BASH_ENV"] {
        std::env::remove_var(name);
    }
}

pub fn strip_ascii_whitespace(line: &[u8]) -> &[u8] {
    let line = match line.iter().position(|&b| !b.is_ascii_whitespace()) {
        Some(n) => &line[n..],
        None => return &[],
    };
    match line.iter().rev().position(|&b| !b.is_ascii_whitespace()) {
        Some(n) => &line[..(line.len() - n)],
        None => &[],
    }
}

/// Seeks to start of file and computes the SHA256 hash
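///
/// Usage sketch (illustrative only; the path is a placeholder):
///
/// ```ignore
/// let mut file = File::open("/path/to/some.blob")?;
/// let (csum, size) = compute_file_csum(&mut file)?;
/// println!("sha256: {} ({} bytes)", proxmox::tools::bin_to_hex(&csum), size);
/// ```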
pub fn compute_file_csum(file: &mut File) -> Result<([u8; 32], u64), Error> {

    file.seek(SeekFrom::Start(0))?;

    let mut hasher = openssl::sha::Sha256::new();
    let mut buffer = proxmox::tools::vec::undefined(256*1024);
    let mut size: u64 = 0;

    loop {
        let count = match file.read(&mut buffer) {
            Ok(count) => count,
            Err(ref err) if err.kind() == std::io::ErrorKind::Interrupted => {
                continue;
            }
            Err(err) => return Err(err.into()),
        };
        if count == 0 {
            break;
        }
        size += count as u64;
        hasher.update(&buffer[..count]);
    }

    let csum = hasher.finish();

    Ok((csum, size))
}