//! Shared tools useful for common CLI clients.
use std::collections::HashMap;
use std::fs::File;
use std::os::unix::io::FromRawFd;
use std::env::VarError::{NotUnicode, NotPresent};
use std::io::{BufReader, BufRead};
use std::process::Command;

use anyhow::{bail, format_err, Context, Error};
use serde_json::{json, Value};
use xdg::BaseDirectories;

use proxmox_schema::*;
use proxmox_router::cli::shellword_split;
use proxmox::tools::fs::file_get_json;

use pbs_api_types::{BACKUP_REPO_URL, Authid, UserWithTokens};
use pbs_datastore::BackupDir;
use pbs_tools::json::json_object_to_query;

use crate::{BackupRepository, HttpClient, HttpClientOptions};

pub mod key_source;

const ENV_VAR_PBS_FINGERPRINT: &str = "PBS_FINGERPRINT";
const ENV_VAR_PBS_PASSWORD: &str = "PBS_PASSWORD";

pub const REPO_URL_SCHEMA: Schema = StringSchema::new("Repository URL.")
    .format(&BACKUP_REPO_URL)
    .max_length(256)
    .schema();

pub const CHUNK_SIZE_SCHEMA: Schema = IntegerSchema::new("Chunk size in KB. Must be a power of 2.")
    .minimum(64)
    .maximum(4096)
    .default(4096)
    .schema();

/// Helper to read a secret through an environment variable (ENV).
///
/// Tries the following variable names in order and returns the value
/// resolved from the first one that is defined:
///
/// BASE_NAME => use the value of ENV(BASE_NAME) directly as the secret
/// BASE_NAME_FD => read the secret from the specified file descriptor
/// BASE_NAME_FILE => read the secret from the specified file name
/// BASE_NAME_CMD => read the secret from the first line the specified command prints to stdout
///
/// Only the first line of data is returned (without CRLF).
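///
/// # Example
///
/// A minimal usage sketch; `PBS_PASSWORD` is simply the base name this crate
/// itself consults, any other base name works the same way:
///
/// ```no_run
/// # fn main() -> Result<(), anyhow::Error> {
/// // With e.g. PBS_PASSWORD_FILE=/run/secrets/pbs-pass set, this yields the
/// // first line of that file; with no matching variable set it yields Ok(None).
/// let secret = pbs_client::tools::get_secret_from_env("PBS_PASSWORD")?;
/// # let _ = secret;
/// # Ok(())
/// # }
/// ```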
pub fn get_secret_from_env(base_name: &str) -> Result<Option<String>, Error> {

    let firstline = |data: String| -> String {
        match data.lines().next() {
            Some(line) => line.to_string(),
            None => String::new(),
        }
    };

    let firstline_file = |file: &mut File| -> Result<String, Error> {
        let reader = BufReader::new(file);
        match reader.lines().next() {
            Some(Ok(line)) => Ok(line),
            Some(Err(err)) => Err(err.into()),
            None => Ok(String::new()),
        }
    };

    match std::env::var(base_name) {
        Ok(p) => return Ok(Some(firstline(p))),
        Err(NotUnicode(_)) => bail!("{} contains bad characters", base_name),
        Err(NotPresent) => {},
    };

    let env_name = format!("{}_FD", base_name);
    match std::env::var(&env_name) {
        Ok(fd_str) => {
            let fd: i32 = fd_str.parse()
                .map_err(|err| format_err!("unable to parse file descriptor in ENV({}): {}", env_name, err))?;
            let mut file = unsafe { File::from_raw_fd(fd) };
            return Ok(Some(firstline_file(&mut file)?));
        }
        Err(NotUnicode(_)) => bail!("{} contains bad characters", env_name),
        Err(NotPresent) => {},
    }

    let env_name = format!("{}_FILE", base_name);
    match std::env::var(&env_name) {
        Ok(filename) => {
            let mut file = std::fs::File::open(filename)
                .map_err(|err| format_err!("unable to open file in ENV({}): {}", env_name, err))?;
            return Ok(Some(firstline_file(&mut file)?));
        }
        Err(NotUnicode(_)) => bail!("{} contains bad characters", env_name),
        Err(NotPresent) => {},
    }

    let env_name = format!("{}_CMD", base_name);
    match std::env::var(&env_name) {
        Ok(ref command) => {
            let args = shellword_split(command)?;
            let mut command = Command::new(&args[0]);
            command.args(&args[1..]);
            let output = pbs_tools::run_command(command, None)?;
            return Ok(Some(firstline(output)));
        }
        Err(NotUnicode(_)) => bail!("{} contains bad characters", env_name),
        Err(NotPresent) => {},
    }

    Ok(None)
}

/// Get the default repository from the `PBS_REPOSITORY` environment variable.
pub fn get_default_repository() -> Option<String> {
    std::env::var("PBS_REPOSITORY").ok()
}

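/// Extract the backup repository from CLI parameters, falling back to the
/// `PBS_REPOSITORY` environment variable when no parameter is given.
///
/// Illustrative sketch (the repository string below is made up):
///
/// ```no_run
/// # fn main() -> Result<(), anyhow::Error> {
/// let param = serde_json::json!({ "repository": "root@pam@backup.example.org:store1" });
/// let repo = pbs_client::tools::extract_repository_from_value(&param)?;
/// println!("using repository {}", repo);
/// # Ok(())
/// # }
/// ```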
pub fn extract_repository_from_value(param: &Value) -> Result<BackupRepository, Error> {
    let repo_url = param["repository"]
        .as_str()
        .map(String::from)
        .or_else(get_default_repository)
        .ok_or_else(|| format_err!("unable to get (default) repository"))?;

    let repo: BackupRepository = repo_url.parse()?;

    Ok(repo)
}

/// Same as [`extract_repository_from_value`], but for the string maps used by
/// shell completion callbacks; returns `None` instead of an error.
pub fn extract_repository_from_map(param: &HashMap<String, String>) -> Option<BackupRepository> {
    param
        .get("repository")
        .map(String::from)
        .or_else(get_default_repository)
        .and_then(|repo_url| repo_url.parse::<BackupRepository>().ok())
}

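/// Build an authenticated [`HttpClient`] for the given repository. The password
/// is read from `PBS_PASSWORD` (or its `_FD`/`_FILE`/`_CMD` variants) and the
/// TLS fingerprint from `PBS_FINGERPRINT`, if set.
///
/// Usage sketch (the repository string and datastore name are illustrative):
///
/// ```no_run
/// # fn main() -> Result<(), anyhow::Error> {
/// let repo: pbs_client::BackupRepository = "root@pam@backup.example.org:store1".parse()?;
/// let client = pbs_client::tools::connect(&repo)?;
/// # let _ = client;
/// # Ok(())
/// # }
/// ```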
pub fn connect(repo: &BackupRepository) -> Result<HttpClient, Error> {
    connect_do(repo.host(), repo.port(), repo.auth_id())
        .map_err(|err| format_err!("error building client for repository {} - {}", repo, err))
}

fn connect_do(server: &str, port: u16, auth_id: &Authid) -> Result<HttpClient, Error> {
    let fingerprint = std::env::var(ENV_VAR_PBS_FINGERPRINT).ok();

    let password = get_secret_from_env(ENV_VAR_PBS_PASSWORD)?;
    let options = HttpClientOptions::new_interactive(password, fingerprint);

    HttpClient::new(server, port, auth_id, options)
}

/// Like `get`, but simply ignores errors and returns `Value::Null` instead.
pub async fn try_get(repo: &BackupRepository, url: &str) -> Value {

    let fingerprint = std::env::var(ENV_VAR_PBS_FINGERPRINT).ok();
    let password = get_secret_from_env(ENV_VAR_PBS_PASSWORD).unwrap_or(None);

    // ticket cache, but no questions asked
    let options = HttpClientOptions::new_interactive(password, fingerprint)
        .interactive(false);

    let client = match HttpClient::new(repo.host(), repo.port(), repo.auth_id(), options) {
        Ok(v) => v,
        _ => return Value::Null,
    };

    let mut resp = match client.get(url, None).await {
        Ok(v) => v,
        _ => return Value::Null,
    };

    if let Some(map) = resp.as_object_mut() {
        if let Some(data) = map.remove("data") {
            return data;
        }
    }
    Value::Null
}

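// The completion helpers below fetch their candidate lists through `try_get`.
// PBS API responses wrap the payload in a top-level "data" member, roughly like
// this (illustrative, fields abridged):
//
//     { "data": [ { "backup-type": "vm", "backup-id": "100", ... } ] }
//
// so by the time the helpers run, `try_get` has already unwrapped "data".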
pub fn complete_backup_group(_arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    pbs_runtime::main(async { complete_backup_group_do(param).await })
}

pub async fn complete_backup_group_do(param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let repo = match extract_repository_from_map(param) {
        Some(v) => v,
        _ => return result,
    };

    let path = format!("api2/json/admin/datastore/{}/groups", repo.store());

    let data = try_get(&repo, &path).await;

    if let Some(list) = data.as_array() {
        for item in list {
            if let (Some(backup_id), Some(backup_type)) =
                (item["backup-id"].as_str(), item["backup-type"].as_str())
            {
                result.push(format!("{}/{}", backup_type, backup_id));
            }
        }
    }

    result
}

pub fn complete_group_or_snapshot(arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    pbs_runtime::main(async { complete_group_or_snapshot_do(arg, param).await })
}

pub async fn complete_group_or_snapshot_do(arg: &str, param: &HashMap<String, String>) -> Vec<String> {

    if arg.matches('/').count() < 2 {
        let groups = complete_backup_group_do(param).await;
        let mut result = vec![];
        for group in groups {
            result.push(group.to_string());
            result.push(format!("{}/", group));
        }
        return result;
    }

    complete_backup_snapshot_do(param).await
}

pub fn complete_backup_snapshot(_arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    pbs_runtime::main(async { complete_backup_snapshot_do(param).await })
}

pub async fn complete_backup_snapshot_do(param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let repo = match extract_repository_from_map(param) {
        Some(v) => v,
        _ => return result,
    };

    let path = format!("api2/json/admin/datastore/{}/snapshots", repo.store());

    let data = try_get(&repo, &path).await;

    if let Some(list) = data.as_array() {
        for item in list {
            if let (Some(backup_id), Some(backup_type), Some(backup_time)) =
                (item["backup-id"].as_str(), item["backup-type"].as_str(), item["backup-time"].as_i64())
            {
                if let Ok(snapshot) = BackupDir::new(backup_type, backup_id, backup_time) {
                    result.push(snapshot.relative_path().to_str().unwrap().to_owned());
                }
            }
        }
    }

    result
}

pub fn complete_server_file_name(_arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    pbs_runtime::main(async { complete_server_file_name_do(param).await })
}

pub async fn complete_server_file_name_do(param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let repo = match extract_repository_from_map(param) {
        Some(v) => v,
        _ => return result,
    };

    let snapshot: BackupDir = match param.get("snapshot") {
        Some(path) => {
            match path.parse() {
                Ok(v) => v,
                _ => return result,
            }
        }
        _ => return result,
    };

    let query = json_object_to_query(json!({
        "backup-type": snapshot.group().backup_type(),
        "backup-id": snapshot.group().backup_id(),
        "backup-time": snapshot.backup_time(),
    })).unwrap();

    let path = format!("api2/json/admin/datastore/{}/files?{}", repo.store(), query);

    let data = try_get(&repo, &path).await;

    if let Some(list) = data.as_array() {
        for item in list {
            if let Some(filename) = item["filename"].as_str() {
                result.push(filename.to_owned());
            }
        }
    }

    result
}

pub fn complete_archive_name(arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    complete_server_file_name(arg, param)
        .iter()
        .map(|v| pbs_tools::format::strip_server_file_extension(&v).to_owned())
        .collect()
}

pub fn complete_pxar_archive_name(arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    complete_server_file_name(arg, param)
        .iter()
        .filter_map(|name| {
            if name.ends_with(".pxar.didx") {
                Some(pbs_tools::format::strip_server_file_extension(name).to_owned())
            } else {
                None
            }
        })
        .collect()
}

pub fn complete_img_archive_name(arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    complete_server_file_name(arg, param)
        .iter()
        .filter_map(|name| {
            if name.ends_with(".img.fidx") {
                Some(pbs_tools::format::strip_server_file_extension(name).to_owned())
            } else {
                None
            }
        })
        .collect()
}

pub fn complete_chunk_size(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let mut size = 64;
    loop {
        result.push(size.to_string());
        size *= 2;
        if size > 4096 { break; }
    }

    result
}

pub fn complete_auth_id(_arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    pbs_runtime::main(async { complete_auth_id_do(param).await })
}

pub async fn complete_auth_id_do(param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let repo = match extract_repository_from_map(param) {
        Some(v) => v,
        _ => return result,
    };

    let data = try_get(&repo, "api2/json/access/users?include_tokens=true").await;

    if let Ok(parsed) = serde_json::from_value::<Vec<UserWithTokens>>(data) {
        for user in parsed {
            result.push(user.userid.to_string());
            for token in user.tokens {
                result.push(token.tokenid.to_string());
            }
        }
    };

    result
}

pub fn complete_repository(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {
    let mut result = vec![];

    let base = match BaseDirectories::with_prefix("proxmox-backup") {
        Ok(v) => v,
        _ => return result,
    };

    // usually $HOME/.cache/proxmox-backup/repo-list
    let path = match base.place_cache_file("repo-list") {
        Ok(v) => v,
        _ => return result,
    };

    let data = file_get_json(&path, None).unwrap_or_else(|_| json!({}));

    if let Some(map) = data.as_object() {
        for (repo, _count) in map {
            result.push(repo.to_owned());
        }
    }

    result
}
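
// The completion callbacks in this module are meant to be registered on
// proxmox-router CLI command definitions. Rough sketch (the API method and
// parameter names below are hypothetical):
//
//     let cmd = CliCommand::new(&API_METHOD_EXAMPLE)
//         .arg_param(&["snapshot"])
//         .completion_cb("repository", complete_repository)
//         .completion_cb("snapshot", complete_backup_snapshot);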

pub fn complete_backup_source(arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    let mut result = vec![];

    let data: Vec<&str> = arg.splitn(2, ':').collect();

    if data.len() != 2 {
        result.push(String::from("root.pxar:/"));
        result.push(String::from("etc.pxar:/etc"));
        return result;
    }

    let files = pbs_tools::fs::complete_file_name(data[1], param);

    for file in files {
        result.push(format!("{}:{}", data[0], file));
    }

    result
}

pub fn base_directories() -> Result<xdg::BaseDirectories, Error> {
    xdg::BaseDirectories::with_prefix("proxmox-backup").map_err(Error::from)
}

/// Convenience helper for better error messages.
pub fn find_xdg_file(
    file_name: impl AsRef<std::path::Path>,
    description: &'static str,
) -> Result<Option<std::path::PathBuf>, Error> {
    let file_name = file_name.as_ref();
    base_directories()
        .map(|base| base.find_config_file(file_name))
        .with_context(|| format!("error searching for {}", description))
}

pub fn place_xdg_file(
    file_name: impl AsRef<std::path::Path>,
    description: &'static str,
) -> Result<std::path::PathBuf, Error> {
    let file_name = file_name.as_ref();
    base_directories()
        .and_then(|base| base.place_config_file(file_name).map_err(Error::from))
        .with_context(|| format!("failed to place {} in xdg home", description))
}
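
// Usage sketch for the XDG helpers above (the file name is hypothetical; the
// description string only appears in error messages):
//
//     let path = place_xdg_file("example.json", "example state file")?;
//     std::fs::write(&path, b"{}")?;
//
//     if let Some(existing) = find_xdg_file("example.json", "example state file")? {
//         let data = std::fs::read(existing)?;
//     }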