//! Shared tools useful for common CLI clients.
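//!
//! # Example
//!
//! A minimal, illustrative sketch; the repository string below is made up and only shows the
//! general `user@realm@host:datastore` shape accepted by `BackupRepository`:
//!
//! ```no_run
//! # fn example() -> Result<(), anyhow::Error> {
//! // Hypothetical repository; password and fingerprint are taken from the
//! // PBS_PASSWORD and PBS_FINGERPRINT environment variables if set.
//! let repo: pbs_client::BackupRepository = "root@pam@pbs.example.com:store1".parse()?;
//! let _client = pbs_client::tools::connect(&repo)?;
//! # Ok(())
//! # }
//! ```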
use std::collections::HashMap;

use anyhow::{bail, format_err, Context, Error};
use serde_json::{json, Value};
use xdg::BaseDirectories;

use proxmox::{
    api::schema::*,
    tools::fs::file_get_json,
};

use pbs_api_types::{BACKUP_REPO_URL, Authid, UserWithTokens};
use pbs_datastore::BackupDir;
use pbs_tools::json::json_object_to_query;

use crate::{BackupRepository, HttpClient, HttpClientOptions};

pub mod key_source;

const ENV_VAR_PBS_FINGERPRINT: &str = "PBS_FINGERPRINT";
const ENV_VAR_PBS_PASSWORD: &str = "PBS_PASSWORD";

pub const REPO_URL_SCHEMA: Schema = StringSchema::new("Repository URL.")
    .format(&BACKUP_REPO_URL)
    .max_length(256)
    .schema();

pub const CHUNK_SIZE_SCHEMA: Schema = IntegerSchema::new("Chunk size in KB. Must be a power of 2.")
    .minimum(64)
    .maximum(4096)
    .default(4096)
    .schema();
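
/// Reads the default repository from the `PBS_REPOSITORY` environment variable, if set.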
pub fn get_default_repository() -> Option<String> {
    std::env::var("PBS_REPOSITORY").ok()
}
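
/// Extracts and parses the repository from the `repository` parameter, falling back to
/// [`get_default_repository`] when the parameter is missing.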
pub fn extract_repository_from_value(param: &Value) -> Result<BackupRepository, Error> {
    let repo_url = param["repository"]
        .as_str()
        .map(String::from)
        .or_else(get_default_repository)
        .ok_or_else(|| format_err!("unable to get (default) repository"))?;

    let repo: BackupRepository = repo_url.parse()?;

    Ok(repo)
}
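
/// Like [`extract_repository_from_value`], but reads from a string map and returns `None`
/// instead of an error when no valid repository is found.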
pub fn extract_repository_from_map(param: &HashMap<String, String>) -> Option<BackupRepository> {
    param
        .get("repository")
        .map(String::from)
        .or_else(get_default_repository)
        .and_then(|repo_url| repo_url.parse::<BackupRepository>().ok())
}
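
/// Builds an [`HttpClient`] for the given repository, taking the password and TLS fingerprint
/// from the `PBS_PASSWORD` and `PBS_FINGERPRINT` environment variables when present.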
pub fn connect(repo: &BackupRepository) -> Result<HttpClient, Error> {
    connect_do(repo.host(), repo.port(), repo.auth_id())
        .map_err(|err| format_err!("error building client for repository {} - {}", repo, err))
}

fn connect_do(server: &str, port: u16, auth_id: &Authid) -> Result<HttpClient, Error> {
    let fingerprint = std::env::var(ENV_VAR_PBS_FINGERPRINT).ok();

    use std::env::VarError::*;
    let password = match std::env::var(ENV_VAR_PBS_PASSWORD) {
        Ok(p) => Some(p),
        Err(NotUnicode(_)) => bail!("{} contains bad characters", ENV_VAR_PBS_PASSWORD),
        Err(NotPresent) => None,
    };

    let options = HttpClientOptions::new_interactive(password, fingerprint);

    HttpClient::new(server, port, auth_id, options)
}

/// Like `HttpClient::get`, but simply ignores errors and returns `Value::Null` instead.
pub async fn try_get(repo: &BackupRepository, url: &str) -> Value {

    let fingerprint = std::env::var(ENV_VAR_PBS_FINGERPRINT).ok();
    let password = std::env::var(ENV_VAR_PBS_PASSWORD).ok();

    // ticket cache, but no questions asked
    let options = HttpClientOptions::new_interactive(password, fingerprint)
        .interactive(false);

    let client = match HttpClient::new(repo.host(), repo.port(), repo.auth_id(), options) {
        Ok(v) => v,
        _ => return Value::Null,
    };

    let mut resp = match client.get(url, None).await {
        Ok(v) => v,
        _ => return Value::Null,
    };

    if let Some(map) = resp.as_object_mut() {
        if let Some(data) = map.remove("data") {
            return data;
        }
    }
    Value::Null
}
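
/// Shell completion helper listing the backup groups (`<type>/<id>`) of the repository in `param`.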
pub fn complete_backup_group(_arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    pbs_runtime::main(async { complete_backup_group_do(param).await })
}

pub async fn complete_backup_group_do(param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let repo = match extract_repository_from_map(param) {
        Some(v) => v,
        _ => return result,
    };

    let path = format!("api2/json/admin/datastore/{}/groups", repo.store());

    let data = try_get(&repo, &path).await;

    if let Some(list) = data.as_array() {
        for item in list {
            if let (Some(backup_id), Some(backup_type)) =
                (item["backup-id"].as_str(), item["backup-type"].as_str())
            {
                result.push(format!("{}/{}", backup_type, backup_id));
            }
        }
    }

    result
}
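
/// Shell completion helper completing backup groups while the argument contains fewer than two
/// slashes, and full snapshot paths otherwise.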
pub fn complete_group_or_snapshot(arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    pbs_runtime::main(async { complete_group_or_snapshot_do(arg, param).await })
}

pub async fn complete_group_or_snapshot_do(arg: &str, param: &HashMap<String, String>) -> Vec<String> {

    if arg.matches('/').count() < 2 {
        let groups = complete_backup_group_do(param).await;
        let mut result = vec![];
        for group in groups {
            result.push(group.to_string());
            result.push(format!("{}/", group));
        }
        return result;
    }

    complete_backup_snapshot_do(param).await
}
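
/// Shell completion helper listing the snapshots of the repository in `param`.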
pub fn complete_backup_snapshot(_arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    pbs_runtime::main(async { complete_backup_snapshot_do(param).await })
}

pub async fn complete_backup_snapshot_do(param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let repo = match extract_repository_from_map(param) {
        Some(v) => v,
        _ => return result,
    };

    let path = format!("api2/json/admin/datastore/{}/snapshots", repo.store());

    let data = try_get(&repo, &path).await;

    if let Some(list) = data.as_array() {
        for item in list {
            if let (Some(backup_id), Some(backup_type), Some(backup_time)) =
                (item["backup-id"].as_str(), item["backup-type"].as_str(), item["backup-time"].as_i64())
            {
                if let Ok(snapshot) = BackupDir::new(backup_type, backup_id, backup_time) {
                    result.push(snapshot.relative_path().to_str().unwrap().to_owned());
                }
            }
        }
    }

    result
}
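
/// Shell completion helper listing the file names of the snapshot given via the `snapshot` parameter.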
pub fn complete_server_file_name(_arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    pbs_runtime::main(async { complete_server_file_name_do(param).await })
}

pub async fn complete_server_file_name_do(param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let repo = match extract_repository_from_map(param) {
        Some(v) => v,
        _ => return result,
    };

    let snapshot: BackupDir = match param.get("snapshot") {
        Some(path) => {
            match path.parse() {
                Ok(v) => v,
                _ => return result,
            }
        }
        _ => return result,
    };

    let query = json_object_to_query(json!({
        "backup-type": snapshot.group().backup_type(),
        "backup-id": snapshot.group().backup_id(),
        "backup-time": snapshot.backup_time(),
    })).unwrap();

    let path = format!("api2/json/admin/datastore/{}/files?{}", repo.store(), query);

    let data = try_get(&repo, &path).await;

    if let Some(list) = data.as_array() {
        for item in list {
            if let Some(filename) = item["filename"].as_str() {
                result.push(filename.to_owned());
            }
        }
    }

    result
}
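
/// Shell completion helper: like [`complete_server_file_name`], but with the server-side
/// file extension stripped.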
pub fn complete_archive_name(arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    complete_server_file_name(arg, param)
        .iter()
        .map(|v| pbs_tools::format::strip_server_file_extension(&v))
        .collect()
}
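
/// Shell completion helper listing pxar archives (`.pxar.didx` files) with the server-side
/// extension stripped.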
pub fn complete_pxar_archive_name(arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    complete_server_file_name(arg, param)
        .iter()
        .filter_map(|name| {
            if name.ends_with(".pxar.didx") {
                Some(pbs_tools::format::strip_server_file_extension(name))
            } else {
                None
            }
        })
        .collect()
}
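
/// Shell completion helper listing image archives (`.img.fidx` files) with the server-side
/// extension stripped.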
pub fn complete_img_archive_name(arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    complete_server_file_name(arg, param)
        .iter()
        .filter_map(|name| {
            if name.ends_with(".img.fidx") {
                Some(pbs_tools::format::strip_server_file_extension(name))
            } else {
                None
            }
        })
        .collect()
}
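
/// Shell completion helper listing the allowed chunk sizes (powers of two from 64 to 4096 KB).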
pub fn complete_chunk_size(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let mut size = 64;
    loop {
        result.push(size.to_string());
        size *= 2;
        if size > 4096 { break; }
    }

    result
}
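
/// Shell completion helper listing the user ids and API token ids known to the server.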
pub fn complete_auth_id(_arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    pbs_runtime::main(async { complete_auth_id_do(param).await })
}

pub async fn complete_auth_id_do(param: &HashMap<String, String>) -> Vec<String> {

    let mut result = vec![];

    let repo = match extract_repository_from_map(param) {
        Some(v) => v,
        _ => return result,
    };

    let data = try_get(&repo, "api2/json/access/users?include_tokens=true").await;

    if let Ok(parsed) = serde_json::from_value::<Vec<UserWithTokens>>(data) {
        for user in parsed {
            result.push(user.userid.to_string());
            for token in user.tokens {
                result.push(token.tokenid.to_string());
            }
        }
    };

    result
}
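
/// Shell completion helper listing previously used repositories from the cached `repo-list` file.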
pub fn complete_repository(_arg: &str, _param: &HashMap<String, String>) -> Vec<String> {
    let mut result = vec![];

    let base = match BaseDirectories::with_prefix("proxmox-backup") {
        Ok(v) => v,
        _ => return result,
    };

    // usually $HOME/.cache/proxmox-backup/repo-list
    let path = match base.place_cache_file("repo-list") {
        Ok(v) => v,
        _ => return result,
    };

    let data = file_get_json(&path, None).unwrap_or_else(|_| json!({}));

    if let Some(map) = data.as_object() {
        for (repo, _count) in map {
            result.push(repo.to_owned());
        }
    }

    result
}
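
/// Shell completion helper for backup source specifications of the form `<archive-name>:<path>`.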
pub fn complete_backup_source(arg: &str, param: &HashMap<String, String>) -> Vec<String> {
    let mut result = vec![];

    let data: Vec<&str> = arg.splitn(2, ':').collect();

    if data.len() != 2 {
        result.push(String::from("root.pxar:/"));
        result.push(String::from("etc.pxar:/etc"));
        return result;
    }

    let files = pbs_tools::fs::complete_file_name(data[1], param);

    for file in files {
        result.push(format!("{}:{}", data[0], file));
    }

    result
}
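
/// Returns the XDG base directories for the `proxmox-backup` prefix.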
pub fn base_directories() -> Result<xdg::BaseDirectories, Error> {
    xdg::BaseDirectories::with_prefix("proxmox-backup").map_err(Error::from)
}

/// Convenience helper for better error messages: searches the XDG config directories for `file_name`.
pub fn find_xdg_file(
    file_name: impl AsRef<std::path::Path>,
    description: &'static str,
) -> Result<Option<std::path::PathBuf>, Error> {
    let file_name = file_name.as_ref();
    base_directories()
        .map(|base| base.find_config_file(file_name))
        .with_context(|| format!("error searching for {}", description))
}
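
/// Convenience helper for better error messages: computes the path for `file_name` below the
/// XDG config home, creating leading directories as needed.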
pub fn place_xdg_file(
    file_name: impl AsRef<std::path::Path>,
    description: &'static str,
) -> Result<std::path::PathBuf, Error> {
    let file_name = file_name.as_ref();
    base_directories()
        .and_then(|base| base.place_config_file(file_name).map_err(Error::from))
        .with_context(|| format!("failed to place {} in xdg home", description))
}