// [proxmox-backup.git] src/bin/proxmox-backup-client.rs
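//! Command line client for the proxmox-backup API. It provides the
//! `create`, `list`, `garbage-collect` and `prune` sub commands
//! registered in `main` below.
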
extern crate proxmox_backup;

use failure::*;
//use std::os::unix::io::AsRawFd;
use chrono::{Local, TimeZone};

use proxmox_backup::tools;
use proxmox_backup::cli::*;
use proxmox_backup::api_schema::*;
use proxmox_backup::api_schema::router::*;
use proxmox_backup::client::*;
use proxmox_backup::backup::*;
//use proxmox_backup::backup::image_index::*;
//use proxmox_backup::config::datastore;
//use proxmox_backup::catar::encoder::*;
//use proxmox_backup::backup::datastore::*;

use serde_json::{Value};
use hyper::Body;
use std::sync::Arc;
use regex::Regex;

use lazy_static::lazy_static;

lazy_static! {
    static ref BACKUPSPEC_REGEX: Regex = Regex::new(r"^([a-zA-Z0-9_-]+):(.+)$").unwrap();
}

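/// Upload a catar (directory archive) body to the datastore of `repo`,
/// tagging the upload with the archive name, the local node name as
/// backup id, the given backup time and an optional chunk size.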
fn backup_directory(
    client: &mut HttpClient,
    repo: &BackupRepository,
    body: Body,
    archive_name: &str,
    backup_time: u64,
    chunk_size: Option<u64>,
) -> Result<(), Error> {

    let mut query = url::form_urlencoded::Serializer::new(String::new());

    query
        .append_pair("archive_name", archive_name)
        .append_pair("type", "host")
        .append_pair("id", &tools::nodename())
        .append_pair("time", &backup_time.to_string());

    if let Some(size) = chunk_size {
        query.append_pair("chunk-size", &size.to_string());
    }

    let query = query.finish();

    let path = format!("api2/json/admin/datastore/{}/catar?{}", repo.store, query);

    client.upload("application/x-proxmox-backup-catar", body, &path)?;

    Ok(())
}

/****
fn backup_image(datastore: &DataStore, file: &std::fs::File, size: usize, target: &str, chunk_size: usize) -> Result<(), Error> {

    let mut target = PathBuf::from(target);

    if let Some(ext) = target.extension() {
        if ext != "fidx" {
            bail!("got wrong file extension - expected '.fidx'");
        }
    } else {
        target.set_extension("fidx");
    }

    let mut index = datastore.create_image_writer(&target, size, chunk_size)?;

    tools::file_chunker(file, chunk_size, |pos, chunk| {
        index.add_chunk(pos, chunk)?;
        Ok(true)
    })?;

    index.close()?; // commit changes

    Ok(())
}
*/

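/// List the backups of a remote datastore and print one line per
/// archive file (backup type, id, time and file name).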
fn list_backups(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;
    let repo = BackupRepository::parse(repo_url)?;

    let mut client = HttpClient::new(&repo.host, &repo.user);

    let path = format!("api2/json/admin/datastore/{}/backups", repo.store);

    let result = client.get(&path)?;

    // fixme: implement and use output formatter instead ..
    let list = result["data"].as_array().unwrap();

    for item in list {

        let id = item["backup_id"].as_str().unwrap();
        let btype = item["backup_type"].as_str().unwrap();
        let epoch = item["backup_time"].as_i64().unwrap();

        let time_str = Local.timestamp(epoch, 0).format("%c");

        let files = item["files"].as_array().unwrap();

        for file in files {
            let filename = file.as_str().unwrap();
            println!("| {} | {} | {} | {}", btype, id, time_str, filename);
        }
    }

    //Ok(result)
    Ok(Value::Null)
}

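/// Start garbage collection on the remote datastore and return the
/// API result.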
fn start_garbage_collection(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;
    let repo = BackupRepository::parse(repo_url)?;

    let mut client = HttpClient::new(&repo.host, &repo.user);

    let path = format!("api2/json/admin/datastore/{}/gc", repo.store);

    let result = client.post(&path)?;

    Ok(result)
}

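/// Split a backup specification of the form `<label>:<path>` into its
/// label and path components.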
fn parse_backupspec(value: &str) -> Result<(&str, &str), Error> {

    if let Some(caps) = BACKUPSPEC_REGEX.captures(value) {
        return Ok((caps.get(1).unwrap().as_str(), caps.get(2).unwrap().as_str()));
    }
    bail!("unable to parse directory specification '{}'", value);
}

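/// Create a new backup: open a catar stream for every directory given
/// in the backupspec list and upload the resulting archives to the
/// repository. File and block device sources are not implemented yet.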
fn create_backup(
    param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;

    let backupspec_list = tools::required_array_param(&param, "backupspec")?;

    let repo = BackupRepository::parse(repo_url)?;

    let chunk_size_opt = param["chunk-size"].as_u64().map(|v| v*1024);

    if let Some(size) = chunk_size_opt {
        verify_chunk_size(size)?;
    }

    let mut upload_list = vec![];

    for backupspec in backupspec_list {
        let (target, filename) = parse_backupspec(backupspec.as_str().unwrap())?;

        let stat = match nix::sys::stat::stat(filename) {
            Ok(s) => s,
            Err(err) => bail!("unable to access '{}' - {}", filename, err),
        };

        if (stat.st_mode & libc::S_IFMT) == libc::S_IFDIR {
            let stream = CaTarBackupStream::open(filename)?;

            let body = Body::wrap_stream(stream);

            let target = format!("{}.catar", target);

            upload_list.push((body, filename.to_owned(), target));

        } else if (stat.st_mode & libc::S_IFMT) == libc::S_IFREG || (stat.st_mode & libc::S_IFMT) == libc::S_IFBLK {
            if stat.st_size <= 0 { bail!("got strange file size '{}'", stat.st_size); }
            let _size = stat.st_size as usize;

            panic!("implement me");

            //backup_image(&datastore, &file, size, &target, chunk_size)?;

            // let idx = datastore.open_image_reader(target)?;
            // idx.print_info();

        } else {
            bail!("unsupported file type (expected a directory, file or block device)");
        }
    }

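    // Use a single timestamp for all archives uploaded in this run.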
    let backup_time = std::time::SystemTime::now().duration_since(
        std::time::SystemTime::UNIX_EPOCH)?.as_secs();

    let mut client = HttpClient::new(&repo.host, &repo.user);

    for (body, filename, target) in upload_list {
        println!("Upload '{}' to '{:?}'", filename, repo);
        backup_directory(&mut client, &repo, body, &target, backup_time, chunk_size_opt)?;
    }

    //datastore.garbage_collection()?;

    Ok(Value::Null)

}

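/// Shell completion helper for backup source specifications: keeps the
/// `<label>:` prefix and completes the path part as a file name.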
pub fn complete_backup_source(arg: &str) -> Vec<String> {

    let mut result = vec![];

    let data: Vec<&str> = arg.splitn(2, ':').collect();

    if data.len() != 2 { return result; }

    let files = tools::complete_file_name(data[1]);

    for file in files {
        result.push(format!("{}:{}", data[0], file));
    }

    result
}

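/// Prune a backup repository by forwarding the prune parameters to the
/// remote datastore API.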
fn prune(
    mut param: Value,
    _info: &ApiMethod,
    _rpcenv: &mut RpcEnvironment,
) -> Result<Value, Error> {

    let repo_url = tools::required_string_param(&param, "repository")?;
    let repo = BackupRepository::parse(repo_url)?;

    let mut client = HttpClient::new(&repo.host, &repo.user);

    let path = format!("api2/json/admin/datastore/{}/prune", repo.store);

    param.as_object_mut().unwrap().remove("repository");

    let result = client.post_json(&path, param)?;

    Ok(result)
}

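/// Define the CLI schemas for the `create`, `list`, `garbage-collect`
/// and `prune` sub commands and run the selected command.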
fn main() {

    let repo_url_schema: Arc<Schema> = Arc::new(
        StringSchema::new("Repository URL.")
            .format(BACKUP_REPO_URL.clone())
            .max_length(256)
            .into()
    );

    let backup_source_schema: Arc<Schema> = Arc::new(
        StringSchema::new("Backup source specification ([<label>:<path>]).")
            .format(Arc::new(ApiStringFormat::Pattern(&BACKUPSPEC_REGEX)))
            .into()
    );

    let create_cmd_def = CliCommand::new(
        ApiMethod::new(
            create_backup,
            ObjectSchema::new("Create backup.")
                .required("repository", repo_url_schema.clone())
                .required(
                    "backupspec",
                    ArraySchema::new(
                        "List of backup source specifications ([<label>:<path>] ...)",
                        backup_source_schema,
                    ).min_length(1)
                )
                .optional(
                    "chunk-size",
                    IntegerSchema::new("Chunk size in KB. Must be a power of 2.")
                        .minimum(64)
                        .maximum(4096)
                        .default(4096)
                )
        ))
        .arg_param(vec!["repository", "backupspec"])
        .completion_cb("backupspec", complete_backup_source);

    let list_cmd_def = CliCommand::new(
        ApiMethod::new(
            list_backups,
            ObjectSchema::new("List backups.")
                .required("repository", repo_url_schema.clone())
        ))
        .arg_param(vec!["repository"]);

    let garbage_collect_cmd_def = CliCommand::new(
        ApiMethod::new(
            start_garbage_collection,
            ObjectSchema::new("Start garbage collection for a specific repository.")
                .required("repository", repo_url_schema.clone())
        ))
        .arg_param(vec!["repository"]);

    let prune_cmd_def = CliCommand::new(
        ApiMethod::new(
            prune,
            proxmox_backup::api2::admin::datastore::add_common_prune_prameters(
                ObjectSchema::new("Prune backup repository.")
                    .required("repository", repo_url_schema.clone())
            )
        ))
        .arg_param(vec!["repository"]);
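
    // Map the sub command names to their definitions and hand control
    // to the generic CLI runner.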
    let cmd_def = CliCommandMap::new()
        .insert("create".to_owned(), create_cmd_def.into())
        .insert("garbage-collect".to_owned(), garbage_collect_cmd_def.into())
        .insert("list".to_owned(), list_cmd_def.into())
        .insert("prune".to_owned(), prune_cmd_def.into());

    run_cli_command(cmd_def.into());
}