// proxmox-backup: src/api2/admin/sync.rs
// (commit: "tree-wide: fix needless borrows")
1 //! Datastore Synchronization Job Management
2
3 use anyhow::{bail, format_err, Error};
4 use serde_json::Value;
5
6 use proxmox_sys::sortable;
7 use proxmox_router::{
8 list_subdirs_api_method, ApiMethod, Router, RpcEnvironment, RpcEnvironmentType, SubdirMap,
9 Permission,
10 };
11 use proxmox_schema::api;
12
13 use pbs_api_types::{DATASTORE_SCHEMA, JOB_ID_SCHEMA, Authid, SyncJobConfig, SyncJobStatus};
14 use pbs_config::sync;
15 use pbs_config::CachedUserInfo;
16
17 use crate::{
18 api2::{
19 pull::do_sync_job,
20 config::sync::{
21 check_sync_job_modify_access,
22 check_sync_job_read_access,
23 },
24 },
25 server::{
26 jobstate::{
27 Job,
28 JobState,
29 compute_schedule_status,
30 },
31 },
32 };
33
34 #[api(
35 input: {
36 properties: {
37 store: {
38 schema: DATASTORE_SCHEMA,
39 optional: true,
40 },
41 },
42 },
43 returns: {
44 description: "List configured jobs and their status.",
45 type: Array,
46 items: { type: SyncJobStatus },
47 },
48 access: {
49 description: "Limited to sync jobs where user has Datastore.Audit on target datastore, and Remote.Audit on source remote.",
50 permission: &Permission::Anybody,
51 },
52 )]
53 /// List all sync jobs
54 pub fn list_sync_jobs(
55 store: Option<String>,
56 _param: Value,
57 mut rpcenv: &mut dyn RpcEnvironment,
58 ) -> Result<Vec<SyncJobStatus>, Error> {
59
60 let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
61 let user_info = CachedUserInfo::new()?;
62
63 let (config, digest) = sync::config()?;
64
65 let job_config_iter = config
66 .convert_to_typed_array("sync")?
67 .into_iter()
68 .filter(|job: &SyncJobConfig| {
69 if let Some(store) = &store {
70 &job.store == store
71 } else {
72 true
73 }
74 })
75 .filter(|job: &SyncJobConfig| {
76 check_sync_job_read_access(&user_info, &auth_id, job)
77 });
78
79 let mut list = Vec::new();
80
81 for job in job_config_iter {
82 let last_state = JobState::load("syncjob", &job.id)
83 .map_err(|err| format_err!("could not open statefile for {}: {}", &job.id, err))?;
84
85 let status = compute_schedule_status(&last_state, job.schedule.as_deref())?;
86
87 list.push(SyncJobStatus { config: job, status });
88 }
89
90 rpcenv["digest"] = hex::encode(&digest).into();
91
92 Ok(list)
93 }
94
95 #[api(
96 input: {
97 properties: {
98 id: {
99 schema: JOB_ID_SCHEMA,
100 }
101 }
102 },
103 access: {
104 description: "User needs Datastore.Backup on target datastore, and Remote.Read on source remote. Additionally, remove_vanished requires Datastore.Prune, and any owner other than the user themselves requires Datastore.Modify",
105 permission: &Permission::Anybody,
106 },
107 )]
108 /// Runs the sync jobs manually.
109 pub fn run_sync_job(
110 id: String,
111 _info: &ApiMethod,
112 rpcenv: &mut dyn RpcEnvironment,
113 ) -> Result<String, Error> {
114 let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
115 let user_info = CachedUserInfo::new()?;
116
117 let (config, _digest) = sync::config()?;
118 let sync_job: SyncJobConfig = config.lookup("sync", &id)?;
119
120 if !check_sync_job_modify_access(&user_info, &auth_id, &sync_job) {
121 bail!("permission check failed");
122 }
123
124 let job = Job::new("syncjob", &id)?;
125
126 let to_stdout = rpcenv.env_type() == RpcEnvironmentType::CLI;
127
128 let upid_str = do_sync_job(job, sync_job, &auth_id, None, to_stdout)?;
129
130 Ok(upid_str)
131 }
132
// Sub-routes available below a single job id; currently only
// POST `{id}/run` to trigger the job manually.
// NOTE(review): #[sortable] suggests this map must stay sorted by path
// name — trivially satisfied with a single entry.
#[sortable]
const SYNC_INFO_SUBDIRS: SubdirMap = &[
    (
        "run",
        &Router::new()
            .post(&API_METHOD_RUN_SYNC_JOB)
    ),
];
141
// Router for a single sync job: GET enumerates the sub-directories,
// everything else is dispatched through SYNC_INFO_SUBDIRS.
const SYNC_INFO_ROUTER: Router = Router::new()
    .get(&list_subdirs_api_method!(SYNC_INFO_SUBDIRS))
    .subdirs(SYNC_INFO_SUBDIRS);
145
146
/// Top-level sync-job router: GET lists all jobs; paths below `{id}/`
/// are handled by the per-job router.
pub const ROUTER: Router = Router::new()
    .get(&API_METHOD_LIST_SYNC_JOBS)
    .match_all("id", &SYNC_INFO_ROUTER);