src/api2/admin/datastore.rs: use new WorkerTask::new_thread()
index dccbed74ed68140c8a12f0a56aa83f28a8acc680..ca7fe09cb851fc1c96c720c51758ed4734d26f78 100644 (file)
@@ -1,5 +1,6 @@
 use failure::*;
 
+use crate::tools;
 use crate::api_schema::*;
 use crate::api_schema::router::*;
 //use crate::server::rest::*;
@@ -15,15 +16,17 @@ use std::sync::Arc;
 use crate::config::datastore;
 
 use crate::backup::*;
+use crate::server::WorkerTask;
 
-mod catar;
+mod pxar;
+mod upload;
 
 fn group_backups(backup_list: Vec<BackupInfo>) -> HashMap<String, Vec<BackupInfo>> {
 
     let mut group_hash = HashMap::new();
 
     for info in backup_list {
-        let group_id = format!("{}/{}", info.backup_dir.group.backup_type, info.backup_dir.group.backup_id);
+        let group_id = info.backup_dir.group().group_path().to_str().unwrap().to_owned();
         let time_list = group_hash.entry(group_id).or_insert(vec![]);
         time_list.push(info);
     }
@@ -39,7 +42,7 @@ fn mark_selections<F: Fn(DateTime<Local>, &BackupInfo) -> String> (
 ){
     let mut hash = HashSet::new();
     for info in list {
-        let local_time = info.backup_dir.backup_time.with_timezone(&Local);
+        let local_time = info.backup_dir.backup_time().with_timezone(&Local);
         if hash.len() >= keep as usize { break; }
         let backup_id = info.backup_dir.relative_path();
         let sel_id: String = select_id(local_time, &info);
@@ -51,7 +54,7 @@ fn mark_selections<F: Fn(DateTime<Local>, &BackupInfo) -> String> (
     }
 }
 
-fn get_group_list(
+fn list_groups(
     param: Value,
     _info: &ApiMethod,
     _rpcenv: &mut RpcEnvironment,
@@ -69,22 +72,109 @@ fn get_group_list(
 
     for (_group_id, mut list) in group_hash {
 
-        list.sort_unstable_by(|a, b| b.backup_dir.backup_time.cmp(&a.backup_dir.backup_time)); // new backups first
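+        // newest backups first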
+        BackupInfo::sort_list(&mut list, false);
 
         let info = &list[0];
-        let group = &info.backup_dir.group;
+        let group = info.backup_dir.group();
 
         groups.push(json!({
-            "backup_type": group.backup_type,
-            "backup_id": group.backup_id,
-            "last_backup": info.backup_dir.backup_time.timestamp(),
-            "num_backups": list.len() as u64,
+            "backup-type": group.backup_type(),
+            "backup-id": group.backup_id(),
+            "last-backup": info.backup_dir.backup_time().timestamp(),
+            "backup-count": list.len() as u64,
+            "files": info.files,
         }));
     }
 
     Ok(json!(groups))
 }
 
+fn list_snapshot_files (
+    param: Value,
+    _info: &ApiMethod,
+    _rpcenv: &mut RpcEnvironment,
+) -> Result<Value, Error> {
+
+    let store = tools::required_string_param(&param, "store")?;
+    let backup_type = tools::required_string_param(&param, "backup-type")?;
+    let backup_id = tools::required_string_param(&param, "backup-id")?;
+    let backup_time = tools::required_integer_param(&param, "backup-time")?;
+
+    let snapshot = BackupDir::new(backup_type, backup_id, backup_time);
+
+    let datastore = DataStore::lookup_datastore(store)?;
+
+    let files = datastore.list_files(&snapshot)?;
+
+    Ok(json!(files))
+}
+
+fn delete_snapshots (
+    param: Value,
+    _info: &ApiMethod,
+    _rpcenv: &mut RpcEnvironment,
+) -> Result<Value, Error> {
+
+    let store = tools::required_string_param(&param, "store")?;
+    let backup_type = tools::required_string_param(&param, "backup-type")?;
+    let backup_id = tools::required_string_param(&param, "backup-id")?;
+    let backup_time = tools::required_integer_param(&param, "backup-time")?;
+
+    let snapshot = BackupDir::new(backup_type, backup_id, backup_time);
+
+    let datastore = DataStore::lookup_datastore(store)?;
+
+    datastore.remove_backup_dir(&snapshot)?;
+
+    Ok(Value::Null)
+}
+
+fn list_snapshots (
+    param: Value,
+    _info: &ApiMethod,
+    _rpcenv: &mut RpcEnvironment,
+) -> Result<Value, Error> {
+
+    let store = tools::required_string_param(&param, "store")?;
+    let backup_type = tools::required_string_param(&param, "backup-type")?;
+    let backup_id = tools::required_string_param(&param, "backup-id")?;
+
+    let group = BackupGroup::new(backup_type, backup_id);
+
+    let datastore = DataStore::lookup_datastore(store)?;
+
+    let backup_list = datastore.list_backups()?;
+
+    let mut group_hash = group_backups(backup_list);
+
+    let group_id = group.group_path().to_str().unwrap().to_owned();
+
+    let group_snapshots = match group_hash.get_mut(&group_id) {
+        Some(data) => {
+            // new backups first
+            BackupInfo::sort_list(data, false);
+            data
+        }
+        None => bail!("Backup group '{}' does not exist.", group_id),
+    };
+
+    let mut snapshots = vec![];
+
+    for info in group_snapshots {
+
+        let group = info.backup_dir.group();
+
+        snapshots.push(json!({
+            "backup-type": group.backup_type(),
+            "backup-id": group.backup_id(),
+            "backup-time": info.backup_dir.backup_time().timestamp(),
+            "files": info.files,
+        }));
+    }
+
+    Ok(json!(snapshots))
+}
+
 fn prune(
     param: Value,
     _info: &ApiMethod,
@@ -105,7 +195,7 @@ fn prune(
 
         let mut mark = HashSet::new();
 
-        list.sort_unstable_by(|a, b| b.backup_dir.backup_time.cmp(&a.backup_dir.backup_time)); // new backups first
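+        // newest backups first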
+        BackupInfo::sort_list(&mut list, false);
 
         if let Some(keep_last) = param["keep-last"].as_u64() {
             list.iter().take(keep_last as usize).for_each(|info| {
@@ -137,10 +227,10 @@ fn prune(
             });
         }
 
-        let mut remove_list: Vec<&BackupInfo> = list.iter()
+        let mut remove_list: Vec<BackupInfo> = list.into_iter()
             .filter(|info| !mark.contains(&info.backup_dir.relative_path())).collect();
 
-        remove_list.sort_unstable_by(|a, b| a.backup_dir.backup_time.cmp(&b.backup_dir.backup_time)); // oldest backups first
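+        // oldest backups first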
+        BackupInfo::sort_list(&mut remove_list, true);
 
         for info in remove_list {
             datastore.remove_backup_dir(&info.backup_dir)?;
@@ -193,22 +283,28 @@ fn api_method_prune() -> ApiMethod {
     )
 }
 
-// this is just a test for mutability/mutex handling  - will remove later
 fn start_garbage_collection(
     param: Value,
     _info: &ApiMethod,
-    _rpcenv: &mut RpcEnvironment,
+    rpcenv: &mut RpcEnvironment,
 ) -> Result<Value, Error> {
 
-    let store = param["store"].as_str().unwrap();
+    let store = param["store"].as_str().unwrap().to_string();
 
-    let datastore = DataStore::lookup_datastore(store)?;
+    let datastore = DataStore::lookup_datastore(&store)?;
 
     println!("Starting garbage collection on store {}", store);
 
-    datastore.garbage_collection()?;
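+    // Write worker log output to stdout when running in a CLI environment.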
+    let to_stdout = rpcenv.env_type() == RpcEnvironmentType::CLI;
 
-    Ok(json!(null))
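+    // Run garbage collection in a background worker thread; the returned
+    // UPID string identifies the task to the caller.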
+    let upid_str = WorkerTask::new_thread(
+        "garbage_collection", Some(store.clone()), "root@pam", to_stdout, move |worker|
+        {
+            worker.log(format!("starting garbage collection on store {}", store));
+            datastore.garbage_collection()
+        })?;
+
+    Ok(json!(upid_str))
 }
 
 pub fn api_method_start_garbage_collection() -> ApiMethod {
@@ -257,9 +353,9 @@ fn get_backup_list(
 
     for info in datastore.list_backups()? {
         list.push(json!({
-            "backup_type": info.backup_dir.group.backup_type,
-            "backup_id": info.backup_dir.group.backup_id,
-            "backup_time": info.backup_dir.backup_time.timestamp(),
+            "backup-type": info.backup_dir.group().backup_type(),
+            "backup-id": info.backup_dir.group().backup_id(),
+            "backup-time": info.backup_dir.backup_time().timestamp(),
             "files": info.files,
         }));
     }
@@ -291,9 +387,10 @@ pub fn router() -> Router {
         .get(ApiMethod::new(
             |_,_,_| Ok(json!([
                 {"subdir": "backups" },
-                {"subdir": "catar" },
+                {"subdir": "pxar" },
                 {"subdir": "gc" },
                 {"subdir": "groups" },
+                {"subdir": "snapshots" },
                 {"subdir": "status" },
                 {"subdir": "prune" },
            ])),
@@ -308,22 +405,65 @@ pub fn router() -> Router {
                     ObjectSchema::new("List backups.")
                         .required("store", store_schema.clone()))))
         .subdir(
-            "catar",
+            "pxar",
+            Router::new()
+                .download(pxar::api_method_download_pxar())
+                .upload(pxar::api_method_upload_pxar()))
+        .subdir(
+            "test-upload",
             Router::new()
-                .download(catar::api_method_download_catar())
-                .upload(catar::api_method_upload_catar()))
+                .upgrade(upload::api_method_upgrade_upload()))
         .subdir(
             "gc",
             Router::new()
                 .get(api_method_garbage_collection_status())
                 .post(api_method_start_garbage_collection()))
+        .subdir(
+            "files",
+            Router::new()
+                .get(
+                    ApiMethod::new(
+                        list_snapshot_files,
+                        ObjectSchema::new("List snapshot files.")
+                            .required("store", store_schema.clone())
+                            .required("backup-type", StringSchema::new("Backup type."))
+                            .required("backup-id", StringSchema::new("Backup ID."))
+                            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch).")
+                                      .minimum(1547797308))
+                    )
+                )
+        )
         .subdir(
             "groups",
             Router::new()
                 .get(ApiMethod::new(
-                    get_group_list,
+                    list_groups,
                     ObjectSchema::new("List backup groups.")
                         .required("store", store_schema.clone()))))
+        .subdir(
+            "snapshots",
+            Router::new()
+                .get(
+                    ApiMethod::new(
+                        list_snapshots,
+                        ObjectSchema::new("List backup snapshots.")
+                            .required("store", store_schema.clone())
+                            .required("backup-type", StringSchema::new("Backup type."))
+                            .required("backup-id", StringSchema::new("Backup ID."))
+                    )
+                )
+                .delete(
+                    ApiMethod::new(
+                        delete_snapshots,
+                        ObjectSchema::new("Delete backup snapshot.")
+                            .required("store", store_schema.clone())
+                            .required("backup-type", StringSchema::new("Backup type."))
+                            .required("backup-id", StringSchema::new("Backup ID."))
+                            .required("backup-time", IntegerSchema::new("Backup time (Unix epoch).")
+                                      .minimum(1547797308))
+                    )
+                )
+        )
         .subdir(
             "prune",
             Router::new()
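
The core of this change is the worker-task pattern in start_garbage_collection() above: instead of running garbage collection synchronously, the handler spawns it via WorkerTask::new_thread() and returns the worker's UPID string. Below is a rough sketch of how the same pattern could wrap another long-running datastore operation. It assumes only the WorkerTask::new_thread() signature and the module imports shown in the diff; the handler name, worker type string, and body are hypothetical.

    // Hypothetical handler following the pattern introduced in this commit:
    // spawn the work in a background thread and return the worker's UPID.
    fn start_example_task(
        param: Value,
        _info: &ApiMethod,
        rpcenv: &mut RpcEnvironment,
    ) -> Result<Value, Error> {

        let store = tools::required_string_param(&param, "store")?.to_string();

        // Write worker log lines to stdout when running in a CLI environment.
        let to_stdout = rpcenv.env_type() == RpcEnvironmentType::CLI;

        // Spawn the long-running work in a background thread; the returned
        // UPID string identifies the worker task to the caller.
        let upid_str = WorkerTask::new_thread(
            "example_task", Some(store.clone()), "root@pam", to_stdout,
            move |worker| {
                worker.log(format!("starting example task on store {}", store));
                // ... long-running work would go here ...
                Ok(())
            })?;

        Ok(json!(upid_str))
    }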