--- /dev/null
+use crate::api::schema::*;
+use crate::api::router::*;
+use serde_json::{json};
+
+pub mod datastore;
+
+pub fn router() -> Router {
+
+ let route = Router::new()
+ .get(ApiMethod::new(
+ |_,_| Ok(json!([
+ {"subdir": "datastore"}
+ ])),
+ ObjectSchema::new("Directory index.")))
+ .subdir("datastore", datastore::router());
+
+ route
+}
--- /dev/null
+use failure::*;
+
+use crate::api::schema::*;
+use crate::api::router::*;
+//use crate::server::rest::*;
+use serde_json::{json, Value};
+
+//use hyper::StatusCode;
+//use hyper::rt::{Future, Stream};
+
+use crate::config::datastore;
+
+use crate::backup::datastore::*;
+
+mod catar;
+
+// this is just a test for mutability/mutex handling - will remove later
+fn start_garbage_collection(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
+
+ let store = param["store"].as_str().unwrap();
+
+ let datastore = DataStore::lookup_datastore(store)?;
+
+ println!("Starting garbage collection on store {}", store);
+
+ datastore.garbage_collection()?;
+
+ Ok(json!(null))
+}
+
+pub fn api_method_start_garbage_collection() -> ApiMethod {
+ ApiMethod::new(
+ start_garbage_collection,
+ ObjectSchema::new("Start garbage collection.")
+ .required("store", StringSchema::new("Datastore name."))
+ )
+}
+
+fn garbage_collection_status(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
+
+ let store = param["store"].as_str().unwrap();
+
+ println!("Garbage collection status on store {}", store);
+
+ Ok(json!(null))
+
+}
+
+pub fn api_method_garbage_collection_status() -> ApiMethod {
+ ApiMethod::new(
+ garbage_collection_status,
+ ObjectSchema::new("Garbage collection status.")
+ .required("store", StringSchema::new("Datastore name."))
+ )
+}
+
+fn get_backup_list(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
+
+ let config = datastore::config()?;
+
+ let store = param["store"].as_str().unwrap();
+
+ let datastore = DataStore::lookup_datastore(store)?;
+
+ let mut list = vec![];
+
+ for info in datastore.list_backups()? {
+ list.push(json!({
+ "backup_type": info.backup_type,
+ "backup_id": info.backup_id,
+ "backup_time": info.backup_time.timestamp(),
+ }));
+ }
+
+ let result = json!(list);
+
+ Ok(result)
+}
+
+fn get_datastore_list(_param: Value, _info: &ApiMethod) -> Result<Value, Error> {
+
+ let config = datastore::config()?;
+
+ Ok(config.convert_to_array("store"))
+}
+
+
+pub fn router() -> Router {
+
+ let datastore_info = Router::new()
+ .get(ApiMethod::new(
+ |_,_| Ok(json!([
+ {"subdir": "backups" },
+ {"subdir": "catar" },
+ {"subdir": "status"},
+ {"subdir": "gc" }
+ ])),
+ ObjectSchema::new("Directory index.")
+ .required("store", StringSchema::new("Datastore name.")))
+ )
+ .subdir(
+ "backups",
+ Router::new()
+ .get(ApiMethod::new(
+ get_backup_list,
+ ObjectSchema::new("List backups.")
+ .required("store", StringSchema::new("Datastore name.")))))
+ .subdir(
+ "catar",
+ Router::new()
+ .download(catar::api_method_download_catar())
+ .upload(catar::api_method_upload_catar()))
+ .subdir(
+ "gc",
+ Router::new()
+ .get(api_method_garbage_collection_status())
+ .post(api_method_start_garbage_collection()));
+
+
+
+ let route = Router::new()
+ .get(ApiMethod::new(
+ get_datastore_list,
+ ObjectSchema::new("Directory index.")))
+ .match_all("store", datastore_info);
+
+
+
+ route
+}
--- /dev/null
+use failure::*;
+
+use crate::tools;
+use crate::tools::wrapped_reader_stream::*;
+use crate::backup::datastore::*;
+use crate::backup::archive_index::*;
+//use crate::server::rest::*;
+use crate::api::schema::*;
+use crate::api::router::*;
+
+use chrono::{Utc, TimeZone};
+
+use serde_json::Value;
+use std::io::Write;
+use futures::*;
+use std::path::PathBuf;
+use std::sync::Arc;
+
+use hyper::Body;
+use hyper::http::request::Parts;
+
+pub struct UploadCaTar {
+ stream: Body,
+ index: ArchiveIndexWriter,
+ count: usize,
+}
+
+impl Future for UploadCaTar {
+ type Item = ();
+ type Error = failure::Error;
+
+ fn poll(&mut self) -> Poll<(), failure::Error> {
+ loop {
+ match try_ready!(self.stream.poll()) {
+ Some(chunk) => {
+ self.count += chunk.len();
+ if let Err(err) = self.index.write(&chunk) {
+ bail!("writing chunk failed - {}", err);
+ }
+ }
+ None => {
+ self.index.close()?;
+ return Ok(Async::Ready(()))
+ }
+ }
+ }
+ }
+}
+
+fn upload_catar(parts: Parts, req_body: Body, param: Value, _info: &ApiAsyncMethod) -> Result<BoxFut, Error> {
+
+ let store = tools::required_string_param(¶m, "store")?;
+ let archive_name = tools::required_string_param(¶m, "archive_name")?;
+
+ let backup_type = tools::required_string_param(¶m, "type")?;
+ let backup_id = tools::required_string_param(¶m, "id")?;
+ let backup_time = tools::required_integer_param(¶m, "time")?;
+
+ println!("Upload {}.catar to {} ({}/{}/{}/{}.aidx)", archive_name, store,
+ backup_type, backup_id, backup_time, archive_name);
+
+ let content_type = parts.headers.get(http::header::CONTENT_TYPE)
+ .ok_or(format_err!("missing content-type header"))?;
+
+ if content_type != "application/x-proxmox-backup-catar" {
+ bail!("got wrong content-type for catar archive upload");
+ }
+
+ let chunk_size = 4*1024*1024;
+
+ let datastore = DataStore::lookup_datastore(store)?;
+
+ let mut path = datastore.create_backup_dir(backup_type, backup_id, backup_time)?;
+
+ let mut full_archive_name = PathBuf::from(archive_name);
+ full_archive_name.set_extension("aidx");
+
+ path.push(full_archive_name);
+
+ let index = datastore.create_archive_writer(path, chunk_size)?;
+
+ let upload = UploadCaTar { stream: req_body, index, count: 0};
+
+ let resp = upload.and_then(|_| {
+
+ let response = http::Response::builder()
+ .status(200)
+ .body(hyper::Body::empty())
+ .unwrap();
+
+ Ok(response)
+ });
+
+ Ok(Box::new(resp))
+}
+
+pub fn api_method_upload_catar() -> ApiAsyncMethod {
+ ApiAsyncMethod::new(
+ upload_catar,
+ ObjectSchema::new("Upload .catar backup file.")
+ .required("store", StringSchema::new("Datastore name."))
+ .required("archive_name", StringSchema::new("Backup archive name."))
+ .required("type", StringSchema::new("Backup type.")
+ .format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
+ .required("id", StringSchema::new("Backup ID."))
+ .required("time", IntegerSchema::new("Backup time (Unix epoch).")
+ .minimum(1547797308))
+
+ )
+}
+
+fn download_catar(_parts: Parts, _req_body: Body, param: Value, _info: &ApiAsyncMethod) -> Result<BoxFut, Error> {
+
+ let store = tools::required_string_param(¶m, "store")?;
+ let archive_name = tools::required_string_param(¶m, "archive_name")?;
+
+ let backup_type = tools::required_string_param(¶m, "type")?;
+ let backup_id = tools::required_string_param(¶m, "id")?;
+ let backup_time = tools::required_integer_param(¶m, "time")?;
+ let backup_time = Utc.timestamp(backup_time, 0);
+
+ println!("Download {}.catar from {} ({}/{}/{}/{}.aidx)", archive_name, store,
+ backup_type, backup_id, backup_time, archive_name);
+
+ let datastore = DataStore::lookup_datastore(store)?;
+
+ let mut path = datastore.get_backup_dir(backup_type, backup_id, backup_time);
+
+ let mut full_archive_name = PathBuf::from(archive_name);
+ full_archive_name.set_extension("aidx");
+
+ path.push(full_archive_name);
+
+ let index = datastore.open_archive_reader(path)?;
+ let reader = BufferedArchiveReader::new(index);
+ let stream = WrappedReaderStream::new(reader);
+
+ // fixme: set size, content type?
+ let response = http::Response::builder()
+ .status(200)
+ .body(Body::wrap_stream(stream))?;
+
+ Ok(Box::new(future::ok(response)))
+}
+
+pub fn api_method_download_catar() -> ApiAsyncMethod {
+ ApiAsyncMethod::new(
+ download_catar,
+ ObjectSchema::new("Download .catar backup file.")
+ .required("store", StringSchema::new("Datastore name."))
+ .required("archive_name", StringSchema::new("Backup archive name."))
+ .required("type", StringSchema::new("Backup type.")
+ .format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
+ .required("id", StringSchema::new("Backup ID."))
+ .required("time", IntegerSchema::new("Backup time (Unix epoch).")
+ .minimum(1547797308))
+
+ )
+}
--- /dev/null
+//use failure::*;
+//use std::collections::HashMap;
+
+use crate::api::schema::*;
+use crate::api::router::*;
+use serde_json::{json};
+
+pub mod datastore;
+
+pub fn router() -> Router {
+
+ let route = Router::new()
+ .get(ApiMethod::new(
+ |_,_| Ok(json!([
+ {"subdir": "datastore"}
+ ])),
+ ObjectSchema::new("Directory index.")))
+ .subdir("datastore", datastore::router());
+
+
+ route
+}
--- /dev/null
+use failure::*;
+//use std::collections::HashMap;
+
+use crate::api::schema::*;
+use crate::api::router::*;
+use crate::backup::chunk_store::*;
+use serde_json::{json, Value};
+use std::path::PathBuf;
+
+use crate::config::datastore;
+
+pub fn get() -> ApiMethod {
+ ApiMethod::new(
+ get_datastore_list,
+ ObjectSchema::new("Directory index."))
+}
+
+fn get_datastore_list(_param: Value, _info: &ApiMethod) -> Result<Value, Error> {
+
+ let config = datastore::config()?;
+
+ Ok(config.convert_to_array("name"))
+}
+
+pub fn post() -> ApiMethod {
+ ApiMethod::new(
+ create_datastore,
+ ObjectSchema::new("Create new datastore.")
+ .required("name", StringSchema::new("Datastore name."))
+ .required("path", StringSchema::new("Directory path (must exist)."))
+ )
+}
+
+fn create_datastore(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
+
+ // fixme: locking ?
+
+ let mut config = datastore::config()?;
+
+ let name = param["name"].as_str().unwrap();
+
+ if let Some(_) = config.sections.get(name) {
+ bail!("datastore '{}' already exists.", name);
+ }
+
+ let path: PathBuf = param["path"].as_str().unwrap().into();
+ let _store = ChunkStore::create(name, path)?;
+
+ let datastore = json!({
+ "path": param["path"]
+ });
+
+ config.set_data(name, "datastore", datastore);
+
+ datastore::save_config(&config)?;
+
+ Ok(Value::Null)
+}
+
+pub fn delete() -> ApiMethod {
+ ApiMethod::new(
+ delete_datastore,
+ ObjectSchema::new("Remove a datastore configuration.")
+ .required("name", StringSchema::new("Datastore name.")))
+}
+
+fn delete_datastore(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
+ println!("This is a test {}", param);
+
+ // fixme: locking ?
+ // fixme: check digest ?
+
+ let mut config = datastore::config()?;
+
+ let name = param["name"].as_str().unwrap();
+
+ match config.sections.get(name) {
+ Some(_) => { config.sections.remove(name); },
+ None => bail!("datastore '{}' does not exist.", name),
+ }
+
+ datastore::save_config(&config)?;
+
+ Ok(Value::Null)
+}
+
+pub fn router() -> Router {
+
+ let route = Router::new()
+ .get(get())
+ .post(post())
+ .delete(delete());
+
+
+ route
+}
--- /dev/null
+use failure::*;
+
+use crate::api::schema::*;
+use crate::api::router::*;
+use serde_json::{json, Value};
+
+const PROXMOX_PKG_VERSION: &'static str = env!("PROXMOX_PKG_VERSION");
+const PROXMOX_PKG_RELEASE: &'static str = env!("PROXMOX_PKG_RELEASE");
+const PROXMOX_PKG_REPOID: &'static str = env!("PROXMOX_PKG_REPOID");
+
+fn get_version(_param: Value, _info: &ApiMethod) -> Result<Value, Error> {
+
+ Ok(json!({
+ "version": PROXMOX_PKG_VERSION,
+ "release": PROXMOX_PKG_RELEASE,
+ "repoid": PROXMOX_PKG_REPOID
+ }))
+}
+
+pub fn router() -> Router {
+
+ let route = Router::new()
+ .get(ApiMethod::new(
+ get_version,
+ ObjectSchema::new("Proxmox Backup Server API version.")));
+
+ route
+}
+++ /dev/null
-use failure::*;
-
-use crate::api::schema::*;
-use crate::api::router::*;
-use serde_json::{json, Value};
-
-pub mod config;
-pub mod admin;
-mod version;
-
-fn test_sync_api_handler(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
- println!("This is a test {}", param);
-
- // let force: Option<bool> = Some(false);
-
- //if let Some(force) = param.force {
- //}
-
- let _force = param["force"].as_bool()
- .ok_or_else(|| format_err!("missing parameter 'force'"))?;
-
- if let Some(_force) = param["force"].as_bool() {
- }
-
- Ok(json!(null))
-}
-
-pub fn router() -> Router {
-
- let route4 = Router::new()
- .get(ApiMethod::new(
- |param, _info| {
- println!("This is a clousure handler: {}", param);
-
- Ok(json!(null))
- },
- ObjectSchema::new("Another Endpoint."))
- .returns(Schema::Null));
-
-
- let nodeinfo = Router::new()
- .get(ApiMethod::new(
- test_sync_api_handler,
- ObjectSchema::new("This is a simple test.")
- .optional("force", BooleanSchema::new("Test for boolean options")))
- )
- .subdir("subdir3", route4);
-
- let nodes = Router::new()
- .match_all("node", nodeinfo);
-
-
- let route = Router::new()
- .get(ApiMethod::new(
- |_,_| Ok(json!([
- {"subdir": "config"},
- {"subdir": "admin"},
- {"subdir": "version"},
- {"subdir": "nodes"}
- ])),
- ObjectSchema::new("Directory index.")))
- .subdir("admin", admin::router())
- .subdir("config", config::router())
- .subdir("version", version::router())
- .subdir("nodes", nodes);
-
- route
-}
+++ /dev/null
-use crate::api::schema::*;
-use crate::api::router::*;
-use serde_json::{json};
-
-pub mod datastore;
-
-pub fn router() -> Router {
-
- let route = Router::new()
- .get(ApiMethod::new(
- |_,_| Ok(json!([
- {"subdir": "datastore"}
- ])),
- ObjectSchema::new("Directory index.")))
- .subdir("datastore", datastore::router());
-
- route
-}
+++ /dev/null
-use failure::*;
-
-use crate::api::schema::*;
-use crate::api::router::*;
-//use crate::server::rest::*;
-use serde_json::{json, Value};
-
-//use hyper::StatusCode;
-//use hyper::rt::{Future, Stream};
-
-use crate::config::datastore;
-
-use crate::backup::datastore::*;
-
-mod catar;
-
-// this is just a test for mutability/mutex handling - will remove later
-fn start_garbage_collection(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
-
- let store = param["store"].as_str().unwrap();
-
- let datastore = DataStore::lookup_datastore(store)?;
-
- println!("Starting garbage collection on store {}", store);
-
- datastore.garbage_collection()?;
-
- Ok(json!(null))
-}
-
-pub fn api_method_start_garbage_collection() -> ApiMethod {
- ApiMethod::new(
- start_garbage_collection,
- ObjectSchema::new("Start garbage collection.")
- .required("store", StringSchema::new("Datastore name."))
- )
-}
-
-fn garbage_collection_status(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
-
- let store = param["store"].as_str().unwrap();
-
- println!("Garbage collection status on store {}", store);
-
- Ok(json!(null))
-
-}
-
-pub fn api_method_garbage_collection_status() -> ApiMethod {
- ApiMethod::new(
- garbage_collection_status,
- ObjectSchema::new("Garbage collection status.")
- .required("store", StringSchema::new("Datastore name."))
- )
-}
-
-fn get_backup_list(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
-
- let config = datastore::config()?;
-
- let store = param["store"].as_str().unwrap();
-
- let datastore = DataStore::lookup_datastore(store)?;
-
- let mut list = vec![];
-
- for info in datastore.list_backups()? {
- list.push(json!({
- "backup_type": info.backup_type,
- "backup_id": info.backup_id,
- "backup_time": info.backup_time.timestamp(),
- }));
- }
-
- let result = json!(list);
-
- Ok(result)
-}
-
-fn get_datastore_list(_param: Value, _info: &ApiMethod) -> Result<Value, Error> {
-
- let config = datastore::config()?;
-
- Ok(config.convert_to_array("store"))
-}
-
-
-pub fn router() -> Router {
-
- let datastore_info = Router::new()
- .get(ApiMethod::new(
- |_,_| Ok(json!([
- {"subdir": "backups" },
- {"subdir": "catar" },
- {"subdir": "status"},
- {"subdir": "gc" }
- ])),
- ObjectSchema::new("Directory index.")
- .required("store", StringSchema::new("Datastore name.")))
- )
- .subdir(
- "backups",
- Router::new()
- .get(ApiMethod::new(
- get_backup_list,
- ObjectSchema::new("List backups.")
- .required("store", StringSchema::new("Datastore name.")))))
- .subdir(
- "catar",
- Router::new()
- .download(catar::api_method_download_catar())
- .upload(catar::api_method_upload_catar()))
- .subdir(
- "gc",
- Router::new()
- .get(api_method_garbage_collection_status())
- .post(api_method_start_garbage_collection()));
-
-
-
- let route = Router::new()
- .get(ApiMethod::new(
- get_datastore_list,
- ObjectSchema::new("Directory index.")))
- .match_all("store", datastore_info);
-
-
-
- route
-}
+++ /dev/null
-use failure::*;
-
-use crate::tools;
-use crate::tools::wrapped_reader_stream::*;
-use crate::backup::datastore::*;
-use crate::backup::archive_index::*;
-//use crate::server::rest::*;
-use crate::api::schema::*;
-use crate::api::router::*;
-
-use chrono::{Utc, TimeZone};
-
-use serde_json::Value;
-use std::io::Write;
-use futures::*;
-use std::path::PathBuf;
-use std::sync::Arc;
-
-use hyper::Body;
-use hyper::http::request::Parts;
-
-pub struct UploadCaTar {
- stream: Body,
- index: ArchiveIndexWriter,
- count: usize,
-}
-
-impl Future for UploadCaTar {
- type Item = ();
- type Error = failure::Error;
-
- fn poll(&mut self) -> Poll<(), failure::Error> {
- loop {
- match try_ready!(self.stream.poll()) {
- Some(chunk) => {
- self.count += chunk.len();
- if let Err(err) = self.index.write(&chunk) {
- bail!("writing chunk failed - {}", err);
- }
- }
- None => {
- self.index.close()?;
- return Ok(Async::Ready(()))
- }
- }
- }
- }
-}
-
-fn upload_catar(parts: Parts, req_body: Body, param: Value, _info: &ApiAsyncMethod) -> Result<BoxFut, Error> {
-
- let store = tools::required_string_param(¶m, "store")?;
- let archive_name = tools::required_string_param(¶m, "archive_name")?;
-
- let backup_type = tools::required_string_param(¶m, "type")?;
- let backup_id = tools::required_string_param(¶m, "id")?;
- let backup_time = tools::required_integer_param(¶m, "time")?;
-
- println!("Upload {}.catar to {} ({}/{}/{}/{}.aidx)", archive_name, store,
- backup_type, backup_id, backup_time, archive_name);
-
- let content_type = parts.headers.get(http::header::CONTENT_TYPE)
- .ok_or(format_err!("missing content-type header"))?;
-
- if content_type != "application/x-proxmox-backup-catar" {
- bail!("got wrong content-type for catar archive upload");
- }
-
- let chunk_size = 4*1024*1024;
-
- let datastore = DataStore::lookup_datastore(store)?;
-
- let mut path = datastore.create_backup_dir(backup_type, backup_id, backup_time)?;
-
- let mut full_archive_name = PathBuf::from(archive_name);
- full_archive_name.set_extension("aidx");
-
- path.push(full_archive_name);
-
- let index = datastore.create_archive_writer(path, chunk_size)?;
-
- let upload = UploadCaTar { stream: req_body, index, count: 0};
-
- let resp = upload.and_then(|_| {
-
- let response = http::Response::builder()
- .status(200)
- .body(hyper::Body::empty())
- .unwrap();
-
- Ok(response)
- });
-
- Ok(Box::new(resp))
-}
-
-pub fn api_method_upload_catar() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- upload_catar,
- ObjectSchema::new("Upload .catar backup file.")
- .required("store", StringSchema::new("Datastore name."))
- .required("archive_name", StringSchema::new("Backup archive name."))
- .required("type", StringSchema::new("Backup type.")
- .format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
- .required("id", StringSchema::new("Backup ID."))
- .required("time", IntegerSchema::new("Backup time (Unix epoch.)")
- .minimum(1547797308))
-
- )
-}
-
-fn download_catar(_parts: Parts, _req_body: Body, param: Value, _info: &ApiAsyncMethod) -> Result<BoxFut, Error> {
-
- let store = tools::required_string_param(¶m, "store")?;
- let archive_name = tools::required_string_param(¶m, "archive_name")?;
-
- let backup_type = tools::required_string_param(¶m, "type")?;
- let backup_id = tools::required_string_param(¶m, "id")?;
- let backup_time = tools::required_integer_param(¶m, "time")?;
- let backup_time = Utc.timestamp(backup_time, 0);
-
- println!("Download {}.catar from {} ({}/{}/{}/{}.aidx)", archive_name, store,
- backup_type, backup_id, backup_time, archive_name);
-
- let datastore = DataStore::lookup_datastore(store)?;
-
- let mut path = datastore.get_backup_dir(backup_type, backup_id, backup_time);
-
- let mut full_archive_name = PathBuf::from(archive_name);
- full_archive_name.set_extension("aidx");
-
- path.push(full_archive_name);
-
- let index = datastore.open_archive_reader(path)?;
- let reader = BufferedArchiveReader::new(index);
- let stream = WrappedReaderStream::new(reader);
-
- // fixme: set size, content type?
- let response = http::Response::builder()
- .status(200)
- .body(Body::wrap_stream(stream))?;
-
- Ok(Box::new(future::ok(response)))
-}
-
-pub fn api_method_download_catar() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- download_catar,
- ObjectSchema::new("Download .catar backup file.")
- .required("store", StringSchema::new("Datastore name."))
- .required("archive_name", StringSchema::new("Backup archive name."))
- .required("type", StringSchema::new("Backup type.")
- .format(Arc::new(ApiStringFormat::Enum(vec!["ct".into(), "host".into()]))))
- .required("id", StringSchema::new("Backup ID."))
- .required("time", IntegerSchema::new("Backup time (Unix epoch.)")
- .minimum(1547797308))
-
- )
-}
+++ /dev/null
-//use failure::*;
-//use std::collections::HashMap;
-
-use crate::api::schema::*;
-use crate::api::router::*;
-use serde_json::{json};
-
-pub mod datastore;
-
-pub fn router() -> Router {
-
- let route = Router::new()
- .get(ApiMethod::new(
- |_,_| Ok(json!([
- {"subdir": "datastore"}
- ])),
- ObjectSchema::new("Directory index.")))
- .subdir("datastore", datastore::router());
-
-
- route
-}
+++ /dev/null
-use failure::*;
-//use std::collections::HashMap;
-
-use crate::api::schema::*;
-use crate::api::router::*;
-use crate::backup::chunk_store::*;
-use serde_json::{json, Value};
-use std::path::PathBuf;
-
-use crate::config::datastore;
-
-pub fn get() -> ApiMethod {
- ApiMethod::new(
- get_datastore_list,
- ObjectSchema::new("Directory index."))
-}
-
-fn get_datastore_list(_param: Value, _info: &ApiMethod) -> Result<Value, Error> {
-
- let config = datastore::config()?;
-
- Ok(config.convert_to_array("name"))
-}
-
-pub fn post() -> ApiMethod {
- ApiMethod::new(
- create_datastore,
- ObjectSchema::new("Create new datastore.")
- .required("name", StringSchema::new("Datastore name."))
- .required("path", StringSchema::new("Directory path (must exist)."))
- )
-}
-
-fn create_datastore(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
-
- // fixme: locking ?
-
- let mut config = datastore::config()?;
-
- let name = param["name"].as_str().unwrap();
-
- if let Some(_) = config.sections.get(name) {
- bail!("datastore '{}' already exists.", name);
- }
-
- let path: PathBuf = param["path"].as_str().unwrap().into();
- let _store = ChunkStore::create(name, path)?;
-
- let datastore = json!({
- "path": param["path"]
- });
-
- config.set_data(name, "datastore", datastore);
-
- datastore::save_config(&config)?;
-
- Ok(Value::Null)
-}
-
-pub fn delete() -> ApiMethod {
- ApiMethod::new(
- delete_datastore,
- ObjectSchema::new("Remove a datastore configuration.")
- .required("name", StringSchema::new("Datastore name.")))
-}
-
-fn delete_datastore(param: Value, _info: &ApiMethod) -> Result<Value, Error> {
- println!("This is a test {}", param);
-
- // fixme: locking ?
- // fixme: check digest ?
-
- let mut config = datastore::config()?;
-
- let name = param["name"].as_str().unwrap();
-
- match config.sections.get(name) {
- Some(_) => { config.sections.remove(name); },
- None => bail!("datastore '{}' does not exist.", name),
- }
-
- datastore::save_config(&config)?;
-
- Ok(Value::Null)
-}
-
-pub fn router() -> Router {
-
- let route = Router::new()
- .get(get())
- .post(post())
- .delete(delete());
-
-
- route
-}
+++ /dev/null
-use failure::*;
-
-use crate::api::schema::*;
-use crate::api::router::*;
-use serde_json::{json, Value};
-
-const PROXMOX_PKG_VERSION: &'static str = env!("PROXMOX_PKG_VERSION");
-const PROXMOX_PKG_RELEASE: &'static str = env!("PROXMOX_PKG_RELEASE");
-const PROXMOX_PKG_REPOID: &'static str = env!("PROXMOX_PKG_REPOID");
-
-fn get_version(_param: Value, _info: &ApiMethod) -> Result<Value, Error> {
-
- Ok(json!({
- "version": PROXMOX_PKG_VERSION,
- "release": PROXMOX_PKG_RELEASE,
- "repoid": PROXMOX_PKG_REPOID
- }))
-}
-
-pub fn router() -> Router {
-
- let route = Router::new()
- .get(ApiMethod::new(
- get_version,
- ObjectSchema::new("Proxmox Backup Server API version.")));
-
- route
-}
let addr = ([0,0,0,0,0,0,0,0], 8007).into();
lazy_static!{
- static ref ROUTER: Router = proxmox_backup::api3::router();
+ static ref ROUTER: Router = proxmox_backup::api2::router();
}
let mut config = ApiConfig::new("/usr/share/javascript/proxmox-backup", &ROUTER);
.append_pair("time", &epoch.to_string())
.finish();
- let path = format!("api3/json/admin/datastore/{}/catar?{}", store, query);
+ let path = format!("api2/json/admin/datastore/{}/catar?{}", store, query);
client.upload("application/x-proxmox-backup-catar", body, &path)?;
let client = HttpClient::new("localhost");
- let path = format!("api3/json/admin/datastore/{}/backups", store);
+ let path = format!("api2/json/admin/datastore/{}/backups", store);
let result = client.get(&path)?;
extern crate proxmox_backup;
-//use proxmox_backup::api3;
+//use proxmox_backup::api2;
use proxmox_backup::cli::command::*;
fn datastore_commands() -> CommandLineInterface {
use proxmox_backup::config;
- use proxmox_backup::api3;
+ use proxmox_backup::api2;
let cmd_def = CliCommandMap::new()
- .insert("list", CliCommand::new(api3::config::datastore::get()).into())
+ .insert("list", CliCommand::new(api2::config::datastore::get()).into())
.insert("create",
- CliCommand::new(api3::config::datastore::post())
+ CliCommand::new(api2::config::datastore::post())
.arg_param(vec!["name", "path"])
.into())
.insert("remove",
- CliCommand::new(api3::config::datastore::delete())
+ CliCommand::new(api2::config::datastore::delete())
.arg_param(vec!["name"])
.completion_cb("name", config::datastore::complete_datastore_name)
.into());
fn garbage_collection_commands() -> CommandLineInterface {
use proxmox_backup::config;
- use proxmox_backup::api3;
+ use proxmox_backup::api2;
let cmd_def = CliCommandMap::new()
.insert("status",
- CliCommand::new(api3::admin::datastore::api_method_garbage_collection_status())
+ CliCommand::new(api2::admin::datastore::api_method_garbage_collection_status())
.arg_param(vec!["name"])
.completion_cb("name", config::datastore::complete_datastore_name)
.into())
.insert("start",
- CliCommand::new(api3::admin::datastore::api_method_start_garbage_collection())
+ CliCommand::new(api2::admin::datastore::api_method_start_garbage_collection())
.arg_param(vec!["name"])
.completion_cb("name", config::datastore::complete_datastore_name)
.into());
}
-pub mod api3;
+pub mod api2;
pub mod client {
println!("REQUEST {} {}", method, path);
println!("COMPO {:?}", components);
- if comp_len >= 1 && components[0] == "api3" {
+ if comp_len >= 1 && components[0] == "api2" {
println!("GOT API REQUEST");
if comp_len >= 2 {
let format = components[1];
Ext.define('PBS.Utils', {
singleton: true,
- // Ext.Ajax.request
- API3Request: function(reqOpts) {
-
- var newopts = Ext.apply({
- waitMsg: gettext('Please wait...')
- }, reqOpts);
-
- if (!newopts.url.match(/^\/api3/)) {
- newopts.url = '/api3/extjs' + newopts.url;
- }
- delete newopts.callback;
-
- var createWrapper = function(successFn, callbackFn, failureFn) {
- Ext.apply(newopts, {
- success: function(response, options) {
- if (options.waitMsgTarget) {
- if (Proxmox.Utils.toolkit === 'touch') {
- options.waitMsgTarget.setMasked(false);
- } else {
- options.waitMsgTarget.setLoading(false);
- }
- }
- var result = Ext.decode(response.responseText);
- response.result = result;
- if (!result.success) {
- response.htmlStatus = Proxmox.Utils.extractRequestError(result, true);
- Ext.callback(callbackFn, options.scope, [options, false, response]);
- Ext.callback(failureFn, options.scope, [response, options]);
- return;
- }
- Ext.callback(callbackFn, options.scope, [options, true, response]);
- Ext.callback(successFn, options.scope, [response, options]);
- },
- failure: function(response, options) {
- if (options.waitMsgTarget) {
- if (Proxmox.Utils.toolkit === 'touch') {
- options.waitMsgTarget.setMasked(false);
- } else {
- options.waitMsgTarget.setLoading(false);
- }
- }
- response.result = {};
- try {
- response.result = Ext.decode(response.responseText);
- } catch(e) {}
- var msg = gettext('Connection error') + ' - server offline?';
- if (response.aborted) {
- msg = gettext('Connection error') + ' - aborted.';
- } else if (response.timedout) {
- msg = gettext('Connection error') + ' - Timeout.';
- } else if (response.status && response.statusText) {
- msg = gettext('Connection error') + ' ' + response.status + ': ' + response.statusText;
- }
- response.htmlStatus = msg;
- Ext.callback(callbackFn, options.scope, [options, false, response]);
- Ext.callback(failureFn, options.scope, [response, options]);
- }
- });
- };
-
- createWrapper(reqOpts.success, reqOpts.callback, reqOpts.failure);
-
- var target = newopts.waitMsgTarget;
- if (target) {
- if (Proxmox.Utils.toolkit === 'touch') {
- target.setMasked({ xtype: 'loadmask', message: newopts.waitMsg} );
- } else {
- // Note: ExtJS bug - this does not work when component is not rendered
- target.setLoading(newopts.waitMsg);
- }
- }
- Ext.Ajax.request(newopts);
- },
-
constructor: function() {
var me = this;
me.callParent();
if (me.makeApiCall) {
- PBS.Utils.API3Request({
+ Proxmox.Utils.API2Request({
url: '/version',
method: 'GET',
success: function(response) {