use crate::api_schema::router::*;
-pub fn router() -> Router {
+const NODES_ROUTER: Router = Router::new()
+ .match_all("node", &node::ROUTER);
- let nodes = Router::new()
- .match_all("node", node::router());
+pub const SUBDIRS: SubdirMap = &[
+ ("access", &access::ROUTER),
+ ("admin", &admin::ROUTER),
+ ("backup", &backup::ROUTER),
+ ("config", &config::ROUTER),
+ ("nodes", &NODES_ROUTER),
+ ("reader", &reader::ROUTER),
+ ("subscription", &subscription::ROUTER),
+ ("version", &version::ROUTER),
+];
- Router::new()
- .subdir("access", access::router())
- .subdir("admin", admin::router())
- .subdir("backup", backup::router())
- .subdir("reader", reader::router())
- .subdir("config", config::router())
- .subdir("nodes", nodes)
- .subdir("subscription", subscription::router())
- .subdir("version", version::router())
- .list_subdirs()
-}
+pub const ROUTER: Router = Router::new()
+ .get(&list_subdirs_api_method!(SUBDIRS))
+ .subdirs(SUBDIRS);
}
}
-pub fn router() -> Router {
- Router::new()
- .subdir(
- "ticket",
- Router::new()
- .post(
- ApiMethod::new(
- create_ticket,
- ObjectSchema::new("Create or verify authentication ticket.")
- .required(
+const SUBDIRS: SubdirMap = &[
+ (
+ "ticket", &Router::new()
+ .post(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&create_ticket),
+ &ObjectSchema::new(
+ "Create or verify authentication ticket.",
+ &[
+ (
"username",
- StringSchema::new("User name.")
+ false,
+ &StringSchema::new("User name.")
.max_length(64)
- )
- .required(
+ .schema()
+ ),
+ (
"password",
- StringSchema::new("The secret password. This can also be a valid ticket.")
- )
- ).returns(
- ObjectSchema::new("Returns authentication ticket with additional infos.")
- .required("username", StringSchema::new("User name."))
- .required("ticket", StringSchema::new("Auth ticket."))
- .required("CSRFPreventionToken", StringSchema::new("Cross Site Request Forgery Prevention Token."))
- ).protected(true)
- )
- )
- .list_subdirs()
-}
+ false,
+ &StringSchema::new("The secret password. This can also be a valid ticket.")
+ .schema()
+ ),
+ ],
+ )
+ ).returns(
+ &ObjectSchema::new(
+ "Returns authentication ticket with additional infos.",
+ &[
+ (
+ "username",
+ false,
+ &StringSchema::new("User name.").schema()
+ ),
+ (
+ "ticket",
+ false,
+ &StringSchema::new("Auth ticket.").schema()
+ ),
+ (
+ "CSRFPreventionToken",
+ false,
+ &StringSchema::new("Cross Site Request Forgery Prevention Token.")
+ .schema()
+ ),
+ ],
+ ).schema()
+ ).protected(true)
+ )
+ )
+];
+
+pub const ROUTER: Router = Router::new()
+ .get(&list_subdirs_api_method!(SUBDIRS))
+ .subdirs(SUBDIRS);
pub mod datastore;
-pub fn router() -> Router {
- Router::new()
- .subdir("datastore", datastore::router())
- .list_subdirs()
-}
+const SUBDIRS: SubdirMap = &[
+ ("datastore", &datastore::ROUTER)
+];
+
+pub const ROUTER: Router = Router::new()
+ .get(&list_subdirs_api_method!(SUBDIRS))
+ .subdirs(SUBDIRS);
use std::collections::{HashSet, HashMap};
use chrono::{DateTime, Datelike, TimeZone, Local};
use std::path::PathBuf;
-use std::sync::Arc;
use proxmox::tools::{try_block, fs::file_get_contents, fs::file_set_contents};
}))
}
-fn api_method_status() -> ApiMethod {
- ApiMethod::new(
- status,
- add_common_prune_prameters(
- ObjectSchema::new("Get datastore status.")
- .required(
- "store",
- StringSchema::new("Datastore name.")
- )
- )
- )
+#[macro_export]
+macro_rules! add_common_prune_prameters {
+ ($( $list:tt )*) => {
+ [
+ (
+ "keep-last",
+ true,
+ &IntegerSchema::new("Number of backups to keep.")
+ .minimum(1)
+ .schema()
+ ),
+ (
+ "keep-daily",
+ true,
+ &IntegerSchema::new("Number of daily backups to keep.")
+ .minimum(1)
+ .schema()
+ ),
+ (
+ "keep-weekly",
+ true,
+ &IntegerSchema::new("Number of weekly backups to keep.")
+ .minimum(1)
+ .schema()
+ ),
+ (
+ "keep-monthly",
+ true,
+ &IntegerSchema::new("Number of monthly backups to keep.")
+ .minimum(1)
+ .schema()
+ ),
+ (
+ "keep-yearly",
+ true,
+ &IntegerSchema::new("Number of yearly backups to keep.")
+ .minimum(1)
+ .schema()
+ ),
+ $( $list )*
+ ]
+ }
}
+const API_METHOD_STATUS: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&status),
+ &ObjectSchema::new(
+ "Get datastore status.",
+ &add_common_prune_prameters!(
+ ("store", false, &StringSchema::new("Datastore name.").schema()),
+ ),
+ )
+);
+
+
fn prune(
param: Value,
_info: &ApiMethod,
Ok(json!(null))
}
-pub fn add_common_prune_prameters(schema: ObjectSchema) -> ObjectSchema {
-
- schema
- .optional(
- "keep-last",
- IntegerSchema::new("Number of backups to keep.")
- .minimum(1)
- )
- .optional(
- "keep-daily",
- IntegerSchema::new("Number of daily backups to keep.")
- .minimum(1)
- )
- .optional(
- "keep-weekly",
- IntegerSchema::new("Number of weekly backups to keep.")
- .minimum(1)
- )
- .optional(
- "keep-monthly",
- IntegerSchema::new("Number of monthly backups to keep.")
- .minimum(1)
- )
- .optional(
- "keep-yearly",
- IntegerSchema::new("Number of yearly backups to keep.")
- .minimum(1)
- )
-}
-
-fn api_method_prune() -> ApiMethod {
- ApiMethod::new(
- prune,
- add_common_prune_prameters(
- ObjectSchema::new("Prune the datastore.")
- .required(
- "store",
- StringSchema::new("Datastore name.")
- )
- .required("backup-type", BACKUP_TYPE_SCHEMA.clone())
- .required("backup-id", BACKUP_ID_SCHEMA.clone())
+const API_METHOD_PRUNE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&prune),
+ &ObjectSchema::new(
+ "Prune the datastore.",
+ &add_common_prune_prameters!(
+ ("store", false, &StringSchema::new("Datastore name.").schema()),
+ ("backup-type", false, &BACKUP_TYPE_SCHEMA),
+ ("backup-id", false, &BACKUP_ID_SCHEMA),
)
)
-}
+);
fn start_garbage_collection(
param: Value,
Ok(json!(upid_str))
}
-pub fn api_method_start_garbage_collection() -> ApiMethod {
- ApiMethod::new(
- start_garbage_collection,
- ObjectSchema::new("Start garbage collection.")
- .required("store", StringSchema::new("Datastore name."))
+pub const API_METHOD_START_GARBAGE_COLLECTION: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&start_garbage_collection),
+ &ObjectSchema::new(
+ "Start garbage collection.",
+ &[ ("store", false, &StringSchema::new("Datastore name.").schema()) ]
)
-}
+);
fn garbage_collection_status(
param: Value,
Ok(serde_json::to_value(&status)?)
}
-pub fn api_method_garbage_collection_status() -> ApiMethod {
- ApiMethod::new(
- garbage_collection_status,
- ObjectSchema::new("Garbage collection status.")
- .required("store", StringSchema::new("Datastore name."))
+pub const API_METHOD_GARBAGE_COLLECTION_STATUS: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&garbage_collection_status),
+ &ObjectSchema::new(
+ "Garbage collection status.",
+ &[ ("store", false, &StringSchema::new("Datastore name.").schema()) ]
)
-}
+);
fn get_datastore_list(
_param: Value,
_parts: Parts,
_req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
_rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
Ok(Box::new(response_future))
}
-pub fn api_method_download_file() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- download_file,
- ObjectSchema::new("Download single raw file from backup snapshot.")
- .required("store", StringSchema::new("Datastore name."))
- .required("backup-type", BACKUP_TYPE_SCHEMA.clone())
- .required("backup-id", BACKUP_ID_SCHEMA.clone())
- .required("backup-time", BACKUP_TIME_SCHEMA.clone())
- .required("file-name", StringSchema::new("Raw file name.").format(FILENAME_FORMAT.clone()))
+pub const API_METHOD_DOWNLOAD_FILE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&download_file),
+ &ObjectSchema::new(
+ "Download single raw file from backup snapshot.",
+ &[
+ ("store", false, &StringSchema::new("Datastore name.").schema()),
+ ("backup-type", false, &BACKUP_TYPE_SCHEMA),
+ ("backup-id", false, &BACKUP_ID_SCHEMA),
+ ("backup-time", false, &BACKUP_TIME_SCHEMA),
+ ("file-name", false, &StringSchema::new("Raw file name.")
+ .format(&FILENAME_FORMAT)
+ .schema()
+ ),
+ ],
)
-}
+);
fn upload_backup_log(
_parts: Parts,
req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
_rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
Ok(Box::new(resp))
}
-pub fn api_method_upload_backup_log() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- upload_backup_log,
- ObjectSchema::new("Download single raw file from backup snapshot.")
- .required("store", StringSchema::new("Datastore name."))
- .required("backup-type", BACKUP_TYPE_SCHEMA.clone())
- .required("backup-id", BACKUP_ID_SCHEMA.clone())
- .required("backup-time", BACKUP_TIME_SCHEMA.clone())
+pub const API_METHOD_UPLOAD_BACKUP_LOG: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&upload_backup_log),
+ &ObjectSchema::new(
+        "Upload the client backup log file into a backup snapshot.",
+ &[
+ ("store", false, &StringSchema::new("Datastore name.").schema()),
+ ("backup-type", false, &BACKUP_TYPE_SCHEMA),
+ ("backup-id", false, &BACKUP_ID_SCHEMA),
+ ("backup-time", false, &BACKUP_TIME_SCHEMA),
+ ],
)
-}
-
-pub fn router() -> Router {
-
- let store_schema: Arc<Schema> = Arc::new(
- StringSchema::new("Datastore name.").into()
- );
-
- let datastore_info = Router::new()
- .subdir(
- "download",
- Router::new()
- .download(api_method_download_file())
- )
- .subdir(
- "upload-backup-log",
- Router::new()
- .upload(api_method_upload_backup_log())
- )
- .subdir(
- "gc",
- Router::new()
- .get(api_method_garbage_collection_status())
- .post(api_method_start_garbage_collection()))
- .subdir(
- "files",
- Router::new()
- .get(
- ApiMethod::new(
- list_snapshot_files,
- ObjectSchema::new("List snapshot files.")
- .required("store", store_schema.clone())
- .required("backup-type", BACKUP_TYPE_SCHEMA.clone())
- .required("backup-id", BACKUP_ID_SCHEMA.clone())
- .required("backup-time", BACKUP_TIME_SCHEMA.clone())
+);
+
+const STORE_SCHEMA: Schema = StringSchema::new("Datastore name.").schema();
+
+const DATASTORE_INFO_SUBDIRS: SubdirMap = &[
+ (
+ "download",
+ &Router::new()
+ .download(&API_METHOD_DOWNLOAD_FILE)
+ ),
+ (
+ "files",
+ &Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&list_snapshot_files),
+ &ObjectSchema::new(
+ "List snapshot files.",
+ &[
+ ("store", false, &STORE_SCHEMA),
+ ("backup-type", false, &BACKUP_TYPE_SCHEMA),
+ ("backup-id", false, &BACKUP_ID_SCHEMA),
+ ("backup-time", false, &BACKUP_TIME_SCHEMA),
+ ],
)
)
- )
- .subdir(
- "groups",
- Router::new()
- .get(ApiMethod::new(
- list_groups,
- ObjectSchema::new("List backup groups.")
- .required("store", store_schema.clone()))))
- .subdir(
- "snapshots",
- Router::new()
- .get(
- ApiMethod::new(
- list_snapshots,
- ObjectSchema::new("List backup groups.")
- .required("store", store_schema.clone())
- .optional("backup-type", BACKUP_TYPE_SCHEMA.clone())
- .optional("backup-id", BACKUP_ID_SCHEMA.clone())
+ )
+ ),
+ (
+ "gc",
+ &Router::new()
+ .get(&API_METHOD_GARBAGE_COLLECTION_STATUS)
+ .post(&API_METHOD_START_GARBAGE_COLLECTION)
+ ),
+ (
+ "groups",
+ &Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&list_groups),
+ &ObjectSchema::new(
+ "List backup groups.",
+ &[ ("store", false, &STORE_SCHEMA) ],
)
)
- .delete(
- ApiMethod::new(
- delete_snapshots,
- ObjectSchema::new("Delete backup snapshot.")
- .required("store", store_schema.clone())
- .required("backup-type", BACKUP_TYPE_SCHEMA.clone())
- .required("backup-id", BACKUP_ID_SCHEMA.clone())
- .required("backup-time", BACKUP_TIME_SCHEMA.clone())
- )
+ )
+ ),
+ (
+ "prune",
+ &Router::new()
+ .post(&API_METHOD_PRUNE)
+ ),
+ (
+ "snapshots",
+ &Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&list_snapshots),
+ &ObjectSchema::new(
+                    "List backup snapshots.",
+ &[
+ ("store", false, &STORE_SCHEMA),
+ ("backup-type", true, &BACKUP_TYPE_SCHEMA),
+ ("backup-id", true, &BACKUP_ID_SCHEMA),
+ ],
+ )
)
+ )
+ .delete(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&delete_snapshots),
+ &ObjectSchema::new(
+ "Delete backup snapshot.",
+ &[
+ ("store", false, &STORE_SCHEMA),
+ ("backup-type", false, &BACKUP_TYPE_SCHEMA),
+ ("backup-id", false, &BACKUP_ID_SCHEMA),
+ ("backup-time", false, &BACKUP_TIME_SCHEMA),
+ ],
+ )
+ )
+ )
+ ),
+ (
+ "status",
+ &Router::new()
+ .get(&API_METHOD_STATUS)
+ ),
+ (
+ "upload-backup-log",
+ &Router::new()
+ .upload(&API_METHOD_UPLOAD_BACKUP_LOG)
+ ),
+];
+
+const DATASTORE_INFO_ROUTER: Router = Router::new()
+ .get(&list_subdirs_api_method!(DATASTORE_INFO_SUBDIRS))
+ .subdirs(DATASTORE_INFO_SUBDIRS);
+
+
+pub const ROUTER: Router = Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&get_datastore_list),
+ &ObjectSchema::new("Directory index.", &[])
)
- .subdir(
- "prune",
- Router::new()
- .post(api_method_prune())
- )
- .subdir(
- "status",
- Router::new()
- .get(api_method_status())
- )
- .list_subdirs();
-
- Router::new()
- .get(ApiMethod::new(
- get_datastore_list,
- ObjectSchema::new("Directory index.")))
- .match_all("store", datastore_info)
-}
+ )
+ .match_all("store", &DATASTORE_INFO_ROUTER);
use failure::*;
-use lazy_static::lazy_static;
-
-//use std::sync::Arc;
use futures::*;
use hyper::header::{HeaderValue, UPGRADE};
mod upload_chunk;
use upload_chunk::*;
-pub fn router() -> Router {
- Router::new()
- .upgrade(api_method_upgrade_backup())
-}
-
-pub fn api_method_upgrade_backup() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- upgrade_to_backup_protocol,
- ObjectSchema::new(concat!("Upgraded to backup protocol ('", PROXMOX_BACKUP_PROTOCOL_ID_V1!(), "')."))
- .required("store", StringSchema::new("Datastore name."))
- .required("backup-type", BACKUP_TYPE_SCHEMA.clone())
- .required("backup-id", BACKUP_ID_SCHEMA.clone())
- .required("backup-time", BACKUP_TIME_SCHEMA.clone())
- .optional("debug", BooleanSchema::new("Enable verbose debug logging."))
+pub const ROUTER: Router = Router::new()
+ .upgrade(&API_METHOD_UPGRADE_BACKUP);
+
+pub const API_METHOD_UPGRADE_BACKUP: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&upgrade_to_backup_protocol),
+ &ObjectSchema::new(
+ concat!("Upgraded to backup protocol ('", PROXMOX_BACKUP_PROTOCOL_ID_V1!(), "')."),
+ &[
+ ("store", false, &StringSchema::new("Datastore name.").schema()),
+ ("backup-type", false, &BACKUP_TYPE_SCHEMA),
+ ("backup-id", false, &BACKUP_ID_SCHEMA),
+ ("backup-time", false, &BACKUP_TIME_SCHEMA),
+ ("debug", true, &BooleanSchema::new("Enable verbose debug logging.").schema()),
+ ],
)
-}
+);
fn upgrade_to_backup_protocol(
parts: Parts,
req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
env.log(format!("starting new backup on datastore '{}': {:?}", store, path));
- let service = H2Service::new(env.clone(), worker.clone(), &BACKUP_ROUTER, debug);
+ let service = H2Service::new(env.clone(), worker.clone(), &BACKUP_API_ROUTER, debug);
let abort_future = worker.abort_future();
Ok(Box::new(futures::future::ok(response)))
}
-lazy_static!{
- static ref BACKUP_ROUTER: Router = backup_api();
-}
-
-pub fn backup_api() -> Router {
- Router::new()
- .subdir(
- "blob", Router::new()
- .upload(api_method_upload_blob())
- )
- .subdir(
- "dynamic_chunk", Router::new()
- .upload(api_method_upload_dynamic_chunk())
- )
- .subdir(
- "dynamic_index", Router::new()
- .download(api_method_dynamic_chunk_index())
- .post(api_method_create_dynamic_index())
- .put(api_method_dynamic_append())
- )
- .subdir(
- "dynamic_close", Router::new()
- .post(api_method_close_dynamic_index())
- )
- .subdir(
- "fixed_chunk", Router::new()
- .upload(api_method_upload_fixed_chunk())
- )
- .subdir(
- "fixed_index", Router::new()
- .download(api_method_fixed_chunk_index())
- .post(api_method_create_fixed_index())
- .put(api_method_fixed_append())
- )
- .subdir(
- "fixed_close", Router::new()
- .post(api_method_close_fixed_index())
- )
- .subdir(
- "finish", Router::new()
- .post(
- ApiMethod::new(
- finish_backup,
- ObjectSchema::new("Mark backup as finished.")
- )
+pub const BACKUP_API_SUBDIRS: SubdirMap = &[
+ (
+ "blob", &Router::new()
+ .upload(&API_METHOD_UPLOAD_BLOB)
+ ),
+ (
+ "dynamic_chunk", &Router::new()
+ .upload(&API_METHOD_UPLOAD_DYNAMIC_CHUNK)
+ ),
+ (
+ "dynamic_close", &Router::new()
+ .post(&API_METHOD_CLOSE_DYNAMIC_INDEX)
+ ),
+ (
+ "dynamic_index", &Router::new()
+ .download(&API_METHOD_DYNAMIC_CHUNK_INDEX)
+ .post(&API_METHOD_CREATE_DYNAMIC_INDEX)
+ .put(&API_METHOD_DYNAMIC_APPEND)
+ ),
+ (
+ "finish", &Router::new()
+ .post(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&finish_backup),
+ &ObjectSchema::new("Mark backup as finished.", &[])
)
- )
- .subdir(
- "speedtest", Router::new()
- .upload(api_method_upload_speedtest())
- )
- .list_subdirs()
-}
-
-pub fn api_method_create_dynamic_index() -> ApiMethod {
- ApiMethod::new(
- create_dynamic_index,
- ObjectSchema::new("Create dynamic chunk index file.")
- .required("archive-name", crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA.clone())
+ )
+ ),
+ (
+ "fixed_chunk", &Router::new()
+ .upload(&API_METHOD_UPLOAD_FIXED_CHUNK)
+ ),
+ (
+ "fixed_close", &Router::new()
+ .post(&API_METHOD_CLOSE_FIXED_INDEX)
+ ),
+ (
+ "fixed_index", &Router::new()
+ .download(&API_METHOD_FIXED_CHUNK_INDEX)
+ .post(&API_METHOD_CREATE_FIXED_INDEX)
+ .put(&API_METHOD_FIXED_APPEND)
+ ),
+ (
+ "speedtest", &Router::new()
+ .upload(&API_METHOD_UPLOAD_SPEEDTEST)
+ ),
+];
+
+pub const BACKUP_API_ROUTER: Router = Router::new()
+ .get(&list_subdirs_api_method!(BACKUP_API_SUBDIRS))
+ .subdirs(BACKUP_API_SUBDIRS);
+
+pub const API_METHOD_CREATE_DYNAMIC_INDEX: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&create_dynamic_index),
+ &ObjectSchema::new(
+ "Create dynamic chunk index file.",
+ &[
+ ("archive-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA),
+ ],
)
-}
+);
fn create_dynamic_index(
param: Value,
Ok(json!(wid))
}
-pub fn api_method_create_fixed_index() -> ApiMethod {
- ApiMethod::new(
- create_fixed_index,
- ObjectSchema::new("Create fixed chunk index file.")
- .required("archive-name", crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA.clone())
- .required("size", IntegerSchema::new("File size.")
- .minimum(1)
- )
+pub const API_METHOD_CREATE_FIXED_INDEX: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&create_fixed_index),
+ &ObjectSchema::new(
+ "Create fixed chunk index file.",
+ &[
+ ("archive-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA),
+ ("size", false, &IntegerSchema::new("File size.")
+ .minimum(1)
+ .schema()
+ ),
+ ],
)
-}
+);
fn create_fixed_index(
param: Value,
Ok(json!(wid))
}
-pub fn api_method_dynamic_append() -> ApiMethod {
- ApiMethod::new(
- dynamic_append,
- ObjectSchema::new("Append chunk to dynamic index writer.")
- .required("wid", IntegerSchema::new("Dynamic writer ID.")
- .minimum(1)
- .maximum(256)
- )
- .required("digest-list", ArraySchema::new(
- "Chunk digest list.", CHUNK_DIGEST_SCHEMA.clone())
- )
- .required("offset-list", ArraySchema::new(
- "Chunk offset list.",
- IntegerSchema::new("Corresponding chunk offsets.")
- .minimum(0)
- .into())
- )
+pub const API_METHOD_DYNAMIC_APPEND: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&dynamic_append),
+ &ObjectSchema::new(
+ "Append chunk to dynamic index writer.",
+ &[
+ (
+ "wid",
+ false,
+ &IntegerSchema::new("Dynamic writer ID.")
+ .minimum(1)
+ .maximum(256)
+ .schema()
+ ),
+ (
+ "digest-list",
+ false,
+ &ArraySchema::new("Chunk digest list.", &CHUNK_DIGEST_SCHEMA).schema()
+ ),
+ (
+ "offset-list",
+ false,
+ &ArraySchema::new(
+ "Chunk offset list.",
+ &IntegerSchema::new("Corresponding chunk offsets.")
+ .minimum(0)
+ .schema()
+ ).schema()
+ ),
+ ],
)
-}
+);
fn dynamic_append (
param: Value,
Ok(Value::Null)
}
-pub fn api_method_fixed_append() -> ApiMethod {
- ApiMethod::new(
- fixed_append,
- ObjectSchema::new("Append chunk to fixed index writer.")
- .required("wid", IntegerSchema::new("Fixed writer ID.")
- .minimum(1)
- .maximum(256)
- )
- .required("digest-list", ArraySchema::new(
- "Chunk digest list.", CHUNK_DIGEST_SCHEMA.clone())
- )
- .required("offset-list", ArraySchema::new(
- "Chunk offset list.",
- IntegerSchema::new("Corresponding chunk offsets.")
- .minimum(0)
- .into())
+pub const API_METHOD_FIXED_APPEND: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&fixed_append),
+ &ObjectSchema::new(
+ "Append chunk to fixed index writer.",
+ &[
+ (
+ "wid",
+ false,
+ &IntegerSchema::new("Fixed writer ID.")
+ .minimum(1)
+ .maximum(256)
+ .schema()
+ ),
+ (
+ "digest-list",
+ false,
+ &ArraySchema::new("Chunk digest list.", &CHUNK_DIGEST_SCHEMA).schema()
+ ),
+ (
+ "offset-list",
+ false,
+ &ArraySchema::new(
+ "Chunk offset list.",
+ &IntegerSchema::new("Corresponding chunk offsets.")
+ .minimum(0)
+ .schema()
+ ).schema()
)
+ ],
)
-}
+);
fn fixed_append (
param: Value,
Ok(Value::Null)
}
-pub fn api_method_close_dynamic_index() -> ApiMethod {
- ApiMethod::new(
- close_dynamic_index,
- ObjectSchema::new("Close dynamic index writer.")
- .required("wid", IntegerSchema::new("Dynamic writer ID.")
- .minimum(1)
- .maximum(256)
- )
- .required("chunk-count", IntegerSchema::new("Chunk count. This is used to verify that the server got all chunks.")
- .minimum(1)
- )
- .required("size", IntegerSchema::new("File size. This is used to verify that the server got all data.")
- .minimum(1)
- )
- .required("csum", StringSchema::new("Digest list checksum."))
+pub const API_METHOD_CLOSE_DYNAMIC_INDEX: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&close_dynamic_index),
+ &ObjectSchema::new(
+ "Close dynamic index writer.",
+ &[
+ (
+ "wid",
+ false,
+ &IntegerSchema::new("Dynamic writer ID.")
+ .minimum(1)
+ .maximum(256)
+ .schema()
+ ),
+ (
+ "chunk-count",
+ false,
+ &IntegerSchema::new("Chunk count. This is used to verify that the server got all chunks.")
+ .minimum(1)
+ .schema()
+ ),
+ (
+ "size",
+ false,
+ &IntegerSchema::new("File size. This is used to verify that the server got all data.")
+ .minimum(1)
+ .schema()
+ ),
+ ("csum", false, &StringSchema::new("Digest list checksum.").schema()),
+ ],
)
-}
+);
fn close_dynamic_index (
param: Value,
Ok(Value::Null)
}
-pub fn api_method_close_fixed_index() -> ApiMethod {
- ApiMethod::new(
- close_fixed_index,
- ObjectSchema::new("Close fixed index writer.")
- .required("wid", IntegerSchema::new("Fixed writer ID.")
- .minimum(1)
- .maximum(256)
- )
- .required("chunk-count", IntegerSchema::new("Chunk count. This is used to verify that the server got all chunks.")
- .minimum(1)
- )
- .required("size", IntegerSchema::new("File size. This is used to verify that the server got all data.")
- .minimum(1)
- )
- .required("csum", StringSchema::new("Digest list checksum."))
+pub const API_METHOD_CLOSE_FIXED_INDEX: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&close_fixed_index),
+ &ObjectSchema::new(
+ "Close fixed index writer.",
+ &[
+ (
+ "wid",
+ false,
+ &IntegerSchema::new("Fixed writer ID.")
+ .minimum(1)
+ .maximum(256)
+ .schema()
+ ),
+ (
+ "chunk-count",
+ false,
+ &IntegerSchema::new("Chunk count. This is used to verify that the server got all chunks.")
+ .minimum(1)
+ .schema()
+ ),
+ (
+ "size",
+ false,
+ &IntegerSchema::new("File size. This is used to verify that the server got all data.")
+ .minimum(1)
+ .schema()
+ ),
+ ("csum", false, &StringSchema::new("Digest list checksum.").schema()),
+ ],
)
-}
+);
fn close_fixed_index (
param: Value,
Ok(Value::Null)
}
-pub fn api_method_dynamic_chunk_index() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- dynamic_chunk_index,
- ObjectSchema::new(r###"
+pub const API_METHOD_DYNAMIC_CHUNK_INDEX: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&dynamic_chunk_index),
+ &ObjectSchema::new(
+ r###"
Download the dynamic chunk index from the previous backup.
Simply returns an empty list if this is the first backup.
-"###
- )
- .required("archive-name", crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA.clone())
+"### ,
+ &[ ("archive-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA) ],
)
-}
+);
fn dynamic_chunk_index(
_parts: Parts,
_req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
Ok(Box::new(future::ok(response)))
}
-pub fn api_method_fixed_chunk_index() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- fixed_chunk_index,
- ObjectSchema::new(r###"
+pub const API_METHOD_FIXED_CHUNK_INDEX: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&fixed_chunk_index),
+ &ObjectSchema::new(
+ r###"
Download the fixed chunk index from the previous backup.
Simply returns an empty list if this is the first backup.
-"###
- )
- .required("archive-name", crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA.clone())
+"### ,
+ &[ ("archive-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA) ],
)
-}
+);
fn fixed_chunk_index(
_parts: Parts,
_req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
}
}
-pub fn api_method_upload_fixed_chunk() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- upload_fixed_chunk,
- ObjectSchema::new("Upload a new chunk.")
- .required("wid", IntegerSchema::new("Fixed writer ID.")
- .minimum(1)
- .maximum(256)
- )
- .required("digest", CHUNK_DIGEST_SCHEMA.clone())
- .required("size", IntegerSchema::new("Chunk size.")
- .minimum(1)
- .maximum(1024*1024*16)
- )
- .required("encoded-size", IntegerSchema::new("Encoded chunk size.")
- .minimum((std::mem::size_of::<DataBlobHeader>() as isize)+1)
- .maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
- )
+pub const API_METHOD_UPLOAD_FIXED_CHUNK: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&upload_fixed_chunk),
+ &ObjectSchema::new(
+ "Upload a new chunk.",
+ &[
+ ("wid", false, &IntegerSchema::new("Fixed writer ID.")
+ .minimum(1)
+ .maximum(256)
+ .schema()
+ ),
+ ("digest", false, &CHUNK_DIGEST_SCHEMA),
+ ("size", false, &IntegerSchema::new("Chunk size.")
+ .minimum(1)
+ .maximum(1024*1024*16)
+ .schema()
+ ),
+ ("encoded-size", false, &IntegerSchema::new("Encoded chunk size.")
+ .minimum((std::mem::size_of::<DataBlobHeader>() as isize)+1)
+ .maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
+ .schema()
+ ),
+ ],
)
-}
+);
fn upload_fixed_chunk(
_parts: Parts,
req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
Ok(Box::new(resp))
}
-pub fn api_method_upload_dynamic_chunk() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- upload_dynamic_chunk,
- ObjectSchema::new("Upload a new chunk.")
- .required("wid", IntegerSchema::new("Dynamic writer ID.")
- .minimum(1)
- .maximum(256)
- )
- .required("digest", CHUNK_DIGEST_SCHEMA.clone())
- .required("size", IntegerSchema::new("Chunk size.")
- .minimum(1)
- .maximum(1024*1024*16)
- )
- .required("encoded-size", IntegerSchema::new("Encoded chunk size.")
- .minimum((std::mem::size_of::<DataBlobHeader>() as isize) +1)
- .maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
- )
+pub const API_METHOD_UPLOAD_DYNAMIC_CHUNK: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&upload_dynamic_chunk),
+ &ObjectSchema::new(
+ "Upload a new chunk.",
+ &[
+ ("wid", false, &IntegerSchema::new("Dynamic writer ID.")
+ .minimum(1)
+ .maximum(256)
+ .schema()
+ ),
+ ("digest", false, &CHUNK_DIGEST_SCHEMA),
+ ("size", false, &IntegerSchema::new("Chunk size.")
+ .minimum(1)
+ .maximum(1024*1024*16)
+ .schema()
+ ),
+ ("encoded-size", false, &IntegerSchema::new("Encoded chunk size.")
+ .minimum((std::mem::size_of::<DataBlobHeader>() as isize) +1)
+ .maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
+ .schema()
+ ),
+ ],
)
-}
+);
fn upload_dynamic_chunk(
_parts: Parts,
req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
Ok(Box::new(resp))
}
-pub fn api_method_upload_speedtest() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- upload_speedtest,
- ObjectSchema::new("Test uploadf speed.")
- )
-}
+pub const API_METHOD_UPLOAD_SPEEDTEST: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&upload_speedtest),
+ &ObjectSchema::new("Test upload speed.", &[])
+);
fn upload_speedtest(
_parts: Parts,
req_body: Body,
_param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
Ok(Box::new(resp))
}
-pub fn api_method_upload_blob() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- upload_blob,
- ObjectSchema::new("Upload binary blob file.")
- .required("file-name", crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA.clone())
- .required("encoded-size", IntegerSchema::new("Encoded blob size.")
- .minimum((std::mem::size_of::<DataBlobHeader>() as isize) +1)
- .maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
+pub const API_METHOD_UPLOAD_BLOB: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&upload_blob),
+ &ObjectSchema::new(
+ "Upload binary blob file.",
+ &[
+ ("file-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA),
+ ("encoded-size", false, &IntegerSchema::new("Encoded blob size.")
+ .minimum((std::mem::size_of::<DataBlobHeader>() as isize) +1)
+ .maximum(1024*1024*16+(std::mem::size_of::<EncryptedDataBlobHeader>() as isize))
+ .schema()
)
+ ],
)
-}
+);
fn upload_blob(
_parts: Parts,
req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
-//use failure::*;
-//use std::collections::HashMap;
-
-//use crate::api_schema;
use crate::api_schema::router::*;
pub mod datastore;
-pub fn router() -> Router {
- Router::new()
- .subdir("datastore", datastore::router())
- .list_subdirs()
-}
+const SUBDIRS: SubdirMap = &[
+ ("datastore", &datastore::ROUTER)
+];
+
+pub const ROUTER: Router = Router::new()
+ .get(&list_subdirs_api_method!(SUBDIRS))
+ .subdirs(SUBDIRS);
use crate::config::datastore;
-pub fn get() -> ApiMethod {
- ApiMethod::new(
- get_datastore_list,
- ObjectSchema::new("Directory index."))
-}
+pub const GET: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&get_datastore_list),
+ &ObjectSchema::new("Directory index.", &[])
+);
fn get_datastore_list(
_param: Value,
Ok(config.convert_to_array("name"))
}
-pub fn post() -> ApiMethod {
- ApiMethod::new(
- create_datastore,
- ObjectSchema::new("Create new datastore.")
- .required("name", StringSchema::new("Datastore name."))
- .required("path", StringSchema::new("Directory path (must exist)."))
- )
-}
+pub const POST: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&create_datastore),
+ &ObjectSchema::new(
+ "Create new datastore.",
+ &[
+ ("name", false, &StringSchema::new("Datastore name.").schema()),
+ ("path", false, &StringSchema::new("Directory path (must exist).").schema()),
+ ],
+ )
+);
fn create_datastore(
param: Value,
Ok(Value::Null)
}
-pub fn delete() -> ApiMethod {
- ApiMethod::new(
- delete_datastore,
- ObjectSchema::new("Remove a datastore configuration.")
- .required("name", StringSchema::new("Datastore name.")))
-}
+pub const DELETE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&delete_datastore),
+ &ObjectSchema::new(
+ "Remove a datastore configuration.",
+ &[
+ ("name", false, &StringSchema::new("Datastore name.").schema()),
+ ],
+ )
+);
fn delete_datastore(
param: Value,
Ok(Value::Null)
}
-pub fn router() -> Router {
- Router::new()
- .get(get())
- .post(post())
- .delete(delete())
-}
+pub const ROUTER: Router = Router::new()
+ .get(&GET)
+ .post(&POST)
+ .delete(&DELETE);
mod syslog;
mod services;
-pub fn router() -> Router {
- Router::new()
- .subdir("dns", dns::router())
- .subdir("network", network::router())
- .subdir("services", services::router())
- .subdir("syslog", syslog::router())
- .subdir("tasks", tasks::router())
- .subdir("time", time::router())
- .list_subdirs()
-}
+pub const SUBDIRS: SubdirMap = &[
+ ("dns", &dns::ROUTER),
+ ("network", &network::ROUTER),
+ ("services", &services::ROUTER),
+ ("syslog", &syslog::ROUTER),
+ ("tasks", &tasks::ROUTER),
+ ("time", &time::ROUTER),
+];
+
+pub const ROUTER: Router = Router::new()
+ .get(&list_subdirs_api_method!(SUBDIRS))
+ .subdirs(SUBDIRS);
read_etc_resolv_conf()
}
-pub fn router() -> Router {
- Router::new()
- .get(
- ApiMethod::new(
- get_dns,
- ObjectSchema::new("Read DNS settings.")
- .required("node", NODE_SCHEMA.clone())
- ).returns(
- ObjectSchema::new("Returns DNS server IPs and sreach domain.")
- .required("digest", PVE_CONFIG_DIGEST_SCHEMA.clone())
- .optional("search", SEARCH_DOMAIN_SCHEMA.clone())
- .optional("dns1", FIRST_DNS_SERVER_SCHEMA.clone())
- .optional("dns2", SECOND_DNS_SERVER_SCHEMA.clone())
- .optional("dns3", THIRD_DNS_SERVER_SCHEMA.clone())
+pub const ROUTER: Router = Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&get_dns),
+ &ObjectSchema::new(
+ "Read DNS settings.",
+ &[ ("node", false, &NODE_SCHEMA) ],
)
+ ).returns(
+ &ObjectSchema::new(
+ "Returns DNS server IPs and search domain.",
+ &[
+ ("digest", false, &PVE_CONFIG_DIGEST_SCHEMA),
+ ("search", true, &SEARCH_DOMAIN_SCHEMA),
+ ("dns1", true, &FIRST_DNS_SERVER_SCHEMA),
+ ("dns2", true, &SECOND_DNS_SERVER_SCHEMA),
+ ("dns3", true, &THIRD_DNS_SERVER_SCHEMA),
+ ],
+ ).schema()
)
- .put(
- ApiMethod::new(
- update_dns,
- ObjectSchema::new("Returns DNS server IPs and sreach domain.")
- .required("node", NODE_SCHEMA.clone())
- .required("search", SEARCH_DOMAIN_SCHEMA.clone())
- .optional("dns1", FIRST_DNS_SERVER_SCHEMA.clone())
- .optional("dns2", SECOND_DNS_SERVER_SCHEMA.clone())
- .optional("dns3", THIRD_DNS_SERVER_SCHEMA.clone())
- .optional("digest", PVE_CONFIG_DIGEST_SCHEMA.clone())
- ).protected(true)
- )
-}
+ )
+ .put(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&update_dns),
+ &ObjectSchema::new(
+ "Returns DNS server IPs and search domain.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("search", false, &SEARCH_DOMAIN_SCHEMA),
+ ("dns1", true, &FIRST_DNS_SERVER_SCHEMA),
+ ("dns2", true, &SECOND_DNS_SERVER_SCHEMA),
+ ("dns3", true, &THIRD_DNS_SERVER_SCHEMA),
+ ("digest", true, &PVE_CONFIG_DIGEST_SCHEMA),
+ ],
+ )
+ ).protected(true)
+ );
Ok(json!({}))
}
-pub fn router() -> Router {
- Router::new()
- .get(ApiMethod::new(
- get_network_config,
- ObjectSchema::new("Read network configuration.")
- .required("node", NODE_SCHEMA.clone())
- ))
-}
+pub const ROUTER: Router = Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&get_network_config),
+ &ObjectSchema::new(
+ "Read network configuration.",
+ &[ ("node", false, &NODE_SCHEMA) ],
+ )
+ )
+ );
+
use crate::api_schema::router::*;
use serde_json::{json, Value};
-use std::sync::Arc;
use std::process::{Command, Stdio};
use crate::api2::types::*;
run_service_command(service, "reload")
}
-pub fn router() -> Router {
-
- let service_id_schema : Arc<Schema> = Arc::new(
- StringSchema::new("Service ID.")
- .max_length(256)
- .into()
- );
-
- let service_api = Router::new()
- .subdir(
- "state",
- Router::new()
- .get(ApiMethod::new(
- get_service_state,
- ObjectSchema::new("Read service properties.")
- .required("node", NODE_SCHEMA.clone())
- .required("service", service_id_schema.clone()))
- )
- )
- .subdir(
- "start",
- Router::new()
- .post(
- ApiMethod::new(
- start_service,
- ObjectSchema::new("Start service.")
- .required("node", NODE_SCHEMA.clone())
- .required("service", service_id_schema.clone())
- ).protected(true)
- )
- )
- .subdir(
- "stop",
- Router::new()
- .post(
- ApiMethod::new(
- stop_service,
- ObjectSchema::new("Stop service.")
- .required("node", NODE_SCHEMA.clone())
- .required("service", service_id_schema.clone())
- ).protected(true)
- )
- )
- .subdir(
- "restart",
- Router::new()
- .post(
- ApiMethod::new(
- restart_service,
- ObjectSchema::new("Restart service.")
- .required("node", NODE_SCHEMA.clone())
- .required("service", service_id_schema.clone())
- ).protected(true)
- )
- )
- .subdir(
- "reload",
- Router::new()
- .post(
- ApiMethod::new(
- reload_service,
- ObjectSchema::new("Reload service.")
- .required("node", NODE_SCHEMA.clone())
- .required("service", service_id_schema.clone())
- ).protected(true)
- )
- )
- .list_subdirs();
-
- Router::new()
- .get(
- ApiMethod::new(
- list_services,
- ObjectSchema::new("Service list.")
- .required("node", NODE_SCHEMA.clone())
- ).returns(
- ArraySchema::new(
- "Returns a list of systemd services.",
- ObjectSchema::new("Service details.")
- .required("service", service_id_schema.clone())
- .required("name", StringSchema::new("systemd service name."))
- .required("desc", StringSchema::new("systemd service description."))
- .required("state", StringSchema::new("systemd service 'SubState'."))
- .into()
+
+const SERVICE_ID_SCHEMA: Schema = StringSchema::new("Service ID.")
+ .max_length(256)
+ .schema();
+
+const SERVICE_SUBDIRS: SubdirMap = &[
+ (
+ "reload", &Router::new()
+ .post(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&reload_service),
+ &ObjectSchema::new(
+ "Reload service.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("service", false, &SERVICE_ID_SCHEMA),
+ ],
+ )
+ ).protected(true)
+ )
+ ),
+ (
+ "restart", &Router::new()
+ .post(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&restart_service),
+ &ObjectSchema::new(
+ "Restart service.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("service", false, &SERVICE_ID_SCHEMA),
+ ],
+ )
+ ).protected(true)
+ )
+ ),
+ (
+ "start", &Router::new()
+ .post(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&start_service),
+ &ObjectSchema::new(
+ "Start service.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("service", false, &SERVICE_ID_SCHEMA),
+ ],
+ )
+ ).protected(true)
+ )
+ ),
+ (
+ "state", &Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&get_service_state),
+ &ObjectSchema::new(
+ "Read service properties.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("service", false, &SERVICE_ID_SCHEMA),
+ ],
+ )
)
)
+ ),
+ (
+ "stop", &Router::new()
+ .post(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&stop_service),
+ &ObjectSchema::new(
+ "Stop service.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("service", false, &SERVICE_ID_SCHEMA),
+ ],
+ )
+ ).protected(true)
+ )
+ ),
+];
+
+const SERVICE_ROUTER: Router = Router::new()
+ .get(&list_subdirs_api_method!(SERVICE_SUBDIRS))
+ .subdirs(SERVICE_SUBDIRS);
+
+pub const ROUTER: Router = Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&list_services),
+ &ObjectSchema::new(
+ "Service list.",
+ &[ ("node", false, &NODE_SCHEMA) ],
+ )
+ ).returns(
+ &ArraySchema::new(
+ "Returns a list of systemd services.",
+ &ObjectSchema::new(
+ "Service details.",
+ &[
+ ("service", false, &SERVICE_ID_SCHEMA),
+ ("name", false, &StringSchema::new("systemd service name.").schema()),
+ ("desc", false, &StringSchema::new("systemd service description.").schema()),
+ ("state", false, &StringSchema::new("systemd service 'SubState'.").schema()),
+ ],
+ ).schema()
+ ).schema()
)
- .match_all("service", service_api)
-}
+ )
+ .match_all("service", &SERVICE_ROUTER);
+
use crate::api2::types::*;
use serde_json::{json, Value};
-
-use std::sync::Arc;
-use lazy_static::lazy_static;
-use proxmox::tools::common_regex;
use std::process::{Command, Stdio};
fn dump_journal(
Ok(json!(lines))
}
-lazy_static! {
- pub static ref SYSTEMD_DATETIME_FORMAT: Arc<ApiStringFormat> =
- ApiStringFormat::Pattern(&common_regex::SYSTEMD_DATETIME_REGEX).into();
-}
+pub const ROUTER: Router = Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&get_syslog),
+ &ObjectSchema::new(
+ "Read server syslog entries.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("start", true, &IntegerSchema::new("Start line number.")
+ .minimum(0)
+ .schema()
+ ),
+ ("limit", true, &IntegerSchema::new("Max. number of lines.")
+ .minimum(0)
+ .schema()
+ ),
+ ("since", true, &StringSchema::new("Display all log since this date-time string.")
+ .format(&SYSTEMD_DATETIME_FORMAT)
+ .schema()
+ ),
+ ("until", true, &StringSchema::new("Display all log until this date-time string.")
+ .format(&SYSTEMD_DATETIME_FORMAT)
+ .schema()
+ ),
+ ("service", true, &StringSchema::new("Service ID.")
+ .max_length(128)
+ .schema()
+ ),
+ ],
+ )
+ ).returns(
+ &ObjectSchema::new(
+ "Returns a list of syslog entries.",
+ &[
+ ("n", false, &IntegerSchema::new("Line number.").schema()),
+ ("t", false, &StringSchema::new("Line text.").schema()),
+ ],
+ ).schema()
+ ).protected(true)
+ );
-pub fn router() -> Router {
- Router::new()
- .get(
- ApiMethod::new(
- get_syslog,
- ObjectSchema::new("Read server time and time zone settings.")
- .required("node", NODE_SCHEMA.clone())
- .optional(
- "start",
- IntegerSchema::new("Start line number.")
- .minimum(0)
- )
- .optional(
- "limit",
- IntegerSchema::new("Max. number of lines.")
- .minimum(0)
- )
- .optional(
- "since",
- StringSchema::new("Display all log since this date-time string.")
- .format(SYSTEMD_DATETIME_FORMAT.clone())
- )
- .optional(
- "until",
- StringSchema::new("Display all log until this date-time string.")
- .format(SYSTEMD_DATETIME_FORMAT.clone())
- )
- .optional(
- "service",
- StringSchema::new("Service ID.")
- .max_length(128)
- )
- ).returns(
- ObjectSchema::new("Returns a list of syslog entries.")
- .required("n", IntegerSchema::new("Line number."))
- .required("t", StringSchema::new("Line text."))
- ).protected(true)
- )
-}
use crate::api_schema::*;
use crate::api_schema::router::*;
use serde_json::{json, Value};
-use std::sync::Arc;
use std::fs::File;
use std::io::{BufRead,BufReader};
Ok(json!(result))
}
-pub fn router() -> Router {
-
- let upid_schema: Arc<Schema> = Arc::new(
- StringSchema::new("Unique Process/Task ID.")
- .max_length(256)
- .into()
- );
-
- let upid_api = Router::new()
- .delete(ApiMethod::new(
- stop_task,
- ObjectSchema::new("Try to stop a task.")
- .required("node", NODE_SCHEMA.clone())
- .required("upid", upid_schema.clone())).protected(true)
-
- )
- .subdir(
- "log", Router::new()
- .get(
- ApiMethod::new(
- read_task_log,
- ObjectSchema::new("Read task log.")
- .required("node", NODE_SCHEMA.clone())
- .required("upid", upid_schema.clone())
- .optional(
- "start",
- IntegerSchema::new("Start at this line.")
- .minimum(0)
- .default(0)
- )
- .optional(
- "limit",
- IntegerSchema::new("Only list this amount of lines.")
- .minimum(0)
- .default(50)
- )
+const UPID_SCHEMA: Schema = StringSchema::new("Unique Process/Task ID.")
+ .max_length(256)
+ .schema();
+
+const UPID_API_SUBDIRS: SubdirMap = &[
+ (
+ "log", &Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&read_task_log),
+ &ObjectSchema::new(
+ "Read task log.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("upid", false, &UPID_SCHEMA),
+ ("start", true, &IntegerSchema::new("Start at this line.")
+ .minimum(0)
+ .default(0)
+ .schema()
+ ),
+ ("limit", true, &IntegerSchema::new("Only list this amount of lines.")
+ .minimum(0)
+ .default(50)
+ .schema()
+ ),
+ ],
)
)
- )
- .subdir(
- "status", Router::new()
- .get(
- ApiMethod::new(
- get_task_status,
- ObjectSchema::new("Get task status.")
- .required("node", NODE_SCHEMA.clone())
- .required("upid", upid_schema.clone()))
- )
- )
- .list_subdirs();
-
-
- Router::new()
- .get(ApiMethod::new(
- list_tasks,
- ObjectSchema::new("List tasks.")
- .required("node", NODE_SCHEMA.clone())
- .optional(
- "start",
- IntegerSchema::new("List tasks beginning from this offset.")
- .minimum(0)
- .default(0)
- )
- .optional(
- "limit",
- IntegerSchema::new("Only list this amount of tasks.")
- .minimum(0)
- .default(50)
- )
- .optional(
- "errors",
- BooleanSchema::new("Only list erroneous tasks.")
- )
- .optional(
- "userfilter",
- StringSchema::new("Only list tasks from this user.")
+ )
+ ),
+ (
+ "status", &Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&get_task_status),
+ &ObjectSchema::new(
+ "Get task status.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("upid", false, &UPID_SCHEMA),
+ ],
+ )
)
- )
+ )
+ )
+];
+
+pub const UPID_API_ROUTER: Router = Router::new()
+ .get(&list_subdirs_api_method!(UPID_API_SUBDIRS))
+ .delete(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&stop_task),
+ &ObjectSchema::new(
+ "Try to stop a task.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("upid", false, &UPID_SCHEMA),
+ ],
+ )
+ ).protected(true)
+ )
+ .subdirs(UPID_API_SUBDIRS);
+
+pub const ROUTER: Router = Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&list_tasks),
+ &ObjectSchema::new(
+ "List tasks.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("start", true, &IntegerSchema::new("List tasks beginning from this offset.")
+ .minimum(0)
+ .default(0)
+ .schema()
+ ),
+ ("limit", true, &IntegerSchema::new("Only list this amount of tasks.")
+ .minimum(0)
+ .default(50)
+ .schema()
+ ),
+ ("errors", true, &BooleanSchema::new("Only list erroneous tasks.").schema()),
+ ("userfilter", true, &StringSchema::new("Only list tasks from this user.").schema()),
+ ],
+ )
)
- .match_all("upid", upid_api)
-}
+ )
+ .match_all("upid", &UPID_API_ROUTER);
Ok(Value::Null)
}
-pub fn router() -> Router {
- Router::new()
- .get(
- ApiMethod::new(
- get_time,
- ObjectSchema::new("Read server time and time zone settings.")
- .required("node", NODE_SCHEMA.clone())
- ).returns(
- ObjectSchema::new("Returns server time and timezone.")
- .required("timezone", StringSchema::new("Time zone"))
- .required("time", IntegerSchema::new("Seconds since 1970-01-01 00:00:00 UTC.")
- .minimum(1_297_163_644))
- .required("localtime", IntegerSchema::new("Seconds since 1970-01-01 00:00:00 UTC. (local time)")
- .minimum(1_297_163_644))
+pub const ROUTER: Router = Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&get_time),
+ &ObjectSchema::new(
+ "Read server time and time zone settings.",
+ &[ ("node", false, &NODE_SCHEMA) ],
)
+ ).returns(
+ &ObjectSchema::new(
+ "Returns server time and timezone.",
+ &[
+ ("timezone", false, &StringSchema::new("Time zone").schema()),
+ ("time", false, &IntegerSchema::new("Seconds since 1970-01-01 00:00:00 UTC.")
+ .minimum(1_297_163_644)
+ .schema()
+ ),
+ ("localtime", false, &IntegerSchema::new("Seconds since 1970-01-01 00:00:00 UTC. (local time)")
+ .minimum(1_297_163_644)
+ .schema()
+ ),
+ ],
+ ).schema()
)
- .put(
- ApiMethod::new(
- set_timezone,
- ObjectSchema::new("Set time zone.")
- .required("node", NODE_SCHEMA.clone())
- .required("timezone", StringSchema::new(
- "Time zone. The file '/usr/share/zoneinfo/zone.tab' contains the list of valid names."))
- ).protected(true).reload_timezone(true)
- )
-}
+ )
+ .put(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&set_timezone),
+ &ObjectSchema::new(
+ "Set time zone.",
+ &[
+ ("node", false, &NODE_SCHEMA),
+ ("timezone", false, &StringSchema::new(
+ "Time zone. The file '/usr/share/zoneinfo/zone.tab' contains the list of valid names.")
+ .schema()
+ ),
+ ],
+ )
+ ).protected(true).reload_timezone(true)
+ );
+
use failure::*;
-use lazy_static::lazy_static;
-
-use std::sync::Arc;
use futures::*;
use hyper::header::{self, HeaderValue, UPGRADE};
mod environment;
use environment::*;
-pub fn router() -> Router {
- Router::new()
- .upgrade(api_method_upgrade_backup())
-}
-
-pub fn api_method_upgrade_backup() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- upgrade_to_backup_reader_protocol,
- ObjectSchema::new(concat!("Upgraded to backup protocol ('", PROXMOX_BACKUP_READER_PROTOCOL_ID_V1!(), "')."))
- .required("store", StringSchema::new("Datastore name."))
- .required("backup-type", StringSchema::new("Backup type.")
- .format(Arc::new(ApiStringFormat::Enum(&["vm", "ct", "host"]))))
- .required("backup-id", StringSchema::new("Backup ID."))
- .required("backup-time", IntegerSchema::new("Backup time (Unix epoch.)")
- .minimum(1_547_797_308))
- .optional("debug", BooleanSchema::new("Enable verbose debug logging."))
+pub const ROUTER: Router = Router::new()
+ .upgrade(&API_METHOD_UPGRADE_BACKUP);
+
+pub const API_METHOD_UPGRADE_BACKUP: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&upgrade_to_backup_reader_protocol),
+ &ObjectSchema::new(
+ concat!("Upgraded to backup protocol ('", PROXMOX_BACKUP_READER_PROTOCOL_ID_V1!(), "')."),
+ &[
+ ("store", false, &StringSchema::new("Datastore name.").schema()),
+ ("backup-type", false, &StringSchema::new("Backup type.")
+ .format(&ApiStringFormat::Enum(&["vm", "ct", "host"]))
+ .schema()
+ ),
+ ("backup-id", false, &StringSchema::new("Backup ID.").schema()),
+ ("backup-time", false, &IntegerSchema::new("Backup time (Unix epoch.)")
+ .minimum(1_547_797_308)
+ .schema()
+ ),
+ ("debug", true, &BooleanSchema::new("Enable verbose debug logging.").schema()),
+ ],
)
-}
+);
fn upgrade_to_backup_reader_protocol(
parts: Parts,
req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
env.log(format!("starting new backup reader datastore '{}': {:?}", store, path));
- let service = H2Service::new(env.clone(), worker.clone(), &READER_ROUTER, debug);
+ let service = H2Service::new(env.clone(), worker.clone(), &READER_API_ROUTER, debug);
let abort_future = worker.abort_future();
Ok(Box::new(futures::future::ok(response)))
}
-lazy_static!{
- static ref READER_ROUTER: Router = reader_api();
-}
-
-pub fn reader_api() -> Router {
- Router::new()
- .subdir(
- "chunk", Router::new()
- .download(api_method_download_chunk())
- )
- .subdir(
- "download", Router::new()
- .download(api_method_download_file())
- )
- .subdir(
- "speedtest", Router::new()
- .download(api_method_speedtest())
- )
-}
-
-pub fn api_method_download_file() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- download_file,
- ObjectSchema::new("Download specified file.")
- .required("file-name", crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA.clone())
+pub const READER_API_ROUTER: Router = Router::new()
+ .subdirs(&[
+ (
+ "chunk", &Router::new()
+ .download(&API_METHOD_DOWNLOAD_CHUNK)
+ ),
+ (
+ "download", &Router::new()
+ .download(&API_METHOD_DOWNLOAD_FILE)
+ ),
+ (
+ "speedtest", &Router::new()
+ .download(&API_METHOD_SPEEDTEST)
+ ),
+ ]);
+
+pub const API_METHOD_DOWNLOAD_FILE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&download_file),
+ &ObjectSchema::new(
+ "Download specified file.",
+ &[ ("file-name", false, &crate::api2::types::BACKUP_ARCHIVE_NAME_SCHEMA) ],
)
-}
+);
fn download_file(
_parts: Parts,
_req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
Ok(Box::new(response_future))
}
-pub fn api_method_download_chunk() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- download_chunk,
- ObjectSchema::new("Download specified chunk.")
- .required("digest", CHUNK_DIGEST_SCHEMA.clone())
+pub const API_METHOD_DOWNLOAD_CHUNK: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&download_chunk),
+ &ObjectSchema::new(
+ "Download specified chunk.",
+ &[ ("digest", false, &CHUNK_DIGEST_SCHEMA) ],
)
-}
+);
fn download_chunk(
_parts: Parts,
_req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
_parts: Parts,
_req_body: Body,
param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
}
*/
-pub fn api_method_speedtest() -> ApiAsyncMethod {
- ApiAsyncMethod::new(
- speedtest,
- ObjectSchema::new("Test 4M block download speed.")
- )
-}
+pub const API_METHOD_SPEEDTEST: ApiMethod = ApiMethod::new(
+ &ApiHandler::Async(&speedtest),
+ &ObjectSchema::new("Test 4M block download speed.", &[])
+);
fn speedtest(
_parts: Parts,
_req_body: Body,
_param: Value,
- _info: &ApiAsyncMethod,
+ _info: &ApiMethod,
_rpcenv: Box<dyn RpcEnvironment>,
) -> Result<BoxFut, Error> {
}))
}
-pub fn router() -> Router {
- Router::new()
- .get(ApiMethod::new(
- get_subscription,
- ObjectSchema::new("Read subscription info.")))
-}
+pub const ROUTER: Router = Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&get_subscription),
+ &ObjectSchema::new("Read subscription info.", &[])
+ )
+ );
use failure::*;
-use lazy_static::lazy_static;
-use std::sync::Arc;
+//use lazy_static::lazy_static;
+//use std::sync::Arc;
use crate::api_schema::*;
-use proxmox::tools::common_regex;
+use proxmox::tools::*; // required to use IPRE!() macro ???
+
+// File names: may not contain slashes, may not start with "."
+pub const FILENAME_FORMAT: ApiStringFormat = ApiStringFormat::VerifyFn(|name| {
+ if name.starts_with('.') {
+ bail!("file names may not start with '.'");
+ }
+ if name.contains('/') {
+ bail!("file names may not contain slashes");
+ }
+ Ok(())
+});
+
+
+const_regex!{
+ pub IP_FORMAT_REGEX = IPRE!();
+ pub SHA256_HEX_REGEX = r"^[a-f0-9]{64}$"; // fixme: define in common_regex ?
+ pub SYSTEMD_DATETIME_REGEX = r"^\d{4}-\d{2}-\d{2}( \d{2}:\d{2}(:\d{2})?)?$"; // fixme: define in common_regex ?
+}
-lazy_static!{
+pub const SYSTEMD_DATETIME_FORMAT: ApiStringFormat =
+ ApiStringFormat::Pattern(&SYSTEMD_DATETIME_REGEX);
- // File names: may not contain slashes, may not start with "."
- pub static ref FILENAME_FORMAT: Arc<ApiStringFormat> = Arc::new(ApiStringFormat::VerifyFn(|name| {
- if name.starts_with('.') {
- bail!("file names may not start with '.'");
- }
- if name.contains('/') {
- bail!("file names may not contain slashes");
- }
- Ok(())
- })).into();
-
- pub static ref IP_FORMAT: Arc<ApiStringFormat> = ApiStringFormat::Pattern(&common_regex::IP_REGEX).into();
-
- pub static ref PVE_CONFIG_DIGEST_FORMAT: Arc<ApiStringFormat> =
- ApiStringFormat::Pattern(&common_regex::SHA256_HEX_REGEX).into();
-
- pub static ref PVE_CONFIG_DIGEST_SCHEMA: Arc<Schema> =
- StringSchema::new("Prevent changes if current configuration file has different SHA256 digest. This can be used to prevent concurrent modifications.")
- .format(PVE_CONFIG_DIGEST_FORMAT.clone()).into();
-
- pub static ref CHUNK_DIGEST_FORMAT: Arc<ApiStringFormat> =
- ApiStringFormat::Pattern(&common_regex::SHA256_HEX_REGEX).into();
-
- pub static ref CHUNK_DIGEST_SCHEMA: Arc<Schema> =
- StringSchema::new("Chunk digest (SHA256).")
- .format(CHUNK_DIGEST_FORMAT.clone()).into();
-
- pub static ref NODE_SCHEMA: Arc<Schema> = Arc::new(
- StringSchema::new("Node name (or 'localhost')")
- .format(
- Arc::new(ApiStringFormat::VerifyFn(|node| {
- if node == "localhost" || node == proxmox::tools::nodename() {
- Ok(())
- } else {
- bail!("no such node '{}'", node);
- }
- }))
- )
- .into()
- );
-
- pub static ref SEARCH_DOMAIN_SCHEMA: Arc<Schema> =
- StringSchema::new("Search domain for host-name lookup.").into();
-
- pub static ref FIRST_DNS_SERVER_SCHEMA: Arc<Schema> =
- StringSchema::new("First name server IP address.")
- .format(IP_FORMAT.clone()).into();
-
- pub static ref SECOND_DNS_SERVER_SCHEMA: Arc<Schema> =
- StringSchema::new("Second name server IP address.")
- .format(IP_FORMAT.clone()).into();
-
- pub static ref THIRD_DNS_SERVER_SCHEMA: Arc<Schema> =
- StringSchema::new("Third name server IP address.")
- .format(IP_FORMAT.clone()).into();
-
- pub static ref BACKUP_ARCHIVE_NAME_SCHEMA: Arc<Schema> =
- StringSchema::new("Backup archive name.")
- .format(FILENAME_FORMAT.clone()).into();
-
- pub static ref BACKUP_TYPE_SCHEMA: Arc<Schema> =
- StringSchema::new("Backup type.")
- .format(Arc::new(ApiStringFormat::Enum(&["vm", "ct", "host"])))
- .into();
-
- pub static ref BACKUP_ID_SCHEMA: Arc<Schema> =
- StringSchema::new("Backup ID.")
- .format(FILENAME_FORMAT.clone())
- .into();
-
- pub static ref BACKUP_TIME_SCHEMA: Arc<Schema> =
- IntegerSchema::new("Backup time (Unix epoch.)")
- .minimum(1_547_797_308)
- .into();
+pub const IP_FORMAT: ApiStringFormat =
+ ApiStringFormat::Pattern(&IP_FORMAT_REGEX);
-}
+pub const PVE_CONFIG_DIGEST_FORMAT: ApiStringFormat =
+ ApiStringFormat::Pattern(&SHA256_HEX_REGEX);
+
+pub const PVE_CONFIG_DIGEST_SCHEMA: Schema = StringSchema::new(
+ "Prevent changes if current configuration file has different SHA256 digest. \
+ This can be used to prevent concurrent modifications."
+)
+ .format(&PVE_CONFIG_DIGEST_FORMAT)
+ .schema();
+
+
+pub const CHUNK_DIGEST_FORMAT: ApiStringFormat =
+ ApiStringFormat::Pattern(&SHA256_HEX_REGEX);
+
+pub const CHUNK_DIGEST_SCHEMA: Schema = StringSchema::new("Chunk digest (SHA256).")
+ .format(&CHUNK_DIGEST_FORMAT)
+ .schema();
+
+pub const NODE_SCHEMA: Schema = StringSchema::new("Node name (or 'localhost')")
+ .format(&ApiStringFormat::VerifyFn(|node| {
+ if node == "localhost" || node == proxmox::tools::nodename() {
+ Ok(())
+ } else {
+ bail!("no such node '{}'", node);
+ }
+ }))
+ .schema();
+
+pub const SEARCH_DOMAIN_SCHEMA: Schema =
+ StringSchema::new("Search domain for host-name lookup.").schema();
+
+pub const FIRST_DNS_SERVER_SCHEMA: Schema =
+ StringSchema::new("First name server IP address.")
+ .format(&IP_FORMAT)
+ .schema();
+
+pub const SECOND_DNS_SERVER_SCHEMA: Schema =
+ StringSchema::new("Second name server IP address.")
+ .format(&IP_FORMAT)
+ .schema();
+
+pub const THIRD_DNS_SERVER_SCHEMA: Schema =
+ StringSchema::new("Third name server IP address.")
+ .format(&IP_FORMAT)
+ .schema();
+
+pub const BACKUP_ARCHIVE_NAME_SCHEMA: Schema =
+ StringSchema::new("Backup archive name.")
+ .format(&FILENAME_FORMAT)
+ .schema();
+
+pub const BACKUP_TYPE_SCHEMA: Schema =
+ StringSchema::new("Backup type.")
+ .format(&ApiStringFormat::Enum(&["vm", "ct", "host"]))
+ .schema();
+
+pub const BACKUP_ID_SCHEMA: Schema =
+ StringSchema::new("Backup ID.")
+ .format(&FILENAME_FORMAT)
+ .schema();
+
+pub const BACKUP_TIME_SCHEMA: Schema =
+ IntegerSchema::new("Backup time (Unix epoch.)")
+ .minimum(1_547_797_308)
+ .schema();
}))
}
-pub fn router() -> Router {
- Router::new()
- .get(ApiMethod::new(
- get_version,
- ObjectSchema::new("Proxmox Backup Server API version.")))
-}
+pub const ROUTER: Router = Router::new()
+ .get(
+ &ApiMethod::new(
+ &ApiHandler::Sync(&get_version),
+ &ObjectSchema::new("Proxmox Backup Server API version.", &[])
+ )
+ );
+
mod schema;
pub use schema::*;
+pub mod rpc_environment;
pub mod api_handler;
-pub mod registry;
#[macro_use]
pub mod router;
+
+//pub mod registry;
pub mod config;
pub mod format;
+
use failure::Error;
use serde_json::Value;
-use super::router::{ApiMethod, RpcEnvironment};
+use hyper::{Body, Response};
+use hyper::rt::Future;
+use hyper::http::request::Parts;
-pub type ApiHandlerFn = Box<
- dyn Fn(Value, &ApiMethod, &mut dyn RpcEnvironment) -> Result<Value, Error>
- + Send + Sync + 'static
->;
-
-pub trait WrapApiHandler<Args, R, MetaArgs> {
- fn wrap(self) -> ApiHandlerFn;
-}
-
-// fn()
-impl<F, R> WrapApiHandler<(), R, ()> for F
-where
- F: Fn() -> Result<R, Error> + Send + Sync + 'static,
- R: serde::Serialize,
-{
- fn wrap(self) -> ApiHandlerFn {
- Box::new(move |_value, _method, _rpc_env| {
- Ok(serde_json::to_value((self)()?)?)
- })
- }
-}
+use super::rpc_environment::RpcEnvironment;
+use super::router::ApiMethod;
-// fn(Arg)
-impl<F, A, R> WrapApiHandler<(A,), R, ()> for F
-where
- F: Fn(A) -> Result<R, Error> + Send + Sync + 'static,
- A: serde::de::DeserializeOwned,
- R: serde::Serialize,
-{
- fn wrap(self) -> ApiHandlerFn {
- Box::new(move |value, _method, _rpc_env| {
- Ok(serde_json::to_value((self)(serde_json::from_value(value)?)?)?)
- })
- }
-}
+pub type BoxFut = Box<dyn Future<Output = Result<Response<Body>, failure::Error>> + Send>;
-// fn(&ApiMethod)
-impl<F, R> WrapApiHandler<(), R, (ApiMethod,)> for F
-where
- F: Fn(&ApiMethod) -> Result<R, Error> + Send + Sync + 'static,
- R: serde::Serialize,
-{
- fn wrap(self) -> ApiHandlerFn {
- Box::new(move |_value, method, _rpc_env| {
- Ok(serde_json::to_value((self)(method)?)?)
- })
- }
-}
-
-// fn(Arg, &ApiMethod)
-impl<F, A, R> WrapApiHandler<(A,), R, (ApiMethod,)> for F
-where
- F: Fn(A, &ApiMethod) -> Result<R, Error> + Send + Sync + 'static,
- A: serde::de::DeserializeOwned,
- R: serde::Serialize,
-{
- fn wrap(self) -> ApiHandlerFn {
- Box::new(move |value, method, _rpc_env| {
- Ok(serde_json::to_value((self)(
- serde_json::from_value(value)?,
- method,
- )?)?)
- })
- }
-}
-
-// RpcEnvironment is a trait, so use a "marker" type for it instead:
-pub struct RpcEnvArg();
-
-// fn(&mut dyn RpcEnvironment)
-impl<F, R> WrapApiHandler<(), R, (RpcEnvArg,)> for F
-where
- F: Fn(&mut dyn RpcEnvironment) -> Result<R, Error> + Send + Sync + 'static,
- R: serde::Serialize,
-{
- fn wrap(self) -> ApiHandlerFn {
- Box::new(move |_value, _method, rpc_env| {
- Ok(serde_json::to_value((self)(rpc_env)?)?)
- })
- }
-}
-
-// fn(Arg, &mut dyn RpcEnvironment)
-impl<F, A, R> WrapApiHandler<(A,), R, (RpcEnvArg,)> for F
-where
- F: Fn(A, &mut dyn RpcEnvironment) -> Result<R, Error> + Send + Sync + 'static,
- A: serde::de::DeserializeOwned,
- R: serde::Serialize,
-{
- fn wrap(self) -> ApiHandlerFn {
- Box::new(move |value, _method, rpc_env| {
- Ok(serde_json::to_value((self)(
- serde_json::from_value(value)?,
- rpc_env,
- )?)?)
- })
- }
-}
+pub type ApiHandlerFn = &'static (
+ dyn Fn(Value, &ApiMethod, &mut dyn RpcEnvironment) -> Result<Value, Error>
+ + Send + Sync + 'static
+);
-// fn(&ApiMethod, &mut dyn RpcEnvironment)
-impl<F, R> WrapApiHandler<(), R, (ApiMethod, RpcEnvArg,)> for F
-where
- F: Fn(&ApiMethod, &mut dyn RpcEnvironment) -> Result<R, Error> + Send + Sync + 'static,
- R: serde::Serialize,
-{
- fn wrap(self) -> ApiHandlerFn {
- Box::new(move |_value, method, rpc_env| {
- Ok(serde_json::to_value((self)(method, rpc_env)?)?)
- })
- }
-}
+pub type ApiAsyncHandlerFn = &'static (
+ dyn Fn(Parts, Body, Value, &'static ApiMethod, Box<dyn RpcEnvironment>) -> Result<BoxFut, Error>
+ + Send + Sync + 'static
+);
-// fn(Arg, &ApiMethod, &mut dyn RpcEnvironment)
-impl<F, A, R> WrapApiHandler<(A,), R, (ApiMethod, RpcEnvArg,)> for F
-where
- F: Fn(A, &ApiMethod, &mut dyn RpcEnvironment) -> Result<R, Error> + Send + Sync + 'static,
- A: serde::de::DeserializeOwned,
- R: serde::Serialize,
-{
- fn wrap(self) -> ApiHandlerFn {
- Box::new(move |value, method, rpc_env| {
- Ok(serde_json::to_value((self)(
- serde_json::from_value(value)?,
- method,
- rpc_env,
- )?)?)
- })
- }
+pub enum ApiHandler {
+ Sync(ApiHandlerFn),
+ Async(ApiAsyncHandlerFn),
}
-
}
}
- pub fn find_method(&self, components: &[&str], method: Method, uri_param: &mut HashMap<String, String>) -> &'static MethodDefinition {
+ pub fn find_method(
+ &self,
+ components: &[&str],
+ method: Method,
+ uri_param: &mut HashMap<String, String>,
+ ) -> Option<&'static ApiMethod> {
self.router.find_method(components, method, uri_param)
}
use failure::*;
use std::io::Write;
-use crate::api_schema::*;
-use crate::api_schema::router::*;
+//use super::*;
+use super::router::*;
+use super::schema::*;
+//use super::api_handler::*;
+
#[derive(Copy, Clone)]
pub enum ParameterDisplayStyle {
let mut res = wrap_text("", "", param.description, 80);
- let properties = ¶m.properties;
-
- let mut prop_names: Vec<&str> = properties.keys().copied().collect();
- prop_names.sort();
-
let mut required_list: Vec<String> = Vec::new();
let mut optional_list: Vec<String> = Vec::new();
- for prop in prop_names {
- let (optional, schema) = properties.get(prop).unwrap();
-
+ for (prop, optional, schema) in param.properties {
let param_descr = get_property_description(
prop, &schema, ParameterDisplayStyle::Config, DocumentationFormat::ReST);
res
}
-fn dump_method_definition(method: &str, path: &str, def: &MethodDefinition) -> Option<String> {
+fn dump_method_definition(method: &str, path: &str, def: Option<&ApiMethod>) -> Option<String> {
match def {
- MethodDefinition::None => None,
- MethodDefinition::Simple(simple_method) => {
- let param_descr = dump_api_parameters(&simple_method.parameters);
+ None => None,
+ Some(api_method) => {
+ let param_descr = dump_api_parameters(api_method.parameters);
- let return_descr = dump_api_return_schema(&simple_method.returns);
+ let return_descr = dump_api_return_schema(api_method.returns);
- let res = format!("**{} {}**\n\n{}\n\n{}", method, path, param_descr, return_descr);
- Some(res)
- }
- MethodDefinition::Async(async_method) => {
- let method = if method == "POST" { "UPLOAD" } else { method };
- let method = if method == "GET" { "DOWNLOAD" } else { method };
-
- let param_descr = dump_api_parameters(&async_method.parameters);
-
- let return_descr = dump_api_return_schema(&async_method.returns);
+ let mut method = method;
+ if let ApiHandler::Async(_) = api_method.handler {
+ method = if method == "POST" { "UPLOAD" } else { method };
+ method = if method == "GET" { "DOWNLOAD" } else { method };
+ }
+
let res = format!("**{} {}**\n\n{}\n\n{}", method, path, param_descr, return_descr);
Some(res)
}
Ok(())
};
- cond_print(dump_method_definition("GET", path, &router.get))?;
- cond_print(dump_method_definition("POST", path, &router.post))?;
- cond_print(dump_method_definition("PUT", path, &router.put))?;
- cond_print(dump_method_definition("DELETE", path, &router.delete))?;
+ cond_print(dump_method_definition("GET", path, router.get))?;
+ cond_print(dump_method_definition("POST", path, router.post))?;
+ cond_print(dump_method_definition("PUT", path, router.put))?;
+ cond_print(dump_method_definition("DELETE", path, router.delete))?;
match &router.subroute {
- SubRoute::None => return Ok(()),
- SubRoute::MatchAll { router, param_name } => {
+ None => return Ok(()),
+ Some(SubRoute::MatchAll { router, param_name }) => {
let sub_path = if path == "." {
format!("<{}>", param_name)
} else {
};
dump_api(output, router, &sub_path, pos)?;
}
- SubRoute::Hash(map) => {
- let mut keys: Vec<&String> = map.keys().collect();
- keys.sort_unstable_by(|a, b| a.cmp(b));
- for key in keys {
- let sub_router = &map[key];
- let sub_path = if path == "." { key.to_owned() } else { format!("{}/{}", path, key) };
+ Some(SubRoute::Map(dirmap)) => {
+ //let mut keys: Vec<&String> = map.keys().collect();
+ //keys.sort_unstable_by(|a, b| a.cmp(b));
+ for (key, sub_router) in dirmap.iter() {
+ let sub_path = if path == "." { key.to_string() } else { format!("{}/{}", path, key) };
dump_api(output, sub_router, &sub_path, pos)?;
}
}
use failure::*;
-use crate::api_schema::*;
-use serde_json::{json, Value};
+use serde_json::Value;
use std::collections::HashMap;
-use std::sync::Arc;
use std::fmt;
-use hyper::{Body, Method, Response, StatusCode};
-use hyper::rt::Future;
-use hyper::http::request::Parts;
+use hyper::{Method, StatusCode};
+//use hyper::http::request::Parts;
-use super::api_handler::*;
+use super::schema::*;
+pub use super::rpc_environment::*;
+pub use super::api_handler::*;
-pub type BoxFut = Box<dyn Future<Output = Result<Response<Body>, failure::Error>> + Send>;
-
-/// Abstract Interface for API methods to interact with the environment
-pub trait RpcEnvironment: std::any::Any + crate::tools::AsAny + Send {
-
- /// Use this to pass additional result data. It is up to the environment
- /// how the data is used.
- fn set_result_attrib(&mut self, name: &str, value: Value);
-
- /// Query additional result data.
- fn get_result_attrib(&self, name: &str) -> Option<&Value>;
-
- /// The environment type
- fn env_type(&self) -> RpcEnvironmentType;
-
- /// Set user name
- fn set_user(&mut self, user: Option<String>);
-
- /// Get user name
- fn get_user(&self) -> Option<String>;
-}
-
-
-/// Environment Type
-///
-/// We use this to enumerate the different environment types. Some methods
-/// needs to do different things when started from the command line interface,
-/// or when executed from a privileged server running as root.
-#[derive(PartialEq, Copy, Clone)]
-pub enum RpcEnvironmentType {
- /// Command started from command line
- CLI,
- /// Access from public accessible server
- PUBLIC,
- /// Access from privileged server (run as root)
- PRIVILEGED,
-}
#[derive(Debug, Fail)]
pub struct HttpError {
}
}
+#[macro_export]
macro_rules! http_err {
($status:ident, $msg:expr) => {{
Error::from(HttpError::new(StatusCode::$status, $msg))
}}
}
-type ApiAsyncHandlerFn = Box<
- dyn Fn(Parts, Body, Value, &ApiAsyncMethod, Box<dyn RpcEnvironment>) -> Result<BoxFut, Error>
- + Send + Sync + 'static
->;
-
/// This struct defines a synchronous API call which returns the result as json `Value`
pub struct ApiMethod {
/// The protected flag indicates that the provides function should be forwarded
/// should do a tzset afterwards
pub reload_timezone: bool,
/// Parameter type Schema
- pub parameters: ObjectSchema,
+ pub parameters: &'static ObjectSchema,
/// Return type Schema
- pub returns: Arc<Schema>,
+ pub returns: &'static Schema,
/// Handler function
- pub handler: Option<ApiHandlerFn>,
+ pub handler: &'static ApiHandler,
}
+impl std::fmt::Debug for ApiMethod {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "ApiMethod {{ ")?;
+ write!(f, " parameters: {:?}", self.parameters)?;
+ write!(f, " returns: {:?}", self.returns)?;
+ write!(f, " handler: {:p}", &self.handler)?;
+ write!(f, "}}")
+ }
+}
+
+const NULL_SCHEMA: Schema = Schema::Null;
+
+fn dummy_handler_fn(_arg: Value, _method: &ApiMethod, _env: &mut dyn RpcEnvironment) -> Result<Value, Error> {
+ // do nothing
+ Ok(Value::Null)
+}
+
+const DUMMY_HANDLER: ApiHandler = ApiHandler::Sync(&dummy_handler_fn);
+
impl ApiMethod {
- pub fn new<F, Args, R, MetaArgs>(func: F, parameters: ObjectSchema) -> Self
- where
- F: WrapApiHandler<Args, R, MetaArgs>,
- {
+ pub const fn new(handler: &'static ApiHandler, parameters: &'static ObjectSchema) -> Self {
Self {
parameters,
- handler: Some(func.wrap()),
- returns: Arc::new(Schema::Null),
+ handler,
+ returns: &NULL_SCHEMA,
protected: false,
reload_timezone: false,
}
}
- pub fn new_dummy(parameters: ObjectSchema) -> Self {
+ pub const fn new_dummy(parameters: &'static ObjectSchema) -> Self {
Self {
parameters,
- handler: None,
- returns: Arc::new(Schema::Null),
+ handler: &DUMMY_HANDLER,
+ returns: &NULL_SCHEMA,
protected: false,
reload_timezone: false,
}
}
- pub fn returns<S: Into<Arc<Schema>>>(mut self, schema: S) -> Self {
+ pub const fn returns(mut self, schema: &'static Schema) -> Self {
- self.returns = schema.into();
+ self.returns = schema;
self
}
- pub fn protected(mut self, protected: bool) -> Self {
+ pub const fn protected(mut self, protected: bool) -> Self {
self.protected = protected;
self
}
- pub fn reload_timezone(mut self, reload_timezone: bool) -> Self {
+ pub const fn reload_timezone(mut self, reload_timezone: bool) -> Self {
self.reload_timezone = reload_timezone;
}
}
-pub struct ApiAsyncMethod {
- pub parameters: ObjectSchema,
- pub returns: Arc<Schema>,
- pub handler: ApiAsyncHandlerFn,
-}
-
-impl ApiAsyncMethod {
-
- pub fn new<F>(handler: F, parameters: ObjectSchema) -> Self
- where
- F: Fn(Parts, Body, Value, &ApiAsyncMethod, Box<dyn RpcEnvironment>) -> Result<BoxFut, Error>
- + Send + Sync + 'static,
- {
- Self {
- parameters,
- handler: Box::new(handler),
- returns: Arc::new(Schema::Null),
- }
- }
-
- pub fn returns<S: Into<Arc<Schema>>>(mut self, schema: S) -> Self {
-
- self.returns = schema.into();
-
- self
- }
-}
+pub type SubdirMap = &'static [(&'static str, &'static Router)];
pub enum SubRoute {
- None,
- Hash(HashMap<String, Router>),
- MatchAll { router: Box<Router>, param_name: String },
+ //Hash(HashMap<String, Router>),
+ Map(SubdirMap),
+ MatchAll { router: &'static Router, param_name: &'static str },
}
-pub enum MethodDefinition {
- None,
- Simple(ApiMethod),
- Async(ApiAsyncMethod),
+/// Macro to create an ApiMethod to list entries from SubdirMap
+#[macro_export]
+macro_rules! list_subdirs_api_method {
+ ($map:expr) => {
+ ApiMethod::new(
+ &ApiHandler::Sync( & |_, _, _| {
+ let index = serde_json::json!(
+ $map.iter().map(|s| serde_json::json!({ "subdir": s.0}))
+ .collect::<Vec<serde_json::Value>>()
+ );
+ Ok(index)
+ }),
+ &crate::api_schema::ObjectSchema::new("Directory index.", &[]).additional_properties(true)
+ )
+ }
}
pub struct Router {
- pub get: MethodDefinition,
- pub put: MethodDefinition,
- pub post: MethodDefinition,
- pub delete: MethodDefinition,
- pub subroute: SubRoute,
+ pub get: Option<&'static ApiMethod>,
+ pub put: Option<&'static ApiMethod>,
+ pub post: Option<&'static ApiMethod>,
+ pub delete: Option<&'static ApiMethod>,
+ pub subroute: Option<SubRoute>,
}
impl Router {
- pub fn new() -> Self {
+ pub const fn new() -> Self {
Self {
- get: MethodDefinition::None,
- put: MethodDefinition::None,
- post: MethodDefinition::None,
- delete: MethodDefinition::None,
- subroute: SubRoute::None
- }
- }
-
- pub fn subdir<S: Into<String>>(mut self, subdir: S, router: Router) -> Self {
- if let SubRoute::None = self.subroute {
- self.subroute = SubRoute::Hash(HashMap::new());
- }
- match self.subroute {
- SubRoute::Hash(ref mut map) => {
- map.insert(subdir.into(), router);
- }
- _ => panic!("unexpected subroute type"),
+ get: None,
+ put: None,
+ post: None,
+ delete: None,
+ subroute: None,
}
- self
}
- pub fn subdirs(mut self, map: HashMap<String, Router>) -> Self {
- self.subroute = SubRoute::Hash(map);
+ pub const fn subdirs(mut self, map: SubdirMap) -> Self {
+ self.subroute = Some(SubRoute::Map(map));
self
}
- pub fn match_all<S: Into<String>>(mut self, param_name: S, router: Router) -> Self {
- if let SubRoute::None = self.subroute {
- self.subroute = SubRoute::MatchAll { router: Box::new(router), param_name: param_name.into() };
- } else {
- panic!("unexpected subroute type");
- }
+ pub const fn match_all(mut self, param_name: &'static str, router: &'static Router) -> Self {
+ self.subroute = Some(SubRoute::MatchAll { router, param_name });
self
}
-
- pub fn list_subdirs(self) -> Self {
- match self.get {
- MethodDefinition::None => {},
- _ => panic!("cannot create directory index - method get already in use"),
- }
- match self.subroute {
- SubRoute::Hash(ref map) => {
- let index = json!(map.keys().map(|s| json!({ "subdir": s}))
- .collect::<Vec<Value>>());
- self.get(ApiMethod::new(
- move || { Ok(index.clone()) },
- ObjectSchema::new("Directory index.").additional_properties(true))
- )
- }
- _ => panic!("cannot create directory index (no SubRoute::Hash)"),
- }
- }
-
- pub fn get(mut self, m: ApiMethod) -> Self {
- self.get = MethodDefinition::Simple(m);
+
+ pub const fn get(mut self, m: &'static ApiMethod) -> Self {
+ self.get = Some(m);
self
}
- pub fn put(mut self, m: ApiMethod) -> Self {
- self.put = MethodDefinition::Simple(m);
+ pub const fn put(mut self, m: &'static ApiMethod) -> Self {
+ self.put = Some(m);
self
}
- pub fn post(mut self, m: ApiMethod) -> Self {
- self.post = MethodDefinition::Simple(m);
+ pub const fn post(mut self, m: &'static ApiMethod) -> Self {
+ self.post = Some(m);
self
}
- pub fn upload(mut self, m: ApiAsyncMethod) -> Self {
- self.post = MethodDefinition::Async(m);
+    /// Same as post, but async (fixme: expect Async)
+ pub const fn upload(mut self, m: &'static ApiMethod) -> Self {
+ self.post = Some(m);
self
}
- pub fn download(mut self, m: ApiAsyncMethod) -> Self {
- self.get = MethodDefinition::Async(m);
+ /// Same as get, but async (fixme: expect Async)
+ pub const fn download(mut self, m: &'static ApiMethod) -> Self {
+ self.get = Some(m);
self
}
- pub fn upgrade(mut self, m: ApiAsyncMethod) -> Self {
- self.get = MethodDefinition::Async(m);
+ /// Same as get, but async (fixme: expect Async)
+ pub const fn upgrade(mut self, m: &'static ApiMethod) -> Self {
+ self.get = Some(m);
self
}
- pub fn delete(mut self, m: ApiMethod) -> Self {
- self.delete = MethodDefinition::Simple(m);
+ pub const fn delete(mut self, m: &'static ApiMethod) -> Self {
+ self.delete = Some(m);
self
}
let (dir, rest) = (components[0], &components[1..]);
match self.subroute {
- SubRoute::None => {},
- SubRoute::Hash(ref dirmap) => {
- if let Some(ref router) = dirmap.get(dir) {
+ None => {},
+ Some(SubRoute::Map(dirmap)) => {
+ if let Ok(ind) = dirmap.binary_search_by_key(&dir, |(name, _)| name) {
+ let (_name, router) = dirmap[ind];
//println!("FOUND SUBDIR {}", dir);
return router.find_route(rest, uri_param);
}
}
- SubRoute::MatchAll { ref router, ref param_name } => {
+ Some(SubRoute::MatchAll { router, param_name }) => {
//println!("URI PARAM {} = {}", param_name, dir); // fixme: store somewhere
- uri_param.insert(param_name.clone(), dir.into());
+ uri_param.insert(param_name.to_owned(), dir.into());
return router.find_route(rest, uri_param);
},
}
components: &[&str],
method: Method,
uri_param: &mut HashMap<String, String>
- ) -> &MethodDefinition {
+ ) -> Option<&ApiMethod> {
if let Some(info) = self.find_route(components, uri_param) {
return match method {
- Method::GET => &info.get,
- Method::PUT => &info.put,
- Method::POST => &info.post,
- Method::DELETE => &info.delete,
- _ => &MethodDefinition::None,
+ Method::GET => info.get,
+ Method::PUT => info.put,
+ Method::POST => info.post,
+ Method::DELETE => info.delete,
+ _ => None,
};
}
- &MethodDefinition::None
+ None
}
}
--- /dev/null
+use serde_json::Value;
+
+/// Abstract Interface for API methods to interact with the environment
+pub trait RpcEnvironment: std::any::Any + crate::tools::AsAny + Send {
+
+ /// Use this to pass additional result data. It is up to the environment
+ /// how the data is used.
+ fn set_result_attrib(&mut self, name: &str, value: Value);
+
+ /// Query additional result data.
+ fn get_result_attrib(&self, name: &str) -> Option<&Value>;
+
+ /// The environment type
+ fn env_type(&self) -> RpcEnvironmentType;
+
+ /// Set user name
+ fn set_user(&mut self, user: Option<String>);
+
+ /// Get user name
+ fn get_user(&self) -> Option<String>;
+}
+
+
+/// Environment Type
+///
+/// We use this to enumerate the different environment types. Some methods
+/// need to do different things when started from the command line interface,
+/// or when executed from a privileged server running as root.
+#[derive(PartialEq, Copy, Clone)]
+pub enum RpcEnvironmentType {
+ /// Command started from command line
+ CLI,
+ /// Access from public accessible server
+ PUBLIC,
+ /// Access from privileged server (run as root)
+ PRIVILEGED,
+}
use failure::*;
-use std::collections::HashMap;
use serde_json::{json, Value};
use url::form_urlencoded;
-use regex::Regex;
use std::fmt;
-use std::sync::Arc;
#[derive(Default, Debug, Fail)]
pub struct ParameterError {
impl BooleanSchema {
- pub fn new(description: &'static str) -> Self {
+ pub const fn new(description: &'static str) -> Self {
BooleanSchema {
description,
default: None,
}
}
- pub fn default(mut self, default: bool) -> Self {
+ pub const fn default(mut self, default: bool) -> Self {
self.default = Some(default);
self
}
+
+ pub const fn schema(self) -> Schema {
+ Schema::Boolean(self)
+ }
}
#[derive(Debug)]
impl IntegerSchema {
- pub fn new(description: &'static str) -> Self {
+ pub const fn new(description: &'static str) -> Self {
IntegerSchema {
description,
default: None,
}
}
- pub fn default(mut self, default: isize) -> Self {
+ pub const fn default(mut self, default: isize) -> Self {
self.default = Some(default);
self
}
- pub fn minimum(mut self, minimum: isize) -> Self {
+ pub const fn minimum(mut self, minimum: isize) -> Self {
self.minimum = Some(minimum);
self
}
- pub fn maximum(mut self, maximium: isize) -> Self {
+ pub const fn maximum(mut self, maximium: isize) -> Self {
self.maximum = Some(maximium);
self
}
+ pub const fn schema(self) -> Schema {
+ Schema::Integer(self)
+ }
+
fn check_constraints(&self, value: isize) -> Result<(), Error> {
if let Some(minimum) = self.minimum {
}
}
+/// Helper to represent const regular expressions
+///
+/// This is mostly a workaround until we can create a const_fn Regex.
+pub struct ConstRegexPattern {
+ pub regex_string: &'static str,
+ pub regex_obj: fn() -> &'static regex::Regex,
+}
+
+impl std::fmt::Debug for ConstRegexPattern {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{:?}", self.regex_string)
+ }
+}
+
+/// Macro to generate a ConstRegexPattern
+#[macro_export]
+macro_rules! const_regex {
+ () => {};
+ ($(#[$attr:meta])* pub ($($vis:tt)+) $name:ident = $regex:expr; $($rest:tt)*) => {
+ const_regex! { (pub ($($vis)+)) $(#[$attr])* $name = $regex; $($rest)* }
+ };
+ ($(#[$attr:meta])* pub $name:ident = $regex:expr; $($rest:tt)*) => {
+ const_regex! { (pub) $(#[$attr])* $name = $regex; $($rest)* }
+ };
+ ($(#[$attr:meta])* $name:ident = $regex:expr; $($rest:tt)*) => {
+ const_regex! { () $(#[$attr])* $name = $regex; $($rest)* }
+ };
+ (
+ ($($pub:tt)*) $(#[$attr:meta])* $name:ident = $regex:expr;
+ $($rest:tt)*
+ ) => {
+ $(#[$attr])* $($pub)* const $name: ConstRegexPattern = ConstRegexPattern {
+ regex_string: $regex,
+ regex_obj: (|| -> &'static regex::Regex {
+ lazy_static::lazy_static! {
+ static ref SCHEMA: regex::Regex = regex::Regex::new($regex).unwrap();
+ }
+ &SCHEMA
+ })
+ };
+
+ const_regex! { $($rest)* }
+ };
+}
#[derive(Debug)]
pub struct StringSchema {
pub default: Option<&'static str>,
pub min_length: Option<usize>,
pub max_length: Option<usize>,
- pub format: Option<Arc<ApiStringFormat>>,
+ pub format: Option<&'static ApiStringFormat>,
}
impl StringSchema {
- pub fn new(description: &'static str) -> Self {
+ pub const fn new(description: &'static str) -> Self {
StringSchema {
description,
default: None,
}
}
- pub fn default(mut self, text: &'static str) -> Self {
+ pub const fn default(mut self, text: &'static str) -> Self {
self.default = Some(text);
self
}
- pub fn format(mut self, format: Arc<ApiStringFormat>) -> Self {
+ pub const fn format(mut self, format: &'static ApiStringFormat) -> Self {
self.format = Some(format);
self
}
- pub fn min_length(mut self, min_length: usize) -> Self {
+ pub const fn min_length(mut self, min_length: usize) -> Self {
self.min_length = Some(min_length);
self
}
- pub fn max_length(mut self, max_length: usize) -> Self {
+ pub const fn max_length(mut self, max_length: usize) -> Self {
self.max_length = Some(max_length);
self
}
+ pub const fn schema(self) -> Schema {
+ Schema::String(self)
+ }
+
fn check_length(&self, length: usize) -> Result<(), Error> {
if let Some(min_length) = self.min_length {
self.check_length(value.chars().count())?;
if let Some(ref format) = self.format {
- match format.as_ref() {
- ApiStringFormat::Pattern(ref regex) => {
- if !regex.is_match(value) {
+ match format {
+ ApiStringFormat::Pattern(regex) => {
+ if !(regex.regex_obj)().is_match(value) {
bail!("value does not match the regex pattern");
}
}
- ApiStringFormat::Enum(ref stringvec) => {
+ ApiStringFormat::Enum(stringvec) => {
if stringvec.iter().find(|&e| *e == value) == None {
bail!("value '{}' is not defined in the enumeration.", value);
}
}
- ApiStringFormat::Complex(ref subschema) => {
+ ApiStringFormat::Complex(subschema) => {
parse_property_string(value, subschema)?;
}
ApiStringFormat::VerifyFn(verify_fn) => {
Ok(())
}
-
}
#[derive(Debug)]
pub struct ArraySchema {
pub description: &'static str,
- pub items: Arc<Schema>,
+ pub items: &'static Schema,
pub min_length: Option<usize>,
pub max_length: Option<usize>,
}
impl ArraySchema {
- pub fn new(description: &'static str, item_schema: Arc<Schema>) -> Self {
+ pub const fn new(description: &'static str, item_schema: &'static Schema) -> Self {
ArraySchema {
description,
items: item_schema,
}
}
- pub fn min_length(mut self, min_length: usize) -> Self {
+ pub const fn min_length(mut self, min_length: usize) -> Self {
self.min_length = Some(min_length);
self
}
- pub fn max_length(mut self, max_length: usize) -> Self {
+ pub const fn max_length(mut self, max_length: usize) -> Self {
self.max_length = Some(max_length);
self
}
+ pub const fn schema(self) -> Schema {
+ Schema::Array(self)
+ }
+
fn check_length(&self, length: usize) -> Result<(), Error> {
if let Some(min_length) = self.min_length {
}
}
+/// Lookup table to Schema properties
+///
+/// Stores a sorted list of (name, optional, schema) tuples:
+///
+/// name: The name of the property
+/// optional: Set when the property is optional
+/// schema: Property type schema
+///
+/// NOTE: The list has to be sorted by name, because we use
+/// a binary search to find items.
+///
+/// This is a workaround until Rust can const_fn Hash::new()
+pub type SchemaPropertyMap = &'static [(&'static str, bool, &'static Schema)];
+
#[derive(Debug)]
pub struct ObjectSchema {
pub description: &'static str,
pub additional_properties: bool,
- pub properties: HashMap<&'static str, (bool, Arc<Schema>)>,
+ pub properties: SchemaPropertyMap,
pub default_key: Option<&'static str>,
}
impl ObjectSchema {
- pub fn new(description: &'static str) -> Self {
- let properties = HashMap::new();
+ pub const fn new(description: &'static str, properties: SchemaPropertyMap) -> Self {
ObjectSchema {
description,
- additional_properties: false,
properties,
+ additional_properties: false,
default_key: None,
}
}
- pub fn additional_properties(mut self, additional_properties: bool) -> Self {
+ pub const fn additional_properties(mut self, additional_properties: bool) -> Self {
self.additional_properties = additional_properties;
self
}
- pub fn default_key(mut self, key: &'static str) -> Self {
+ pub const fn default_key(mut self, key: &'static str) -> Self {
self.default_key = Some(key);
self
}
- pub fn required<S: Into<Arc<Schema>>>(mut self, name: &'static str, schema: S) -> Self {
- self.properties.insert(name, (false, schema.into()));
- self
+ pub const fn schema(self) -> Schema {
+ Schema::Object(self)
}
- pub fn optional<S: Into<Arc<Schema>>>(mut self, name: &'static str, schema: S) -> Self {
- self.properties.insert(name, (true, schema.into()));
- self
+ pub fn lookup(&self, key: &str) -> Option<(bool, &Schema)> {
+ if let Ok(ind) = self.properties.binary_search_by_key(&key, |(name, _, _)| name) {
+ let (_name, optional, prop_schema) = self.properties[ind];
+ Some((optional, prop_schema))
+ } else {
+ None
+ }
}
}
Array(ArraySchema),
}
-impl From<StringSchema> for Schema {
- fn from(string_schema: StringSchema) -> Self {
- Schema::String(string_schema)
- }
-}
-
-impl From<StringSchema> for Arc<Schema> {
- fn from(string_schema: StringSchema) -> Self {
- Arc::new(Schema::String(string_schema))
- }
-}
-
-impl From<BooleanSchema> for Schema {
- fn from(boolean_schema: BooleanSchema) -> Self {
- Schema::Boolean(boolean_schema)
- }
-}
-
-impl From<BooleanSchema> for Arc<Schema> {
- fn from(boolean_schema: BooleanSchema) -> Self {
- Arc::new(Schema::Boolean(boolean_schema))
- }
-}
-
-impl From<IntegerSchema> for Schema {
- fn from(integer_schema: IntegerSchema) -> Self {
- Schema::Integer(integer_schema)
- }
-}
-
-impl From<IntegerSchema> for Arc<Schema> {
- fn from(integer_schema: IntegerSchema) -> Self {
- Arc::new(Schema::Integer(integer_schema))
- }
-}
-
-impl From<ObjectSchema> for Schema {
- fn from(object_schema: ObjectSchema) -> Self {
- Schema::Object(object_schema)
- }
-}
-
-impl From<ObjectSchema> for Arc<Schema> {
- fn from(object_schema: ObjectSchema) -> Self {
- Arc::new(Schema::Object(object_schema))
- }
-}
-
-impl From<ArraySchema> for Schema {
- fn from(array_schema: ArraySchema) -> Self {
- Schema::Array(array_schema)
- }
-}
-
-impl From<ArraySchema> for Arc<Schema> {
- fn from(array_schema: ArraySchema) -> Self {
- Arc::new(Schema::Array(array_schema))
- }
-}
-
pub enum ApiStringFormat {
Enum(&'static [&'static str]),
- Pattern(&'static Regex),
- Complex(Arc<Schema>),
+ Pattern(&'static ConstRegexPattern),
+ Complex(&'static Schema),
VerifyFn(fn(&str) -> Result<(), Error>),
}
let mut errors = ParameterError::new();
- let properties = &schema.properties;
let additional_properties = schema.additional_properties;
for (key, value) in data {
- if let Some((_optional, prop_schema)) = properties.get::<str>(key) {
- match prop_schema.as_ref() {
+ if let Some((_optional, prop_schema)) = schema.lookup(&key) {
+ match prop_schema {
Schema::Array(array_schema) => {
if params[key] == Value::Null {
params[key] = json!([]);
}
if test_required && errors.len() == 0 {
- for (name, (optional, _prop_schema)) in properties {
+ for (name, optional, _prop_schema) in schema.properties {
if !(*optional) && params[name] == Value::Null {
errors.push(format_err!("parameter '{}': parameter is missing and it is not optional.", name));
}
_ => bail!("Expected object - got scalar value."),
};
- let properties = &schema.properties;
let additional_properties = schema.additional_properties;
for (key, value) in map {
- if let Some((_optional, prop_schema)) = properties.get::<str>(key) {
- match prop_schema.as_ref() {
+ if let Some((_optional, prop_schema)) = schema.lookup(&key) {
+ match prop_schema {
Schema::Object(object_schema) => {
verify_json_object(value, object_schema)?;
}
}
}
- for (name, (optional, _prop_schema)) in properties {
+ for (name, optional, _prop_schema) in schema.properties {
if !(*optional) && data[name] == Value::Null {
bail!("property '{}': property is missing and it is not optional.", name);
}
let schema = Schema::Object(ObjectSchema {
description: "TEST",
additional_properties: false,
- properties: {
- let map = HashMap::new();
-
- map
- },
+ properties: &[],
default_key: None,
});
#[test]
fn test_query_string() {
- let schema = ObjectSchema::new("Parameters.")
- .required("name", StringSchema::new("Name."));
-
- let res = parse_query_string("", &schema, true);
- assert!(res.is_err());
-
- let schema = ObjectSchema::new("Parameters.")
- .optional("name", StringSchema::new("Name."));
-
- let res = parse_query_string("", &schema, true);
- assert!(res.is_ok());
-
- // TEST min_length and max_length
-
- let schema = ObjectSchema::new("Parameters.")
- .required(
- "name", StringSchema::new("Name.")
- .min_length(5)
- .max_length(10)
+ {
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[("name", false, &StringSchema::new("Name.").schema())]
);
- let res = parse_query_string("name=abcd", &schema, true);
- assert!(res.is_err());
-
- let res = parse_query_string("name=abcde", &schema, true);
- assert!(res.is_ok());
-
- let res = parse_query_string("name=abcdefghijk", &schema, true);
- assert!(res.is_err());
-
- let res = parse_query_string("name=abcdefghij", &schema, true);
- assert!(res.is_ok());
-
- // TEST regex pattern
-
- use lazy_static::lazy_static;
- lazy_static! {
- static ref TEST_REGEX: Regex = Regex::new("test").unwrap();
- static ref TEST2_REGEX: Regex = Regex::new("^test$").unwrap();
+ let res = parse_query_string("", &SCHEMA, true);
+ assert!(res.is_err());
}
- let schema = ObjectSchema::new("Parameters.")
- .required(
- "name", StringSchema::new("Name.")
- .format(Arc::new(ApiStringFormat::Pattern(&TEST_REGEX)))
- );
-
- let res = parse_query_string("name=abcd", &schema, true);
- assert!(res.is_err());
-
- let res = parse_query_string("name=ateststring", &schema, true);
- assert!(res.is_ok());
-
- let schema = ObjectSchema::new("Parameters.")
- .required(
- "name", StringSchema::new("Name.")
- .format(Arc::new(ApiStringFormat::Pattern(&TEST2_REGEX)))
+ {
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[("name", true, &StringSchema::new("Name.").schema())]
);
-
- let res = parse_query_string("name=ateststring", &schema, true);
- assert!(res.is_err());
-
- let res = parse_query_string("name=test", &schema, true);
- assert!(res.is_ok());
-
+
+ let res = parse_query_string("", &SCHEMA, true);
+ assert!(res.is_ok());
+ }
+
+ // TEST min_length and max_length
+ {
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("name", true, &StringSchema::new("Name.")
+ .min_length(5)
+ .max_length(10)
+ .schema()
+ ),
+ ]);
+
+ let res = parse_query_string("name=abcd", &SCHEMA, true);
+ assert!(res.is_err());
+
+ let res = parse_query_string("name=abcde", &SCHEMA, true);
+ assert!(res.is_ok());
+
+ let res = parse_query_string("name=abcdefghijk", &SCHEMA, true);
+ assert!(res.is_err());
+
+ let res = parse_query_string("name=abcdefghij", &SCHEMA, true);
+ assert!(res.is_ok());
+ }
+
+ // TEST regex pattern
+ const_regex! {
+ TEST_REGEX = "test";
+ TEST2_REGEX = "^test$";
+ }
+
+ {
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("name", false, &StringSchema::new("Name.")
+ .format(&ApiStringFormat::Pattern(&TEST_REGEX))
+ .schema()
+ ),
+ ]);
+
+ let res = parse_query_string("name=abcd", &SCHEMA, true);
+ assert!(res.is_err());
+
+ let res = parse_query_string("name=ateststring", &SCHEMA, true);
+ assert!(res.is_ok());
+ }
+
+ {
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("name", false, &StringSchema::new("Name.")
+ .format(&ApiStringFormat::Pattern(&TEST2_REGEX))
+ .schema()
+ ),
+ ]);
+
+ let res = parse_query_string("name=ateststring", &SCHEMA, true);
+ assert!(res.is_err());
+
+ let res = parse_query_string("name=test", &SCHEMA, true);
+ assert!(res.is_ok());
+ }
+
// TEST string enums
+ {
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("name", false, &StringSchema::new("Name.")
+ .format(&ApiStringFormat::Enum(&["ev1", "ev2"]))
+ .schema()
+ ),
+ ]);
- let schema = ObjectSchema::new("Parameters.")
- .required(
- "name", StringSchema::new("Name.")
- .format(Arc::new(ApiStringFormat::Enum(&["ev1", "ev2"])))
- );
-
- let res = parse_query_string("name=noenum", &schema, true);
- assert!(res.is_err());
-
- let res = parse_query_string("name=ev1", &schema, true);
- assert!(res.is_ok());
+ let res = parse_query_string("name=noenum", &SCHEMA, true);
+ assert!(res.is_err());
- let res = parse_query_string("name=ev2", &schema, true);
- assert!(res.is_ok());
+ let res = parse_query_string("name=ev1", &SCHEMA, true);
+ assert!(res.is_ok());
- let res = parse_query_string("name=ev3", &schema, true);
- assert!(res.is_err());
+ let res = parse_query_string("name=ev2", &SCHEMA, true);
+ assert!(res.is_ok());
+ let res = parse_query_string("name=ev3", &SCHEMA, true);
+ assert!(res.is_err());
+ }
}
#[test]
fn test_query_integer() {
- let schema = ObjectSchema::new("Parameters.")
- .required(
- "count" , IntegerSchema::new("Count.")
- );
-
- let res = parse_query_string("", &schema, true);
- assert!(res.is_err());
+ {
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("count", false, &IntegerSchema::new("Count.").schema()),
+ ]);
- let schema = ObjectSchema::new("Parameters.")
- .optional(
- "count", IntegerSchema::new("Count.")
- .minimum(-3)
- .maximum(50)
- );
+ let res = parse_query_string("", &SCHEMA, true);
+ assert!(res.is_err());
+ }
- let res = parse_query_string("", &schema, true);
- assert!(res.is_ok());
+ {
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("count", true, &IntegerSchema::new("Count.")
+ .minimum(-3)
+ .maximum(50)
+ .schema()
+ ),
+ ]);
+
+ let res = parse_query_string("", &SCHEMA, true);
+ assert!(res.is_ok());
- let res = parse_query_string("count=abc", &schema, false);
- assert!(res.is_err());
+ let res = parse_query_string("count=abc", &SCHEMA, false);
+ assert!(res.is_err());
- let res = parse_query_string("count=30", &schema, false);
- assert!(res.is_ok());
+ let res = parse_query_string("count=30", &SCHEMA, false);
+ assert!(res.is_ok());
- let res = parse_query_string("count=-1", &schema, false);
- assert!(res.is_ok());
+ let res = parse_query_string("count=-1", &SCHEMA, false);
+ assert!(res.is_ok());
- let res = parse_query_string("count=300", &schema, false);
- assert!(res.is_err());
+ let res = parse_query_string("count=300", &SCHEMA, false);
+ assert!(res.is_err());
- let res = parse_query_string("count=-30", &schema, false);
- assert!(res.is_err());
+ let res = parse_query_string("count=-30", &SCHEMA, false);
+ assert!(res.is_err());
- let res = parse_query_string("count=50", &schema, false);
- assert!(res.is_ok());
+ let res = parse_query_string("count=50", &SCHEMA, false);
+ assert!(res.is_ok());
- let res = parse_query_string("count=-3", &schema, false);
- assert!(res.is_ok());
+ let res = parse_query_string("count=-3", &SCHEMA, false);
+ assert!(res.is_ok());
+ }
}
#[test]
fn test_query_boolean() {
- let schema = ObjectSchema::new("Parameters.")
- .required(
- "force", BooleanSchema::new("Force.")
- );
-
- let res = parse_query_string("", &schema, true);
- assert!(res.is_err());
-
- let schema = ObjectSchema::new("Parameters.")
- .optional(
- "force", BooleanSchema::new("Force.")
- );
-
- let res = parse_query_string("", &schema, true);
- assert!(res.is_ok());
-
- let res = parse_query_string("a=b", &schema, true);
- assert!(res.is_err());
-
-
- let res = parse_query_string("force", &schema, true);
- assert!(res.is_err());
-
- let res = parse_query_string("force=yes", &schema, true);
- assert!(res.is_ok());
- let res = parse_query_string("force=1", &schema, true);
- assert!(res.is_ok());
- let res = parse_query_string("force=On", &schema, true);
- assert!(res.is_ok());
- let res = parse_query_string("force=TRUE", &schema, true);
- assert!(res.is_ok());
- let res = parse_query_string("force=TREU", &schema, true);
- assert!(res.is_err());
-
- let res = parse_query_string("force=NO", &schema, true);
- assert!(res.is_ok());
- let res = parse_query_string("force=0", &schema, true);
- assert!(res.is_ok());
- let res = parse_query_string("force=off", &schema, true);
- assert!(res.is_ok());
- let res = parse_query_string("force=False", &schema, true);
- assert!(res.is_ok());
+ {
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("force", false, &BooleanSchema::new("Force.").schema()),
+ ]);
+
+ let res = parse_query_string("", &SCHEMA, true);
+ assert!(res.is_err());
+ }
+
+ {
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("force", true, &BooleanSchema::new("Force.").schema()),
+ ]);
+
+ let res = parse_query_string("", &SCHEMA, true);
+ assert!(res.is_ok());
+
+ let res = parse_query_string("a=b", &SCHEMA, true);
+ assert!(res.is_err());
+
+ let res = parse_query_string("force", &SCHEMA, true);
+ assert!(res.is_err());
+
+ let res = parse_query_string("force=yes", &SCHEMA, true);
+ assert!(res.is_ok());
+ let res = parse_query_string("force=1", &SCHEMA, true);
+ assert!(res.is_ok());
+ let res = parse_query_string("force=On", &SCHEMA, true);
+ assert!(res.is_ok());
+ let res = parse_query_string("force=TRUE", &SCHEMA, true);
+ assert!(res.is_ok());
+ let res = parse_query_string("force=TREU", &SCHEMA, true);
+ assert!(res.is_err());
+
+ let res = parse_query_string("force=NO", &SCHEMA, true);
+ assert!(res.is_ok());
+ let res = parse_query_string("force=0", &SCHEMA, true);
+ assert!(res.is_ok());
+ let res = parse_query_string("force=off", &SCHEMA, true);
+ assert!(res.is_ok());
+ let res = parse_query_string("force=False", &SCHEMA, true);
+ assert!(res.is_ok());
+ }
}
#[test]
fn test_verify_function() {
- let schema = ObjectSchema::new("Parameters.")
- .required(
- "p1", StringSchema::new("P1")
- .format(ApiStringFormat::VerifyFn(|value| {
- if value == "test" { return Ok(()) };
- bail!("format error");
- }).into())
- );
-
- let res = parse_query_string("p1=tes", &schema, true);
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("p1", false, &StringSchema::new("P1")
+ .format(&ApiStringFormat::VerifyFn(|value| {
+ if value == "test" { return Ok(()) };
+ bail!("format error");
+ }))
+ .schema()
+ ),
+ ]);
+
+ let res = parse_query_string("p1=tes", &SCHEMA, true);
assert!(res.is_err());
- let res = parse_query_string("p1=test", &schema, true);
+ let res = parse_query_string("p1=test", &SCHEMA, true);
assert!(res.is_ok());
}
#[test]
fn test_verify_complex_object() {
- let nic_models = Arc::new(ApiStringFormat::Enum(
- &["e1000", "virtio"]));
-
- let param_schema: Arc<Schema> = ObjectSchema::new("Properties.")
+ const NIC_MODELS: ApiStringFormat = ApiStringFormat::Enum(&["e1000", "virtio"]);
+
+ const PARAM_SCHEMA: Schema = ObjectSchema::new(
+ "Properties.",
+ &[
+ ("enable", true, &BooleanSchema::new("Enable device.").schema()),
+ ("model", false, &StringSchema::new("Ethernet device Model.")
+ .format(&NIC_MODELS)
+ .schema()
+ ),
+ ])
.default_key("model")
- .required("model", StringSchema::new("Ethernet device Model.")
- .format(nic_models))
- .optional("enable", BooleanSchema::new("Enable device."))
- .into();
-
- let schema = ObjectSchema::new("Parameters.")
- .required(
- "net0", StringSchema::new("First Network device.")
- .format(ApiStringFormat::Complex(param_schema).into())
- );
-
- let res = parse_query_string("", &schema, true);
+ .schema();
+
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("net0", false, &StringSchema::new("First Network device.")
+ .format(&ApiStringFormat::Complex(&PARAM_SCHEMA))
+ .schema()
+ ),
+ ]);
+
+ let res = parse_query_string("", &SCHEMA, true);
assert!(res.is_err());
- let res = parse_query_string("test=abc", &schema, true);
+ let res = parse_query_string("test=abc", &SCHEMA, true);
assert!(res.is_err());
- let res = parse_query_string("net0=model=abc", &schema, true);
+ let res = parse_query_string("net0=model=abc", &SCHEMA, true);
assert!(res.is_err());
- let res = parse_query_string("net0=model=virtio", &schema, true);
- assert!(res.is_ok());
+ let res = parse_query_string("net0=model=virtio", &SCHEMA, true);
+ assert!(res.is_ok());
- let res = parse_query_string("net0=model=virtio,enable=1", &schema, true);
+ let res = parse_query_string("net0=model=virtio,enable=1", &SCHEMA, true);
assert!(res.is_ok());
- let res = parse_query_string("net0=virtio,enable=no", &schema, true);
+ let res = parse_query_string("net0=virtio,enable=no", &SCHEMA, true);
assert!(res.is_ok());
}
#[test]
fn test_verify_complex_array() {
- let param_schema: Arc<Schema> = ArraySchema::new(
- "Integer List.", Arc::new(IntegerSchema::new("Soemething").into()))
- .into();
+ {
+ const PARAM_SCHEMA: Schema = ArraySchema::new(
+ "Integer List.", &IntegerSchema::new("Something").schema())
+ .schema();
- let schema = ObjectSchema::new("Parameters.")
- .required(
- "list", StringSchema::new("A list on integers, comma separated.")
- .format(ApiStringFormat::Complex(param_schema).into())
- );
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("list", false, &StringSchema::new("A list of integers, comma separated.")
+ .format(&ApiStringFormat::Complex(&PARAM_SCHEMA))
+ .schema()
+ ),
+ ]);
- let res = parse_query_string("", &schema, true);
- assert!(res.is_err());
+ let res = parse_query_string("", &SCHEMA, true);
+ assert!(res.is_err());
- let res = parse_query_string("list=", &schema, true);
- assert!(res.is_ok());
+ let res = parse_query_string("list=", &SCHEMA, true);
+ assert!(res.is_ok());
- let res = parse_query_string("list=abc", &schema, true);
- assert!(res.is_err());
+ let res = parse_query_string("list=abc", &SCHEMA, true);
+ assert!(res.is_err());
- let res = parse_query_string("list=1", &schema, true);
- assert!(res.is_ok());
+ let res = parse_query_string("list=1", &SCHEMA, true);
+ assert!(res.is_ok());
- let res = parse_query_string("list=2,3,4,5", &schema, true);
- assert!(res.is_ok());
+ let res = parse_query_string("list=2,3,4,5", &SCHEMA, true);
+ assert!(res.is_ok());
+ }
- let param_schema: Arc<Schema> = ArraySchema::new(
- "Integer List.", Arc::new(IntegerSchema::new("Soemething").into()))
- .min_length(1)
- .max_length(3)
- .into();
+ {
- let schema = ObjectSchema::new("Parameters.")
- .required(
- "list", StringSchema::new("A list on integers, comma separated.")
- .format(ApiStringFormat::Complex(param_schema).into())
- );
+ const PARAM_SCHEMA: Schema = ArraySchema::new(
+ "Integer List.", &IntegerSchema::new("Something").schema())
+ .min_length(1)
+ .max_length(3)
+ .schema();
- let res = parse_query_string("list=", &schema, true);
- assert!(res.is_err());
+ const SCHEMA: ObjectSchema = ObjectSchema::new(
+ "Parameters.",
+ &[
+ ("list", false, &StringSchema::new("A list of integers, comma separated.")
+ .format(&ApiStringFormat::Complex(&PARAM_SCHEMA))
+ .schema()
+ ),
+ ]);
- let res = parse_query_string("list=1,2,3", &schema, true);
- assert!(res.is_ok());
+ let res = parse_query_string("list=", &SCHEMA, true);
+ assert!(res.is_err());
- let res = parse_query_string("list=2,3,4,5", &schema, true);
- assert!(res.is_err());
+ let res = parse_query_string("list=1,2,3", &SCHEMA, true);
+ assert!(res.is_ok());
+
+ let res = parse_query_string("list=2,3,4,5", &SCHEMA, true);
+ assert!(res.is_err());
+ }
}
fn main() -> Result<(), Error> {
- let api = api2::backup::backup_api();
+ let api = api2::backup::BACKUP_API_ROUTER;
dump_api(&mut std::io::stdout(), &api, ".", 0)?;
use failure::*;
use futures::*;
-use lazy_static::lazy_static;
use proxmox::tools::try_block;
}
let _ = csrf_secret(); // load with lazy_static
- lazy_static!{
- static ref ROUTER: Router = proxmox_backup::api2::router();
- }
-
let config = ApiConfig::new(
- buildcfg::JS_DIR, &ROUTER, RpcEnvironmentType::PRIVILEGED);
-
+ buildcfg::JS_DIR, &proxmox_backup::api2::ROUTER, RpcEnvironmentType::PRIVILEGED);
+
let rest_server = RestServer::new(config);
// http server future:
server.await?;
log::info!("done - exit server");
-
+
Ok(())
}
-//#[macro_use]
+#[macro_use]
extern crate proxmox_backup;
use failure::*;
use serde_json::{json, Value};
//use hyper::Body;
use std::sync::{Arc, Mutex};
-use regex::Regex;
+//use regex::Regex;
use xdg::BaseDirectories;
-use lazy_static::lazy_static;
use futures::*;
use tokio::sync::mpsc;
-lazy_static! {
- static ref BACKUPSPEC_REGEX: Regex = Regex::new(r"^([a-zA-Z0-9_-]+\.(?:pxar|img|conf|log)):(.+)$").unwrap();
-
- static ref REPO_URL_SCHEMA: Arc<Schema> = Arc::new(
- StringSchema::new("Repository URL.")
- .format(BACKUP_REPO_URL.clone())
- .max_length(256)
- .into()
- );
+proxmox_backup::const_regex! {
+ BACKUPSPEC_REGEX = r"^([a-zA-Z0-9_-]+\.(?:pxar|img|conf|log)):(.+)$";
}
+const REPO_URL_SCHEMA: Schema = StringSchema::new("Repository URL.")
+ .format(&BACKUP_REPO_URL)
+ .max_length(256)
+ .schema();
fn get_default_repository() -> Option<String> {
std::env::var("PBS_REPOSITORY").ok()
fn parse_backupspec(value: &str) -> Result<(&str, &str), Error> {
- if let Some(caps) = BACKUPSPEC_REGEX.captures(value) {
+ if let Some(caps) = (BACKUPSPEC_REGEX.regex_obj)().captures(value) {
return Ok((caps.get(1).unwrap().as_str(), caps.get(2).unwrap().as_str()));
}
bail!("unable to parse directory specification '{}'", value);
fn key_mgmt_cli() -> CliCommandMap {
- let kdf_schema: Arc<Schema> = Arc::new(
+ const KDF_SCHEMA: Schema =
StringSchema::new("Key derivation function. Choose 'none' to store the key unecrypted.")
- .format(Arc::new(ApiStringFormat::Enum(&["scrypt", "none"])))
- .default("scrypt")
- .into()
+ .format(&ApiStringFormat::Enum(&["scrypt", "none"]))
+ .default("scrypt")
+ .schema();
+
+ const API_METHOD_KEY_CREATE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&key_create),
+ &ObjectSchema::new(
+ "Create a new encryption key.",
+ &[
+ ("path", false, &StringSchema::new("File system path.").schema()),
+ ("kdf", true, &KDF_SCHEMA),
+ ],
+ )
);
-
- let key_create_cmd_def = CliCommand::new(
- ApiMethod::new(
- key_create,
- ObjectSchema::new("Create a new encryption key.")
- .required("path", StringSchema::new("File system path."))
- .optional("kdf", kdf_schema.clone())
- ))
+
+ let key_create_cmd_def = CliCommand::new(&API_METHOD_KEY_CREATE)
.arg_param(vec!["path"])
.completion_cb("path", tools::complete_file_name);
- let key_change_passphrase_cmd_def = CliCommand::new(
- ApiMethod::new(
- key_change_passphrase,
- ObjectSchema::new("Change the passphrase required to decrypt the key.")
- .required("path", StringSchema::new("File system path."))
- .optional("kdf", kdf_schema.clone())
- ))
+ const API_METHOD_KEY_CHANGE_PASSPHRASE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&key_change_passphrase),
+ &ObjectSchema::new(
+ "Change the passphrase required to decrypt the key.",
+ &[
+ ("path", false, &StringSchema::new("File system path.").schema()),
+ ("kdf", true, &KDF_SCHEMA),
+ ],
+ )
+ );
+
+ let key_change_passphrase_cmd_def = CliCommand::new(&API_METHOD_KEY_CHANGE_PASSPHRASE)
.arg_param(vec!["path"])
.completion_cb("path", tools::complete_file_name);
- let key_create_master_key_cmd_def = CliCommand::new(
- ApiMethod::new(
- key_create_master_key,
- ObjectSchema::new("Create a new 4096 bit RSA master pub/priv key pair.")
- ));
-
- let key_import_master_pubkey_cmd_def = CliCommand::new(
- ApiMethod::new(
- key_import_master_pubkey,
- ObjectSchema::new("Import a new RSA public key and use it as master key. The key is expected to be in '.pem' format.")
- .required("path", StringSchema::new("File system path."))
- ))
+ const API_METHOD_KEY_CREATE_MASTER_KEY: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&key_create_master_key),
+ &ObjectSchema::new("Create a new 4096 bit RSA master pub/priv key pair.", &[])
+ );
+
+ let key_create_master_key_cmd_def = CliCommand::new(&API_METHOD_KEY_CREATE_MASTER_KEY);
+
+ const API_METHOD_KEY_IMPORT_MASTER_PUBKEY: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&key_import_master_pubkey),
+ &ObjectSchema::new(
+ "Import a new RSA public key and use it as master key. The key is expected to be in '.pem' format.",
+ &[ ("path", false, &StringSchema::new("File system path.").schema()) ],
+ )
+ );
+
+ let key_import_master_pubkey_cmd_def = CliCommand::new(&API_METHOD_KEY_IMPORT_MASTER_PUBKEY)
.arg_param(vec!["path"])
.completion_cb("path", tools::complete_file_name);
.insert("change-passphrase".to_owned(), key_change_passphrase_cmd_def.into())
}
-
fn mount(
param: Value,
_info: &ApiMethod,
fn main() {
- let backup_source_schema: Arc<Schema> = Arc::new(
- StringSchema::new("Backup source specification ([<label>:<path>]).")
- .format(Arc::new(ApiStringFormat::Pattern(&BACKUPSPEC_REGEX)))
- .into()
- );
+ const BACKUP_SOURCE_SCHEMA: Schema = StringSchema::new("Backup source specification ([<label>:<path>]).")
+ .format(&ApiStringFormat::Pattern(&BACKUPSPEC_REGEX))
+ .schema();
- let backup_cmd_def = CliCommand::new(
- ApiMethod::new(
- create_backup,
- ObjectSchema::new("Create (host) backup.")
- .required(
+ const API_METHOD_CREATE_BACKUP: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&create_backup),
+ &ObjectSchema::new(
+ "Create (host) backup.",
+ &[
+ (
"backupspec",
- ArraySchema::new(
+ false,
+ &ArraySchema::new(
"List of backup source specifications ([<label.ext>:<path>] ...)",
- backup_source_schema,
- ).min_length(1)
- )
- .optional("repository", REPO_URL_SCHEMA.clone())
- .optional(
+ &BACKUP_SOURCE_SCHEMA,
+ ).min_length(1).schema()
+ ),
+ (
+ "repository",
+ true,
+ &REPO_URL_SCHEMA
+ ),
+ (
"include-dev",
- ArraySchema::new(
+ true,
+ &ArraySchema::new(
"Include mountpoints with same st_dev number (see ``man fstat``) as specified files.",
- StringSchema::new("Path to file.").into()
- )
- )
- .optional(
+ &StringSchema::new("Path to file.").schema()
+ ).schema()
+ ),
+ (
"keyfile",
- StringSchema::new("Path to encryption key. All data will be encrypted using this key."))
- .optional(
+ true,
+ &StringSchema::new("Path to encryption key. All data will be encrypted using this key.").schema()
+ ),
+ (
"verbose",
- BooleanSchema::new("Verbose output.").default(false))
- .optional(
+ true,
+ &BooleanSchema::new("Verbose output.")
+ .default(false)
+ .schema()
+ ),
+ (
"skip-lost-and-found",
- BooleanSchema::new("Skip lost+found directory").default(false))
- .optional(
+ true,
+ &BooleanSchema::new("Skip lost+found directory")
+ .default(false)
+ .schema()
+ ),
+ (
"backup-type",
- BACKUP_TYPE_SCHEMA.clone()
- )
- .optional(
+ true,
+ &BACKUP_TYPE_SCHEMA,
+ ),
+ (
"backup-id",
- BACKUP_ID_SCHEMA.clone()
- )
- .optional(
+ true,
+ &BACKUP_ID_SCHEMA
+ ),
+ (
"backup-time",
- BACKUP_TIME_SCHEMA.clone()
- )
- .optional(
+ true,
+ &BACKUP_TIME_SCHEMA
+ ),
+ (
"chunk-size",
- IntegerSchema::new("Chunk size in KB. Must be a power of 2.")
+ true,
+ &IntegerSchema::new("Chunk size in KB. Must be a power of 2.")
.minimum(64)
.maximum(4096)
.default(4096)
- )
- ))
+ .schema()
+ ),
+ ],
+ )
+ );
+
+ let backup_cmd_def = CliCommand::new(&API_METHOD_CREATE_BACKUP)
.arg_param(vec!["backupspec"])
.completion_cb("repository", complete_repository)
.completion_cb("backupspec", complete_backup_source)
.completion_cb("keyfile", tools::complete_file_name)
.completion_cb("chunk-size", complete_chunk_size);
- let upload_log_cmd_def = CliCommand::new(
- ApiMethod::new(
- upload_log,
- ObjectSchema::new("Upload backup log file.")
- .required("snapshot", StringSchema::new("Snapshot path."))
- .required("logfile", StringSchema::new("The path to the log file you want to upload."))
- .optional("repository", REPO_URL_SCHEMA.clone())
- .optional(
+ const API_METHOD_UPLOAD_LOG: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&upload_log),
+ &ObjectSchema::new(
+ "Upload backup log file.",
+ &[
+ (
+ "snapshot",
+ false,
+ &StringSchema::new("Snapshot path.").schema()
+ ),
+ (
+ "logfile",
+ false,
+ &StringSchema::new("The path to the log file you want to upload.").schema()
+ ),
+ (
+ "repository",
+ true,
+ &REPO_URL_SCHEMA
+ ),
+ (
"keyfile",
- StringSchema::new("Path to encryption key. All data will be encrypted using this key."))
- ))
+ true,
+ &StringSchema::new("Path to encryption key. All data will be encrypted using this key.").schema()
+ ),
+ ],
+ )
+ );
+
+ let upload_log_cmd_def = CliCommand::new(&API_METHOD_UPLOAD_LOG)
.arg_param(vec!["snapshot", "logfile"])
.completion_cb("snapshot", complete_backup_snapshot)
.completion_cb("logfile", tools::complete_file_name)
.completion_cb("keyfile", tools::complete_file_name)
.completion_cb("repository", complete_repository);
- let list_cmd_def = CliCommand::new(
- ApiMethod::new(
- list_backup_groups,
- ObjectSchema::new("List backup groups.")
- .optional("repository", REPO_URL_SCHEMA.clone())
- .optional("output-format", OUTPUT_FORMAT.clone())
- ))
+ const API_METHOD_LIST_BACKUP_GROUPS: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&list_backup_groups),
+ &ObjectSchema::new(
+ "List backup groups.",
+ &[
+ ("repository", true, &REPO_URL_SCHEMA),
+ ("output-format", true, &OUTPUT_FORMAT),
+ ],
+ )
+ );
+
+ let list_cmd_def = CliCommand::new(&API_METHOD_LIST_BACKUP_GROUPS)
.completion_cb("repository", complete_repository);
- let snapshots_cmd_def = CliCommand::new(
- ApiMethod::new(
- list_snapshots,
- ObjectSchema::new("List backup snapshots.")
- .optional("group", StringSchema::new("Backup group."))
- .optional("repository", REPO_URL_SCHEMA.clone())
- .optional("output-format", OUTPUT_FORMAT.clone())
- ))
+ const API_METHOD_LIST_SNAPSHOTS: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&list_snapshots),
+ &ObjectSchema::new(
+ "List backup snapshots.",
+ &[
+ ("group", true, &StringSchema::new("Backup group.").schema()),
+ ("repository", true, &REPO_URL_SCHEMA),
+ ("output-format", true, &OUTPUT_FORMAT),
+ ],
+ )
+ );
+
+ let snapshots_cmd_def = CliCommand::new(&API_METHOD_LIST_SNAPSHOTS)
.arg_param(vec!["group"])
.completion_cb("group", complete_backup_group)
.completion_cb("repository", complete_repository);
- let forget_cmd_def = CliCommand::new(
- ApiMethod::new(
- forget_snapshots,
- ObjectSchema::new("Forget (remove) backup snapshots.")
- .required("snapshot", StringSchema::new("Snapshot path."))
- .optional("repository", REPO_URL_SCHEMA.clone())
- ))
+ const API_METHOD_FORGET_SNAPSHOTS: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&forget_snapshots),
+ &ObjectSchema::new(
+ "Forget (remove) backup snapshots.",
+ &[
+ ("snapshot", false, &StringSchema::new("Snapshot path.").schema()),
+ ("repository", true, &REPO_URL_SCHEMA),
+ ],
+ )
+ );
+
+ let forget_cmd_def = CliCommand::new(&API_METHOD_FORGET_SNAPSHOTS)
.arg_param(vec!["snapshot"])
.completion_cb("repository", complete_repository)
.completion_cb("snapshot", complete_backup_snapshot);
- let garbage_collect_cmd_def = CliCommand::new(
- ApiMethod::new(
- start_garbage_collection,
- ObjectSchema::new("Start garbage collection for a specific repository.")
- .optional("repository", REPO_URL_SCHEMA.clone())
- ))
+ const API_METHOD_START_GARBAGE_COLLECTION: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&start_garbage_collection),
+ &ObjectSchema::new(
+ "Start garbage collection for a specific repository.",
+ &[ ("repository", true, &REPO_URL_SCHEMA) ],
+ )
+ );
+
+ let garbage_collect_cmd_def = CliCommand::new(&API_METHOD_START_GARBAGE_COLLECTION)
.completion_cb("repository", complete_repository);
- let restore_cmd_def = CliCommand::new(
- ApiMethod::new(
- restore,
- ObjectSchema::new("Restore backup repository.")
- .required("snapshot", StringSchema::new("Group/Snapshot path."))
- .required("archive-name", StringSchema::new("Backup archive name."))
- .required("target", StringSchema::new(r###"Target directory path. Use '-' to write to stdandard output.
+ const API_METHOD_RESTORE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&restore),
+ &ObjectSchema::new(
+ "Restore backup repository.",
+ &[
+ ("snapshot", false, &StringSchema::new("Group/Snapshot path.").schema()),
+ ("archive-name", false, &StringSchema::new("Backup archive name.").schema()),
+ (
+ "target",
+ false,
+ &StringSchema::new(
+ r###"Target directory path. Use '-' to write to stdandard output.
We do not extraxt '.pxar' archives when writing to stdandard output.
"###
- ))
- .optional(
+ ).schema()
+ ),
+ (
"allow-existing-dirs",
- BooleanSchema::new("Do not fail if directories already exists.").default(false))
- .optional("repository", REPO_URL_SCHEMA.clone())
- .optional("keyfile", StringSchema::new("Path to encryption key."))
- .optional(
+ true,
+ &BooleanSchema::new("Do not fail if directories already exist.")
+ .default(false)
+ .schema()
+ ),
+ ("repository", true, &REPO_URL_SCHEMA),
+ ("keyfile", true, &StringSchema::new("Path to encryption key.").schema()),
+ (
"verbose",
- BooleanSchema::new("Verbose output.").default(false)
- )
- ))
+ true,
+ &BooleanSchema::new("Verbose output.")
+ .default(false)
+ .schema()
+ ),
+ ],
+ )
+ );
+
+ let restore_cmd_def = CliCommand::new(&API_METHOD_RESTORE)
.arg_param(vec!["snapshot", "archive-name", "target"])
.completion_cb("repository", complete_repository)
.completion_cb("snapshot", complete_group_or_snapshot)
.completion_cb("archive-name", complete_archive_name)
.completion_cb("target", tools::complete_file_name);
- let files_cmd_def = CliCommand::new(
- ApiMethod::new(
- list_snapshot_files,
- ObjectSchema::new("List snapshot files.")
- .required("snapshot", StringSchema::new("Snapshot path."))
- .optional("repository", REPO_URL_SCHEMA.clone())
- .optional("output-format", OUTPUT_FORMAT.clone())
- ))
+ const API_METHOD_LIST_SNAPSHOT_FILES: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&list_snapshot_files),
+ &ObjectSchema::new(
+ "List snapshot files.",
+ &[
+ ("snapshot", false, &StringSchema::new("Snapshot path.").schema()),
+ ("repository", true, &REPO_URL_SCHEMA),
+ ("output-format", true, &OUTPUT_FORMAT),
+ ],
+ )
+ );
+
+ let files_cmd_def = CliCommand::new(&API_METHOD_LIST_SNAPSHOT_FILES)
.arg_param(vec!["snapshot"])
.completion_cb("repository", complete_repository)
.completion_cb("snapshot", complete_backup_snapshot);
- let catalog_cmd_def = CliCommand::new(
- ApiMethod::new(
- dump_catalog,
- ObjectSchema::new("Dump catalog.")
- .required("snapshot", StringSchema::new("Snapshot path."))
- .optional("repository", REPO_URL_SCHEMA.clone())
- ))
+ const API_METHOD_DUMP_CATALOG: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&dump_catalog),
+ &ObjectSchema::new(
+ "Dump catalog.",
+ &[
+ ("snapshot", false, &StringSchema::new("Snapshot path.").schema()),
+ ("repository", true, &REPO_URL_SCHEMA),
+ ],
+ )
+ );
+
+ let catalog_cmd_def = CliCommand::new(&API_METHOD_DUMP_CATALOG)
.arg_param(vec!["snapshot"])
.completion_cb("repository", complete_repository)
.completion_cb("snapshot", complete_backup_snapshot);
- let prune_cmd_def = CliCommand::new(
- ApiMethod::new(
- prune,
- proxmox_backup::api2::admin::datastore::add_common_prune_prameters(
- ObjectSchema::new("Prune backup repository.")
- .required("group", StringSchema::new("Backup group."))
- .optional("repository", REPO_URL_SCHEMA.clone())
+ const API_METHOD_PRUNE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&prune),
+ &ObjectSchema::new(
+ "Prune backup repository.",
+ &proxmox_backup::add_common_prune_prameters!(
+ ("group", false, &StringSchema::new("Backup group.").schema()),
+ ("repository", true, &REPO_URL_SCHEMA),
)
- ))
+ )
+ );
+
+ let prune_cmd_def = CliCommand::new(&API_METHOD_PRUNE)
.arg_param(vec!["group"])
.completion_cb("group", complete_backup_group)
.completion_cb("repository", complete_repository);
- let status_cmd_def = CliCommand::new(
- ApiMethod::new(
- status,
- ObjectSchema::new("Get repository status.")
- .optional("repository", REPO_URL_SCHEMA.clone())
- .optional("output-format", OUTPUT_FORMAT.clone())
- ))
+ const API_METHOD_STATUS: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&status),
+ &ObjectSchema::new(
+ "Get repository status.",
+ &[
+ ("repository", true, &REPO_URL_SCHEMA),
+ ("output-format", true, &OUTPUT_FORMAT),
+ ],
+ )
+ );
+
+ let status_cmd_def = CliCommand::new(&API_METHOD_STATUS)
.completion_cb("repository", complete_repository);
- let login_cmd_def = CliCommand::new(
- ApiMethod::new(
- api_login,
- ObjectSchema::new("Try to login. If successful, store ticket.")
- .optional("repository", REPO_URL_SCHEMA.clone())
- ))
+ const API_METHOD_API_LOGIN: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&api_login),
+ &ObjectSchema::new(
+ "Try to login. If successful, store ticket.",
+ &[ ("repository", true, &REPO_URL_SCHEMA) ],
+ )
+ );
+
+ let login_cmd_def = CliCommand::new(&API_METHOD_API_LOGIN)
.completion_cb("repository", complete_repository);
- let logout_cmd_def = CliCommand::new(
- ApiMethod::new(
- api_logout,
- ObjectSchema::new("Logout (delete stored ticket).")
- .optional("repository", REPO_URL_SCHEMA.clone())
- ))
+ const API_METHOD_API_LOGOUT: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&api_logout),
+ &ObjectSchema::new(
+ "Logout (delete stored ticket).",
+ &[ ("repository", true, &REPO_URL_SCHEMA) ],
+ )
+ );
+
+ let logout_cmd_def = CliCommand::new(&API_METHOD_API_LOGOUT)
.completion_cb("repository", complete_repository);
- let mount_cmd_def = CliCommand::new(
- ApiMethod::new(
- mount,
- ObjectSchema::new("Mount pxar archive.")
- .required("snapshot", StringSchema::new("Group/Snapshot path."))
- .required("archive-name", StringSchema::new("Backup archive name."))
- .required("target", StringSchema::new("Target directory path."))
- .optional("repository", REPO_URL_SCHEMA.clone())
- .optional("keyfile", StringSchema::new("Path to encryption key."))
- .optional("verbose", BooleanSchema::new("Verbose output.").default(false))
- ))
+ const API_METHOD_MOUNT: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&mount),
+ &ObjectSchema::new(
+ "Mount pxar archive.",
+ &[
+ ("snapshot", false, &StringSchema::new("Group/Snapshot path.").schema()),
+ ("archive-name", false, &StringSchema::new("Backup archive name.").schema()),
+ ("target", false, &StringSchema::new("Target directory path.").schema()),
+ ("repository", true, &REPO_URL_SCHEMA),
+ ("keyfile", true, &StringSchema::new("Path to encryption key.").schema()),
+ ("verbose", true, &BooleanSchema::new("Verbose output.").default(false).schema()),
+ ],
+ )
+ );
+
+ let mount_cmd_def = CliCommand::new(&API_METHOD_MOUNT)
.arg_param(vec!["snapshot", "archive-name", "target"])
.completion_cb("repository", complete_repository)
.completion_cb("snapshot", complete_group_or_snapshot)
use proxmox_backup::api2;
let cmd_def = CliCommandMap::new()
- .insert("list", CliCommand::new(api2::config::datastore::get()).into())
+ .insert("list", CliCommand::new(&api2::config::datastore::GET).into())
.insert("create",
- CliCommand::new(api2::config::datastore::post())
+ CliCommand::new(&api2::config::datastore::POST)
.arg_param(vec!["name", "path"])
.into())
.insert("remove",
- CliCommand::new(api2::config::datastore::delete())
+ CliCommand::new(&api2::config::datastore::DELETE)
.arg_param(vec!["name"])
.completion_cb("name", config::datastore::complete_datastore_name)
.into());
let cmd_def = CliCommandMap::new()
.insert("status",
- CliCommand::new(api2::admin::datastore::api_method_garbage_collection_status())
+ CliCommand::new(&api2::admin::datastore::API_METHOD_GARBAGE_COLLECTION_STATUS)
.arg_param(vec!["store"])
.completion_cb("store", config::datastore::complete_datastore_name)
.into())
.insert("start",
- CliCommand::new(api2::admin::datastore::api_method_start_garbage_collection())
+ CliCommand::new(&api2::admin::datastore::API_METHOD_START_GARBAGE_COLLECTION)
.arg_param(vec!["store"])
.completion_cb("store", config::datastore::complete_datastore_name)
.into());
use failure::*;
use proxmox::tools::try_block;
-use lazy_static::lazy_static;
use futures::*;
let _ = public_auth_key(); // load with lazy_static
let _ = csrf_secret(); // load with lazy_static
- lazy_static!{
- static ref ROUTER: Router = proxmox_backup::api2::router();
- }
-
let mut config = ApiConfig::new(
- buildcfg::JS_DIR, &ROUTER, RpcEnvironmentType::PUBLIC);
+ buildcfg::JS_DIR, &proxmox_backup::api2::ROUTER, RpcEnvironmentType::PUBLIC);
// add default dirs which includes jquery and bootstrap
// my $base = '/usr/share/libpve-http-server-perl';
use std::path::{Path, PathBuf};
use std::fs::OpenOptions;
use std::ffi::OsStr;
-use std::sync::Arc;
use std::os::unix::fs::OpenOptionsExt;
use std::os::unix::io::AsRawFd;
use std::collections::HashSet;
Ok(Value::Null)
}
+const API_METHOD_CREATE_ARCHIVE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&create_archive),
+ &ObjectSchema::new(
+ "Create new .pxar archive.",
+ &[
+ (
+ "archive",
+ false,
+ &StringSchema::new("Archive name").schema()
+ ),
+ (
+ "source",
+ false,
+ &StringSchema::new("Source directory.").schema()
+ ),
+ (
+ "verbose",
+ true,
+ &BooleanSchema::new("Verbose output.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-xattrs",
+ true,
+ &BooleanSchema::new("Ignore extended file attributes.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-fcaps",
+ true,
+ &BooleanSchema::new("Ignore file capabilities.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-acls",
+ true,
+ &BooleanSchema::new("Ignore access control list entries.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "all-file-systems",
+ true,
+ &BooleanSchema::new("Include mounted subdirs.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-device-nodes",
+ true,
+ &BooleanSchema::new("Ignore device nodes.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-fifos",
+ true,
+ &BooleanSchema::new("Ignore fifos.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-sockets",
+ true,
+ &BooleanSchema::new("Ignore sockets.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "exclude",
+ true,
+ &ArraySchema::new(
+ "List of paths or pattern matching files to exclude.",
+ &StringSchema::new("Path or pattern matching files to restore.").schema()
+ ).schema()
+ ),
+ ],
+ )
+);
+
+const API_METHOD_EXTRACT_ARCHIVE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&extract_archive),
+ &ObjectSchema::new(
+ "Extract an archive.",
+ &[
+ (
+ "archive",
+ false,
+ &StringSchema::new("Archive name.").schema()
+ ),
+ (
+ "pattern",
+ true,
+ &ArraySchema::new(
+ "List of paths or pattern matching files to restore",
+ &StringSchema::new("Path or pattern matching files to restore.").schema()
+ ).schema()
+ ),
+ (
+ "target",
+ true,
+ &StringSchema::new("Target directory.").schema()
+ ),
+ (
+ "verbose",
+ true,
+ &BooleanSchema::new("Verbose output.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-xattrs",
+ true,
+ &BooleanSchema::new("Ignore extended file attributes.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-fcaps",
+ true,
+ &BooleanSchema::new("Ignore file capabilities.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-acls",
+ true,
+ &BooleanSchema::new("Ignore access control list entries.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "allow-existing-dirs",
+ true,
+ &BooleanSchema::new("Allows directories to already exist on restore.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "files-from",
+ true,
+ &StringSchema::new("Match pattern for files to restore.").schema()
+ ),
+ (
+ "no-device-nodes",
+ true,
+ &BooleanSchema::new("Ignore device nodes.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-fifos",
+ true,
+ &BooleanSchema::new("Ignore fifos.")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-sockets",
+ true,
+ &BooleanSchema::new("Ignore sockets.")
+ .default(false)
+ .schema()
+ ),
+ ],
+ )
+);
+
+const API_METHOD_MOUNT_ARCHIVE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&mount_archive),
+ &ObjectSchema::new(
+ "Mount the archive as filesystem via FUSE.",
+ &[
+ (
+ "archive",
+ false,
+ &StringSchema::new("Archive name.").schema()
+ ),
+ (
+ "mountpoint",
+ false,
+ &StringSchema::new("Mountpoint for the filesystem root.").schema()
+ ),
+ (
+ "verbose",
+ true,
+ &BooleanSchema::new("Verbose output, keeps process running in foreground (for debugging).")
+ .default(false)
+ .schema()
+ ),
+ (
+ "no-mt",
+ true,
+ &BooleanSchema::new("Run in single threaded mode (for debugging).")
+ .default(false)
+ .schema()
+ ),
+ ],
+ )
+);
+
+const API_METHOD_DUMP_ARCHIVE: ApiMethod = ApiMethod::new(
+ &ApiHandler::Sync(&dump_archive),
+ &ObjectSchema::new(
+ "List the contents of an archive.",
+ &[
+ ( "archive", false, &StringSchema::new("Archive name.").schema()),
+ ( "verbose", true, &BooleanSchema::new("Verbose output.")
+ .default(false)
+ .schema()
+ ),
+ ]
+ )
+);
+
fn main() {
let cmd_def = CliCommandMap::new()
- .insert("create", CliCommand::new(
- ApiMethod::new(
- create_archive,
- ObjectSchema::new("Create new .pxar archive.")
- .required("archive", StringSchema::new("Archive name"))
- .required("source", StringSchema::new("Source directory."))
- .optional("verbose", BooleanSchema::new("Verbose output.").default(false))
- .optional("no-xattrs", BooleanSchema::new("Ignore extended file attributes.").default(false))
- .optional("no-fcaps", BooleanSchema::new("Ignore file capabilities.").default(false))
- .optional("no-acls", BooleanSchema::new("Ignore access control list entries.").default(false))
- .optional("all-file-systems", BooleanSchema::new("Include mounted sudirs.").default(false))
- .optional("no-device-nodes", BooleanSchema::new("Ignore device nodes.").default(false))
- .optional("no-fifos", BooleanSchema::new("Ignore fifos.").default(false))
- .optional("no-sockets", BooleanSchema::new("Ignore sockets.").default(false))
- .optional("exclude", Arc::new(
- ArraySchema::new(
- "List of paths or pattern matching files to exclude.",
- Arc::new(StringSchema::new("Path or pattern matching files to restore.").into())
- ).into()
- ))
- ))
+ .insert("create", CliCommand::new(&API_METHOD_CREATE_ARCHIVE)
.arg_param(vec!["archive", "source", "exclude"])
.completion_cb("archive", tools::complete_file_name)
.completion_cb("source", tools::complete_file_name)
.into()
)
- .insert("extract", CliCommand::new(
- ApiMethod::new(
- extract_archive,
- ObjectSchema::new("Extract an archive.")
- .required("archive", StringSchema::new("Archive name."))
- .optional("pattern", Arc::new(
- ArraySchema::new(
- "List of paths or pattern matching files to restore",
- Arc::new(StringSchema::new("Path or pattern matching files to restore.").into())
- ).into()
- ))
- .optional("target", StringSchema::new("Target directory."))
- .optional("verbose", BooleanSchema::new("Verbose output.").default(false))
- .optional("no-xattrs", BooleanSchema::new("Ignore extended file attributes.").default(false))
- .optional("no-fcaps", BooleanSchema::new("Ignore file capabilities.").default(false))
- .optional("no-acls", BooleanSchema::new("Ignore access control list entries.").default(false))
- .optional("allow-existing-dirs", BooleanSchema::new("Allows directories to already exist on restore.").default(false))
- .optional("files-from", StringSchema::new("Match pattern for files to restore."))
- .optional("no-device-nodes", BooleanSchema::new("Ignore device nodes.").default(false))
- .optional("no-fifos", BooleanSchema::new("Ignore fifos.").default(false))
- .optional("no-sockets", BooleanSchema::new("Ignore sockets.").default(false))
- ))
+ .insert("extract", CliCommand::new(&API_METHOD_EXTRACT_ARCHIVE)
.arg_param(vec!["archive", "pattern"])
.completion_cb("archive", tools::complete_file_name)
.completion_cb("target", tools::complete_file_name)
.completion_cb("files-from", tools::complete_file_name)
.into()
)
- .insert("mount", CliCommand::new(
- ApiMethod::new(
- mount_archive,
- ObjectSchema::new("Mount the archive as filesystem via FUSE.")
- .required("archive", StringSchema::new("Archive name."))
- .required("mountpoint", StringSchema::new("Mountpoint for the filesystem root."))
- .optional("verbose", BooleanSchema::new("Verbose output, keeps process running in foreground (for debugging).").default(false))
- .optional("no-mt", BooleanSchema::new("Run in single threaded mode (for debugging).").default(false))
- ))
+ .insert("mount", CliCommand::new(&API_METHOD_MOUNT_ARCHIVE)
.arg_param(vec!["archive", "mountpoint"])
.completion_cb("archive", tools::complete_file_name)
.completion_cb("mountpoint", tools::complete_file_name)
.into()
)
- .insert("list", CliCommand::new(
- ApiMethod::new(
- dump_archive,
- ObjectSchema::new("List the contents of an archive.")
- .required("archive", StringSchema::new("Archive name."))
- .optional("verbose", BooleanSchema::new("Verbose output.").default(false))
- ))
+ .insert("list", CliCommand::new(&API_METHOD_DUMP_ARCHIVE)
.arg_param(vec!["archive"])
.completion_cb("archive", tools::complete_file_name)
.into()
use failure::*;
-use lazy_static::lazy_static;
-use std::sync::Arc;
use serde_json::Value;
use std::collections::{HashMap, HashSet};
use crate::api_schema::*;
+use crate::api_schema::api_handler::*;
use crate::api_schema::router::*;
use crate::api_schema::format::*;
//use crate::api_schema::config::*;
use super::getopts;
-lazy_static!{
-
- pub static ref OUTPUT_FORMAT: Arc<Schema> =
- StringSchema::new("Output format.")
- .format(Arc::new(ApiStringFormat::Enum(&["text", "json", "json-pretty"])))
- .into();
-
-}
+pub const OUTPUT_FORMAT: Schema =
+ StringSchema::new("Output format.")
+ .format(&ApiStringFormat::Enum(&["text", "json", "json-pretty"]))
+ .schema();
/// Helper function to format and print result
///
let arg_param = &cli_cmd.arg_param;
let fixed_param = &cli_cmd.fixed_param;
- let properties = &cli_cmd.info.parameters.properties;
- let description = &cli_cmd.info.parameters.description;
-
+ let schema = cli_cmd.info.parameters;
+
let mut done_hash = HashSet::<&str>::new();
let mut args = String::new();
for positional_arg in arg_param {
- match properties.get(positional_arg) {
- Some((optional, schema)) => {
+ match schema.lookup(positional_arg) {
+ Some((optional, param_schema)) => {
args.push(' ');
- let is_array = if let Schema::Array(_) = schema.as_ref() { true } else { false };
- if *optional { args.push('['); }
+ let is_array = if let Schema::Array(_) = param_schema { true } else { false };
+ if optional { args.push('['); }
if is_array { args.push('{'); }
args.push('<'); args.push_str(positional_arg); args.push('>');
if is_array { args.push('}'); }
- if *optional { args.push(']'); }
+ if optional { args.push(']'); }
done_hash.insert(positional_arg);
}
let mut arg_descr = String::new();
for positional_arg in arg_param {
- let (_optional, schema) = properties.get(positional_arg).unwrap();
+ let (_optional, param_schema) = schema.lookup(positional_arg).unwrap();
let param_descr = get_property_description(
- positional_arg, &schema, ParameterDisplayStyle::Fixed, format);
+ positional_arg, param_schema, ParameterDisplayStyle::Fixed, format);
arg_descr.push_str(¶m_descr);
}
let mut options = String::new();
- let mut prop_names: Vec<&str> = properties.keys().map(|v| *v).collect();
- prop_names.sort();
-
- for prop in prop_names {
- let (optional, schema) = properties.get(prop).unwrap();
+ for (prop, optional, param_schema) in schema.properties {
if done_hash.contains(prop) { continue; }
- if fixed_param.contains_key(&prop) { continue; }
+ if fixed_param.contains_key(prop) { continue; }
- let type_text = get_schema_type_text(&schema, ParameterDisplayStyle::Arg);
+ let type_text = get_schema_type_text(param_schema, ParameterDisplayStyle::Arg);
if *optional {
if options.len() > 0 { options.push('\n'); }
- options.push_str(&get_property_description(prop, &schema, ParameterDisplayStyle::Arg, format));
+ options.push_str(&get_property_description(prop, param_schema, ParameterDisplayStyle::Arg, format));
} else {
args.push_str(" --"); args.push_str(prop);
format!("{}{}{}{}\n\n", indent, prefix, args, option_indicator)
}
DocumentationFormat::Full => {
- format!("{}{}{}{}\n\n{}\n\n", indent, prefix, args, option_indicator, description)
+ format!("{}{}{}{}\n\n{}\n\n", indent, prefix, args, option_indicator, schema.description)
}
DocumentationFormat::ReST => {
- format!("``{}{}{}``\n\n{}\n\n", prefix, args, option_indicator, description)
+ format!("``{}{}{}``\n\n{}\n\n", prefix, args, option_indicator, schema.description)
}
};
eprint!("Error: {}\nUsage: {}", err, usage);
}
-fn print_help(
+pub fn print_help(
top_def: &CommandLineInterface,
mut prefix: String,
args: &Vec<String>,
}
fn handle_simple_command(
- top_def: &CommandLineInterface,
+ _top_def: &CommandLineInterface,
prefix: &str,
cli_cmd: &CliCommand,
args: Vec<String>,
}
};
- if cli_cmd.info.handler.is_none() {
- let prefix = prefix.split(' ').next().unwrap().to_string();
- print_help(top_def, prefix, &rest, params["verbose"].as_bool());
- return;
- }
-
if !rest.is_empty() {
let err = format_err!("got additional arguments: {:?}", rest);
print_simple_usage_error(prefix, cli_cmd, err);
let mut rpcenv = CliEnvironment::new();
- match (cli_cmd.info.handler.as_ref().unwrap())(params, &cli_cmd.info, &mut rpcenv) {
- Ok(value) => {
- if value != Value::Null {
- println!("Result: {}", serde_json::to_string_pretty(&value).unwrap());
+ match cli_cmd.info.handler {
+ ApiHandler::Sync(handler) => {
+ match (handler)(params, &cli_cmd.info, &mut rpcenv) {
+ Ok(value) => {
+ if value != Value::Null {
+ println!("Result: {}", serde_json::to_string_pretty(&value).unwrap());
+ }
+ }
+ Err(err) => {
+ eprintln!("Error: {}", err);
+ std::process::exit(-1);
+ }
}
}
- Err(err) => {
- eprintln!("Error: {}", err);
- std::process::exit(-1);
+ ApiHandler::Async(_) => {
+ //fixme
+ unimplemented!();
}
- }
+ }
}
fn find_command<'a>(def: &'a CliCommandMap, name: &str) -> Option<&'a CommandLineInterface> {
}
if let Schema::String(StringSchema { format: Some(format), ..} ) = schema {
- if let ApiStringFormat::Enum(list) = *format.as_ref() {
- for value in list {
+ if let ApiStringFormat::Enum(list) = format {
+ for value in list.iter() {
if value.starts_with(arg) {
println!("{}", value);
}
fn record_done_argument(done: &mut HashMap<String, String>, parameters: &ObjectSchema, key: &str, value: &str) {
- if let Some((_, schema)) = parameters.properties.get::<str>(key) {
- match schema.as_ref() {
+ if let Some((_, schema)) = parameters.lookup(key) {
+ match schema {
Schema::Array(_) => { /* do nothing ?? */ }
_ => { done.insert(key.to_owned(), value.to_owned()); }
}
if !arg_param.is_empty() {
let prop_name = arg_param[0];
if args.len() > 1 {
- record_done_argument(done, &cli_cmd.info.parameters, prop_name, &args[0]);
+ record_done_argument(done, cli_cmd.info.parameters, prop_name, &args[0]);
print_simple_completion(cli_cmd, done, arg_param, &arg_param[1..], &args[1..]);
return;
} else if args.len() == 1 {
- record_done_argument(done, &cli_cmd.info.parameters, prop_name, &args[0]);
- if let Some((_, schema)) = cli_cmd.info.parameters.properties.get(prop_name) {
+ record_done_argument(done, cli_cmd.info.parameters, prop_name, &args[0]);
+ if let Some((_, schema)) = cli_cmd.info.parameters.lookup(prop_name) {
print_property_completion(schema, prop_name, &cli_cmd.completion_functions, &args[0], done);
}
}
let last = &args[args.len()-2];
if last.starts_with("--") && last.len() > 2 {
let prop_name = &last[2..];
- if let Some((_, schema)) = cli_cmd.info.parameters.properties.get(prop_name) {
+ if let Some((_, schema)) = cli_cmd.info.parameters.lookup(prop_name) {
print_property_completion(schema, prop_name, &cli_cmd.completion_functions, &prefix, done);
}
return;
}
}
- for (name, (_optional, _schema)) in &cli_cmd.info.parameters.properties {
+ for (name, _optional, _schema) in cli_cmd.info.parameters.properties {
if done.contains_key(*name) { continue; }
if all_arg_param.contains(name) { continue; }
let option = String::from("--") + name;
}
}
+const VERBOSE_HELP_SCHEMA: Schema = BooleanSchema::new("Verbose help.").schema();
+const COMMAND_HELP: ObjectSchema = ObjectSchema::new(
+ "Get help about specified command.",
+ &[ ("verbose", true, &VERBOSE_HELP_SCHEMA) ]
+);
+
+const API_METHOD_COMMAND_HELP: ApiMethod = ApiMethod::new_dummy(&COMMAND_HELP);
+
fn help_command_def() -> CliCommand {
- CliCommand::new(
- ApiMethod::new_dummy(
- ObjectSchema::new("Get help about specified command.")
- .optional("verbose", BooleanSchema::new("Verbose help."))
- )
- )
+ CliCommand::new(&API_METHOD_COMMAND_HELP)
}
pub fn run_cli_command(def: CommandLineInterface) {
pub type CompletionFunction = fn(&str, &HashMap<String, String>) -> Vec<String>;
pub struct CliCommand {
- pub info: ApiMethod,
+ pub info: &'static ApiMethod,
pub arg_param: Vec<&'static str>,
pub fixed_param: HashMap<&'static str, String>,
pub completion_functions: HashMap<String, CompletionFunction>,
impl CliCommand {
- pub fn new(info: ApiMethod) -> Self {
+ pub fn new(info: &'static ApiMethod) -> Self {
Self {
info, arg_param: vec![],
fixed_param: HashMap::new(),
let mut pos = 0;
- let properties = &schema.properties;
-
while pos < args.len() {
match parse_argument(args[pos].as_ref()) {
RawArgument::Separator => {
None => {
let mut want_bool = false;
let mut can_default = false;
- if let Some((_optional, param_schema)) = properties.get::<str>(&name) {
- if let Schema::Boolean(boolean_schema) = param_schema.as_ref() {
+ if let Some((_optional, param_schema)) = schema.lookup(&name) {
+ if let Schema::Boolean(boolean_schema) = param_schema {
want_bool = true;
if let Some(default) = boolean_schema.default {
if default == false {
) -> Result<(Value, Vec<String>), ParameterError> {
let mut errors = ParameterError::new();
- let properties = &schema.properties;
-
// first check if all arg_param exists in schema
let mut last_arg_param_is_optional = false;
for i in 0..arg_param.len() {
let name = arg_param[i];
- if let Some((optional, param_schema)) = properties.get::<str>(&name) {
+ if let Some((optional, param_schema)) = schema.lookup(&name) {
if i == arg_param.len() -1 {
- last_arg_param_is_optional = *optional;
- if let Schema::Array(_) = param_schema.as_ref() {
+ last_arg_param_is_optional = optional;
+ if let Schema::Array(_) = param_schema {
last_arg_param_is_array = true;
}
- } else if *optional {
+ } else if optional {
panic!("positional argument '{}' may not be optional", name);
}
} else {
#[test]
fn test_boolean_arg() {
- let schema = ObjectSchema::new("Parameters:")
- .required(
- "enable", BooleanSchema::new("Enable")
- );
+
+ const PARAMETERS: ObjectSchema = ObjectSchema::new(
+ "Parameters:",
+ &[ ("enable", false, &BooleanSchema::new("Enable").schema()) ],
+ );
let mut variants: Vec<(Vec<&str>, bool)> = vec![];
variants.push((vec!["-enable"], true));
variants.push((vec!["--enable", "false"], false));
for (args, expect) in variants {
- let res = parse_arguments(&args, &vec![], &schema);
+ let res = parse_arguments(&args, &vec![], &PARAMETERS);
assert!(res.is_ok());
if let Ok((options, rest)) = res {
assert!(options["enable"] == expect);
#[test]
fn test_argument_paramenter() {
- let schema = ObjectSchema::new("Parameters:")
- .required("enable", BooleanSchema::new("Enable."))
- .required("storage", StringSchema::new("Storage."));
+
+ const PARAMETERS: ObjectSchema = ObjectSchema::new(
+ "Parameters:",
+ &[
+ ("enable", false, &BooleanSchema::new("Enable.").schema()),
+ ("storage", false, &StringSchema::new("Storage.").schema()),
+ ],
+ );
let args = vec!["-enable", "local"];
- let res = parse_arguments(&args, &vec!["storage"], &schema);
+ let res = parse_arguments(&args, &vec!["storage"], &PARAMETERS);
assert!(res.is_ok());
if let Ok((options, rest)) = res {
assert!(options["enable"] == true);
use crate::api_schema::*;
-use std::sync::Arc;
-use lazy_static::lazy_static;
-use regex::Regex;
use std::fmt;
-lazy_static! {
+const_regex! {
/// Regular expression to parse repository URLs
- pub static ref BACKUP_REPO_URL_REGEX: Regex =
- Regex::new(r"^(?:(?:([\w@]+)@)?([\w\-_.]+):)?(\w+)$").unwrap();
-
- /// API schema format definition for repository URLs
- pub static ref BACKUP_REPO_URL: Arc<ApiStringFormat> =
- ApiStringFormat::Pattern(&BACKUP_REPO_URL_REGEX).into();
+ pub BACKUP_REPO_URL_REGEX = r"^(?:(?:([\w@]+)@)?([\w\-_.]+):)?(\w+)$";
}
+/// API schema format definition for repository URLs
+pub const BACKUP_REPO_URL: ApiStringFormat = ApiStringFormat::Pattern(&BACKUP_REPO_URL_REGEX);
+
/// Reference remote backup locations
///
/// host, and `user` defaults to `root@pam`.
fn from_str(url: &str) -> Result<Self, Self::Err> {
- let cap = BACKUP_REPO_URL_REGEX.captures(url)
+ let cap = (BACKUP_REPO_URL_REGEX.regex_obj)().captures(url)
.ok_or_else(|| format_err!("unable to parse repository url '{}'", url))?;
Ok(Self {
use proxmox::tools::{fs::file_set_contents, try_block};
-use crate::api_schema::{ObjectSchema, StringSchema};
+use crate::api_schema::{Schema, ObjectSchema, StringSchema};
use crate::section_config::{SectionConfig, SectionConfigData, SectionConfigPlugin};
lazy_static! {
static ref CONFIG: SectionConfig = init();
}
-fn init() -> SectionConfig {
- let plugin = SectionConfigPlugin::new(
- "datastore".to_string(),
- ObjectSchema::new("DataStore properties")
- .required("path", StringSchema::new("Directory name")),
- );
-
- let id_schema = StringSchema::new("DataStore ID schema.")
- .min_length(3)
- .into();
+const DIR_NAME_SCHEMA: Schema = StringSchema::new("Directory name").schema();
+const DATASTORE_ID_SCHEMA: Schema = StringSchema::new("DataStore ID schema.")
+ .min_length(3)
+ .schema();
+const DATASTORE_PROPERTIES: ObjectSchema = ObjectSchema::new(
+ "DataStore properties",
+ &[
+ ("path", false, &DIR_NAME_SCHEMA)
+ ]
+);
- let mut config = SectionConfig::new(id_schema);
+fn init() -> SectionConfig {
+ let plugin = SectionConfigPlugin::new("datastore".to_string(), &DATASTORE_PROPERTIES);
+ let mut config = SectionConfig::new(&DATASTORE_ID_SCHEMA);
config.register_plugin(plugin);
config
use serde_json::{json, Value};
-use std::sync::Arc;
-
use proxmox::tools::try_block;
use crate::api_schema::*;
pub struct SectionConfigPlugin {
type_name: String,
- properties: ObjectSchema,
+ properties: &'static ObjectSchema,
}
impl SectionConfigPlugin {
- pub fn new(type_name: String, properties: ObjectSchema) -> Self {
+ pub fn new(type_name: String, properties: &'static ObjectSchema) -> Self {
Self { type_name, properties }
}
pub struct SectionConfig {
plugins: HashMap<String, SectionConfigPlugin>,
- id_schema: Arc<Schema>,
+ id_schema: &'static Schema,
parse_section_header: fn(&str) -> Option<(String, String)>,
parse_section_content: fn(&str) -> Option<(String, String)>,
format_section_header: fn(type_name: &str, section_id: &str, data: &Value) -> String,
impl SectionConfig {
- pub fn new(id_schema: Arc<Schema>) -> Self {
+ pub fn new(id_schema: &'static Schema) -> Self {
Self {
plugins: HashMap::new(),
id_schema,
let mut state = ParseState::BeforeHeader;
let test_required_properties = |value: &Value, schema: &ObjectSchema| -> Result<(), Error> {
- for (name, (optional, _prop_schema)) in &schema.properties {
+ for (name, optional, _prop_schema) in schema.properties {
if *optional == false && value[name] == Value::Null {
return Err(format_err!("property '{}' is missing and it is not optional.", name));
}
if let Some((key, value)) = (self.parse_section_content)(line) {
//println!("CONTENT: key: {} value: {}", key, value);
- if let Some((_optional, prop_schema)) = plugin.properties.properties.get::<str>(&key) {
+ if let Some((_optional, prop_schema)) = plugin.properties.lookup(&key) {
match parse_simple_value(&value, prop_schema) {
Ok(value) => {
if config[&key] == Value::Null {
//let mut contents = String::new();
//file.read_to_string(&mut contents).unwrap();
- let plugin = SectionConfigPlugin::new(
- "lvmthin".to_string(),
- ObjectSchema::new("lvmthin properties")
- .required("thinpool", StringSchema::new("LVM thin pool name."))
- .required("vgname", StringSchema::new("LVM volume group name."))
- .optional("content", StringSchema::new("Storage content types."))
+ const PROPERTIES: ObjectSchema = ObjectSchema::new(
+ "lvmthin properties",
+ &[
+ ("content", true, &StringSchema::new("Storage content types.").schema()),
+ ("thinpool", false, &StringSchema::new("LVM thin pool name.").schema()),
+ ("vgname", false, &StringSchema::new("LVM volume group name.").schema()),
+ ],
);
- let id_schema = StringSchema::new("Storage ID schema.")
+ let plugin = SectionConfigPlugin::new("lvmthin".to_string(), &PROPERTIES);
+
+ const ID_SCHEMA: Schema = StringSchema::new("Storage ID schema.")
.min_length(3)
- .into();
+ .schema();
- let mut config = SectionConfig::new(id_schema);
+ let mut config = SectionConfig::new(&ID_SCHEMA);
config.register_plugin(plugin);
let raw = r"
use hyper::{Body, Request, Response, StatusCode};
use crate::tools;
+use crate::api_schema::api_handler::*;
use crate::api_schema::router::*;
use crate::server::formatter::*;
use crate::server::WorkerTask;
let formatter = &JSON_FORMATTER;
match self.router.find_method(&components, method, &mut uri_param) {
- MethodDefinition::None => {
+ None => {
let err = http_err!(NOT_FOUND, "Path not found.".to_string());
Box::new(future::ok((formatter.format_error)(err)))
}
- MethodDefinition::Simple(api_method) => {
- crate::server::rest::handle_sync_api_request(
- self.rpcenv.clone(), api_method, formatter, parts, body, uri_param)
- }
- MethodDefinition::Async(async_method) => {
- crate::server::rest::handle_async_api_request(
- self.rpcenv.clone(), async_method, formatter, parts, body, uri_param)
+ Some(api_method) => {
+ match api_method.handler {
+ ApiHandler::Sync(_) => {
+ crate::server::rest::handle_sync_api_request(
+ self.rpcenv.clone(), api_method, formatter, parts, body, uri_param)
+ }
+ ApiHandler::Async(_) => {
+ crate::server::rest::handle_async_api_request(
+ self.rpcenv.clone(), api_method, formatter, parts, body, uri_param)
+ }
+ }
}
}
}
use super::environment::RestEnvironment;
use super::formatter::*;
+use crate::api_schema::rpc_environment::*;
+use crate::api_schema::api_handler::*;
use crate::api_schema::config::*;
use crate::api_schema::router::*;
use crate::api_schema::*;
if is_json {
let mut params: Value = serde_json::from_str(utf8)?;
for (k, v) in uri_param {
- if let Some((_optional, prop_schema)) = obj_schema.properties.get::<str>(&k) {
+ if let Some((_optional, prop_schema)) = obj_schema.lookup(&k) {
params[&k] = parse_simple_value(&v, prop_schema)?;
}
}
uri_param: HashMap<String, String, S>,
) -> BoxFut
{
+ let handler = match info.handler {
+ ApiHandler::Async(_) => {
+ panic!("fixme");
+ }
+ ApiHandler::Sync(handler) => handler,
+ };
+
let params = get_request_parameters_async(info, parts, req_body, uri_param);
let delay_unauth_time = std::time::Instant::now() + std::time::Duration::from_millis(3000);
let resp = Pin::from(params)
.and_then(move |params| {
let mut delay = false;
- let resp = match (info.handler.as_ref().unwrap())(params, info, &mut rpcenv) {
+
+ let resp = match (handler)(params, info, &mut rpcenv) {
Ok(data) => (formatter.format_data)(data, &rpcenv),
Err(err) => {
if let Some(httperr) = err.downcast_ref::<HttpError>() {
pub fn handle_async_api_request<Env: RpcEnvironment>(
rpcenv: Env,
- info: &'static ApiAsyncMethod,
+ info: &'static ApiMethod,
formatter: &'static OutputFormatter,
parts: Parts,
req_body: Body,
uri_param: HashMap<String, String>,
) -> BoxFut
{
+ let handler = match info.handler {
+ ApiHandler::Sync(_) => {
+ panic!("fixme");
+ }
+ ApiHandler::Async(handler) => handler,
+ };
+
// fixme: convert parameters to Json
let mut param_list: Vec<(String, String)> = vec![];
}
};
- match (info.handler)(parts, req_body, params, info, Box::new(rpcenv)) {
+ match (handler)(parts, req_body, params, info, Box::new(rpcenv)) {
Ok(future) => future,
Err(err) => {
let resp = (formatter.format_error)(err);
}
match api.find_method(&components[2..], method, &mut uri_param) {
- MethodDefinition::None => {
+ None => {
let err = http_err!(NOT_FOUND, "Path not found.".to_string());
return Box::new(future::ok((formatter.format_error)(err)));
}
- MethodDefinition::Simple(api_method) => {
+ Some(api_method) => {
if api_method.protected && env_type == RpcEnvironmentType::PUBLIC {
return proxy_protected_request(api_method, parts, body);
} else {
- return handle_sync_api_request(rpcenv, api_method, formatter, parts, body, uri_param);
+ match api_method.handler {
+ ApiHandler::Sync(_) => {
+ return handle_sync_api_request(rpcenv, api_method, formatter, parts, body, uri_param);
+ }
+ ApiHandler::Async(_) => {
+ return handle_async_api_request(rpcenv, api_method, formatter, parts, body, uri_param);
+ }
+ }
}
}
- MethodDefinition::Async(async_method) => {
- return handle_async_api_request(rpcenv, async_method, formatter, parts, body, uri_param);
- }
}
}
} else {
static ref STORAGE_SECTION_CONFIG: SectionConfig = register_storage_plugins();
}
+const ID_SCHEMA: Schema = StringSchema::new("Storage ID schema.")
+ .min_length(3)
+ .schema();
+
+const LVMTHIN_PROPERTIES: ObjectSchema = ObjectSchema::new(
+ "lvmthin properties",
+ &[
+ ("thinpool", false, &StringSchema::new("LVM thin pool name.").schema()),
+ ("vgname", false, &StringSchema::new("LVM volume group name.").schema()),
+ ("content", true, &StringSchema::new("Storage content types.").schema()),
+ ],
+);
+
fn register_storage_plugins() -> SectionConfig {
- let plugin = SectionConfigPlugin::new(
- "lvmthin".to_string(),
- ObjectSchema::new("lvmthin properties")
- .required("thinpool", StringSchema::new("LVM thin pool name."))
- .required("vgname", StringSchema::new("LVM volume group name."))
- .optional("content", StringSchema::new("Storage content types."))
- );
-
- let id_schema = StringSchema::new("Storage ID schema.")
- .min_length(3)
- .into();
-
- let mut config = SectionConfig::new(id_schema);
+ let plugin = SectionConfigPlugin::new("lvmthin".to_string(), &LVMTHIN_PROPERTIES);
+ let mut config = SectionConfig::new(&ID_SCHEMA);
config.register_plugin(plugin);
config