use std::collections::HashMap;
+use std::future::Future;
use std::hash::BuildHasher;
use std::path::{Path, PathBuf};
use std::pin::Pin;
-use std::sync::Arc;
+use std::sync::{Arc, Mutex};
use std::task::{Context, Poll};
-use failure::*;
-use futures::future::{self, Either, FutureExt, TryFutureExt};
+use anyhow::{bail, format_err, Error};
+use futures::future::{self, FutureExt, TryFutureExt};
use futures::stream::TryStreamExt;
-use hyper::header;
+use hyper::body::HttpBody;
+use hyper::header::{self, HeaderMap};
use hyper::http::request::Parts;
-use hyper::rt::Future;
use hyper::{Body, Request, Response, StatusCode};
+use lazy_static::lazy_static;
+use regex::Regex;
use serde_json::{json, Value};
use tokio::fs::File;
+use tokio::time::Instant;
use url::form_urlencoded;
+use proxmox::api::schema::{
+ parse_parameter_strings, parse_simple_value, verify_json_object, ObjectSchemaType,
+ ParameterSchema,
+};
+use proxmox::api::{
+ check_api_permission, ApiHandler, ApiMethod, HttpError, Permission, RpcEnvironment,
+ RpcEnvironmentType,
+};
+use proxmox::http_err;
+
+use pbs_tools::compression::{DeflateEncoder, Level};
+use pbs_tools::stream::AsyncReaderStream;
+
+use super::auth::AuthError;
use super::environment::RestEnvironment;
use super::formatter::*;
-use crate::api_schema::rpc_environment::*;
-use crate::api_schema::config::*;
-use crate::api_schema::router::*;
-use crate::api_schema::*;
+use super::ApiConfig;
+
+use crate::api2::types::{Authid, Userid};
use crate::auth_helpers::*;
+use crate::config::cached_user_info::CachedUserInfo;
use crate::tools;
+use crate::tools::compression::CompressionMethod;
+use crate::tools::FileLogger;
-extern "C" { fn tzset(); }
+extern "C" {
+ fn tzset();
+}
pub struct RestServer {
pub api_config: Arc<ApiConfig>,
}
-impl RestServer {
+const MAX_URI_QUERY_LENGTH: usize = 3072;
+const CHUNK_SIZE_LIMIT: u64 = 32 * 1024;
+impl RestServer {
pub fn new(api_config: ApiConfig) -> Self {
- Self { api_config: Arc::new(api_config) }
+ Self {
+ api_config: Arc::new(api_config),
+ }
}
}
-impl tower_service::Service<&tokio_openssl::SslStream<tokio::net::TcpStream>> for RestServer {
+impl tower_service::Service<&Pin<Box<tokio_openssl::SslStream<tokio::net::TcpStream>>>>
+ for RestServer
+{
type Response = ApiService;
type Error = Error;
type Future = Pin<Box<dyn Future<Output = Result<ApiService, Error>> + Send>>;
Poll::Ready(Ok(()))
}
- fn call(&mut self, ctx: &tokio_openssl::SslStream<tokio::net::TcpStream>) -> Self::Future {
+ fn call(
+ &mut self,
+ ctx: &Pin<Box<tokio_openssl::SslStream<tokio::net::TcpStream>>>,
+ ) -> Self::Future {
match ctx.get_ref().peer_addr() {
- Err(err) => {
- future::err(format_err!("unable to get peer address - {}", err)).boxed()
- }
- Ok(peer) => {
- future::ok(ApiService { peer, api_config: self.api_config.clone() }).boxed()
- }
+ Err(err) => future::err(format_err!("unable to get peer address - {}", err)).boxed(),
+ Ok(peer) => future::ok(ApiService {
+ peer,
+ api_config: self.api_config.clone(),
+ })
+ .boxed(),
}
}
}
fn call(&mut self, ctx: &tokio::net::TcpStream) -> Self::Future {
match ctx.peer_addr() {
- Err(err) => {
- future::err(format_err!("unable to get peer address - {}", err)).boxed()
- }
- Ok(peer) => {
- future::ok(ApiService { peer, api_config: self.api_config.clone() }).boxed()
- }
+ Err(err) => future::err(format_err!("unable to get peer address - {}", err)).boxed(),
+ Ok(peer) => future::ok(ApiService {
+ peer,
+ api_config: self.api_config.clone(),
+ })
+ .boxed(),
}
}
}
+impl tower_service::Service<&tokio::net::UnixStream> for RestServer {
+ type Response = ApiService;
+ type Error = Error;
+ type Future = Pin<Box<dyn Future<Output = Result<ApiService, Error>> + Send>>;
+
+ fn poll_ready(&mut self, _cx: &mut Context) -> Poll<Result<(), Self::Error>> {
+ Poll::Ready(Ok(()))
+ }
+
+ fn call(&mut self, _ctx: &tokio::net::UnixStream) -> Self::Future {
+ // TODO: Find a way to actually represent the vsock peer in the ApiService struct - for now
+ // it doesn't really matter, so just use a fake IP address
+ let fake_peer = "0.0.0.0:807".parse().unwrap();
+ future::ok(ApiService {
+ peer: fake_peer,
+ api_config: self.api_config.clone(),
+ })
+ .boxed()
+ }
+}
+
pub struct ApiService {
pub peer: std::net::SocketAddr,
pub api_config: Arc<ApiConfig>,
}
fn log_response(
+ logfile: Option<&Arc<Mutex<FileLogger>>>,
peer: &std::net::SocketAddr,
method: hyper::Method,
- path: &str,
+ path_query: &str,
resp: &Response<Body>,
+ user_agent: Option<String>,
) {
+ if resp.extensions().get::<NoLogExtension>().is_some() {
+ return;
+ };
- if resp.extensions().get::<NoLogExtension>().is_some() { return; };
+    // we also log URL-too-long requests, so avoid messages bigger than PIPE_BUF (4k on Linux)
+    // to profit from atomicity guarantees for O_APPEND opened logfiles
+ let path = &path_query[..MAX_URI_QUERY_LENGTH.min(path_query.len())];
let status = resp.status();
-
if !(status.is_success() || status.is_informational()) {
let reason = status.canonical_reason().unwrap_or("unknown reason");
- let mut message = "request failed";
- if let Some(data) = resp.extensions().get::<ErrorMessageExtension>() {
- message = &data.0;
- }
+ let message = match resp.extensions().get::<ErrorMessageExtension>() {
+ Some(data) => &data.0,
+ None => "request failed",
+ };
+
+ log::error!(
+ "{} {}: {} {}: [client {}] {}",
+ method.as_str(),
+ path,
+ status.as_str(),
+ reason,
+ peer,
+ message
+ );
+ }
+ if let Some(logfile) = logfile {
+ let auth_id = match resp.extensions().get::<Authid>() {
+ Some(auth_id) => auth_id.to_string(),
+ None => "-".to_string(),
+ };
+ let now = proxmox::tools::time::epoch_i64();
+ // time format which apache/nginx use (by default), copied from pve-http-server
+ let datetime = proxmox::tools::time::strftime_local("%d/%m/%Y:%H:%M:%S %z", now)
+ .unwrap_or_else(|_| "-".to_string());
+
+ logfile.lock().unwrap().log(format!(
+ "{} - {} [{}] \"{} {}\" {} {} {}",
+ peer.ip(),
+ auth_id,
+ datetime,
+ method.as_str(),
+ path,
+ status.as_str(),
+ resp.body().size_hint().lower(),
+ user_agent.unwrap_or_else(|| "-".to_string()),
+ ));
+ }
+}
+pub fn auth_logger() -> Result<FileLogger, Error> {
+ let logger_options = tools::FileLogOptions {
+ append: true,
+ prefix_time: true,
+ owned_by_backup: true,
+ ..Default::default()
+ };
+ FileLogger::new(pbs_buildcfg::API_AUTH_LOG_FN, logger_options)
+}
- log::error!("{} {}: {} {}: [client {}] {}", method.as_str(), path, status.as_str(), reason, peer, message);
+fn get_proxied_peer(headers: &HeaderMap) -> Option<std::net::SocketAddr> {
+ lazy_static! {
+ static ref RE: Regex = Regex::new(r#"for="([^"]+)""#).unwrap();
}
+ let forwarded = headers.get(header::FORWARDED)?.to_str().ok()?;
+ let capture = RE.captures(&forwarded)?;
+ let rhost = capture.get(1)?.as_str();
+
+ rhost.parse().ok()
+}
+
+fn get_user_agent(headers: &HeaderMap) -> Option<String> {
+ let agent = headers.get(header::USER_AGENT)?.to_str();
+ agent
+ .map(|s| {
+ let mut s = s.to_owned();
+ s.truncate(128);
+ s
+ })
+ .ok()
}
impl tower_service::Service<Request<Body>> for ApiService {
type Response = Response<Body>;
type Error = Error;
- type Future = Pin<Box<dyn Future<Output = Result<Response<Body>, Self::Error>> + Send>>;
+ #[allow(clippy::type_complexity)]
+ type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
fn poll_ready(&mut self, _cx: &mut Context) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, req: Request<Body>) -> Self::Future {
- let path = req.uri().path().to_owned();
+ let path = req.uri().path_and_query().unwrap().as_str().to_owned();
let method = req.method().clone();
+ let user_agent = get_user_agent(req.headers());
- let peer = self.peer;
- Pin::from(handle_request(self.api_config.clone(), req))
- .map(move |result| match result {
- Ok(res) => {
- log_response(&peer, method, &path, &res);
- Ok::<_, Self::Error>(res)
- }
+ let config = Arc::clone(&self.api_config);
+ let peer = match get_proxied_peer(req.headers()) {
+ Some(proxied_peer) => proxied_peer,
+ None => self.peer,
+ };
+ async move {
+ let response = match handle_request(Arc::clone(&config), req, &peer).await {
+ Ok(response) => response,
Err(err) => {
- if let Some(apierr) = err.downcast_ref::<HttpError>() {
- let mut resp = Response::new(Body::from(apierr.message.clone()));
- *resp.status_mut() = apierr.code;
- log_response(&peer, method, &path, &resp);
- Ok(resp)
- } else {
- let mut resp = Response::new(Body::from(err.to_string()));
- *resp.status_mut() = StatusCode::BAD_REQUEST;
- log_response(&peer, method, &path, &resp);
- Ok(resp)
- }
+ let (err, code) = match err.downcast_ref::<HttpError>() {
+ Some(apierr) => (apierr.message.clone(), apierr.code),
+ _ => (err.to_string(), StatusCode::BAD_REQUEST),
+ };
+ Response::builder()
+ .status(code)
+ .extension(ErrorMessageExtension(err.to_string()))
+ .body(err.into())?
}
- })
- .boxed()
+ };
+ let logger = config.get_file_log();
+ log_response(logger, &peer, method, &path, &response, user_agent);
+ Ok(response)
+ }
+ .boxed()
}
}
-fn get_request_parameters_async<S: 'static + BuildHasher + Send>(
- info: &'static ApiMethod,
+fn parse_query_parameters<S: 'static + BuildHasher + Send>(
+ param_schema: ParameterSchema,
+ form: &str, // x-www-form-urlencoded body data
+ parts: &Parts,
+ uri_param: &HashMap<String, String, S>,
+) -> Result<Value, Error> {
+ let mut param_list: Vec<(String, String)> = vec![];
+
+ if !form.is_empty() {
+ for (k, v) in form_urlencoded::parse(form.as_bytes()).into_owned() {
+ param_list.push((k, v));
+ }
+ }
+
+ if let Some(query_str) = parts.uri.query() {
+ for (k, v) in form_urlencoded::parse(query_str.as_bytes()).into_owned() {
+ if k == "_dc" {
+ continue;
+ } // skip extjs "disable cache" parameter
+ param_list.push((k, v));
+ }
+ }
+
+ for (k, v) in uri_param {
+ param_list.push((k.clone(), v.clone()));
+ }
+
+ let params = parse_parameter_strings(¶m_list, param_schema, true)?;
+
+ Ok(params)
+}
+
+async fn get_request_parameters<S: 'static + BuildHasher + Send>(
+ param_schema: ParameterSchema,
parts: Parts,
req_body: Body,
uri_param: HashMap<String, String, S>,
-) -> Box<dyn Future<Output = Result<Value, failure::Error>> + Send>
-{
+) -> Result<Value, Error> {
let mut is_json = false;
if let Some(value) = parts.headers.get(header::CONTENT_TYPE) {
Ok(Some("application/json")) => {
is_json = true;
}
- _ => {
- return Box::new(future::err(http_err!(BAD_REQUEST, "unsupported content type".to_string())));
- }
+ _ => bail!("unsupported content type {:?}", value.to_str()),
}
}
- let resp = req_body
- .map_err(|err| http_err!(BAD_REQUEST, format!("Promlems reading request body: {}", err)))
- .try_fold(Vec::new(), |mut acc, chunk| async move {
- if acc.len() + chunk.len() < 64*1024 { //fimxe: max request body size?
- acc.extend_from_slice(&*chunk);
- Ok(acc)
- } else {
- Err(http_err!(BAD_REQUEST, "Request body too large".to_string()))
- }
- })
- .and_then(move |body| async move {
- let utf8 = std::str::from_utf8(&body)?;
-
- let obj_schema = &info.parameters;
-
- if is_json {
- let mut params: Value = serde_json::from_str(utf8)?;
- for (k, v) in uri_param {
- if let Some((_optional, prop_schema)) = obj_schema.lookup(&k) {
- params[&k] = parse_simple_value(&v, prop_schema)?;
- }
- }
- verify_json_object(¶ms, obj_schema)?;
- return Ok(params);
- }
-
- let mut param_list: Vec<(String, String)> = vec![];
-
- if !utf8.is_empty() {
- for (k, v) in form_urlencoded::parse(utf8.as_bytes()).into_owned() {
- param_list.push((k, v));
- }
- }
+ let body = TryStreamExt::map_err(req_body, |err| {
+ http_err!(BAD_REQUEST, "Problems reading request body: {}", err)
+ })
+ .try_fold(Vec::new(), |mut acc, chunk| async move {
+ // FIXME: max request body size?
+ if acc.len() + chunk.len() < 64 * 1024 {
+ acc.extend_from_slice(&*chunk);
+ Ok(acc)
+ } else {
+ Err(http_err!(BAD_REQUEST, "Request body too large"))
+ }
+ })
+ .await?;
- if let Some(query_str) = parts.uri.query() {
- for (k, v) in form_urlencoded::parse(query_str.as_bytes()).into_owned() {
- if k == "_dc" { continue; } // skip extjs "disable cache" parameter
- param_list.push((k, v));
- }
- }
+ let utf8_data =
+ std::str::from_utf8(&body).map_err(|err| format_err!("Request body not uft8: {}", err))?;
- for (k, v) in uri_param {
- param_list.push((k.clone(), v.clone()));
+ if is_json {
+ let mut params: Value = serde_json::from_str(utf8_data)?;
+ for (k, v) in uri_param {
+ if let Some((_optional, prop_schema)) = param_schema.lookup(&k) {
+ params[&k] = parse_simple_value(&v, prop_schema)?;
}
-
- let params = parse_parameter_strings(¶m_list, obj_schema, true)?;
-
- Ok(params)
- }.boxed());
-
- Box::new(resp)
+ }
+ verify_json_object(¶ms, ¶m_schema)?;
+ return Ok(params);
+ } else {
+ parse_query_parameters(param_schema, utf8_data, &parts, &uri_param)
+ }
}
struct NoLogExtension();
-fn proxy_protected_request(
+async fn proxy_protected_request(
info: &'static ApiMethod,
mut parts: Parts,
req_body: Body,
-) -> ApiFuture {
-
+ peer: &std::net::SocketAddr,
+) -> Result<Response<Body>, Error> {
let mut uri_parts = parts.uri.clone().into_parts();
uri_parts.scheme = Some(http::uri::Scheme::HTTP);
parts.uri = new_uri;
- let request = Request::from_parts(parts, req_body);
+ let mut request = Request::from_parts(parts, req_body);
+ request.headers_mut().insert(
+ header::FORWARDED,
+ format!("for=\"{}\";", peer).parse().unwrap(),
+ );
+
+ let reload_timezone = info.reload_timezone;
let resp = hyper::client::Client::new()
.request(request)
.map_ok(|mut resp| {
resp.extensions_mut().insert(NoLogExtension());
resp
- });
-
+ })
+ .await?;
- let reload_timezone = info.reload_timezone;
- Box::new(async move {
- let result = resp.await;
- if reload_timezone {
- unsafe {
- tzset();
- }
+ if reload_timezone {
+ unsafe {
+ tzset();
}
- result
- })
+ }
+
+ Ok(resp)
}
-pub fn handle_sync_api_request<Env: RpcEnvironment, S: 'static + BuildHasher + Send>(
+pub async fn handle_api_request<Env: RpcEnvironment, S: 'static + BuildHasher + Send>(
mut rpcenv: Env,
info: &'static ApiMethod,
formatter: &'static OutputFormatter,
parts: Parts,
req_body: Body,
uri_param: HashMap<String, String, S>,
-) -> ApiFuture
-{
- let handler = match info.handler {
- ApiHandler::Async(_) => {
- panic!("fixme");
+) -> Result<Response<Body>, Error> {
+ let delay_unauth_time = std::time::Instant::now() + std::time::Duration::from_millis(3000);
+ let compression = extract_compression_method(&parts.headers);
+
+ let result = match info.handler {
+ ApiHandler::AsyncHttp(handler) => {
+ let params = parse_query_parameters(info.parameters, "", &parts, &uri_param)?;
+ (handler)(parts, req_body, params, info, Box::new(rpcenv)).await
+ }
+ ApiHandler::Sync(handler) => {
+ let params =
+ get_request_parameters(info.parameters, parts, req_body, uri_param).await?;
+ (handler)(params, info, &mut rpcenv).map(|data| (formatter.format_data)(data, &rpcenv))
+ }
+ ApiHandler::Async(handler) => {
+ let params =
+ get_request_parameters(info.parameters, parts, req_body, uri_param).await?;
+ (handler)(params, info, &mut rpcenv)
+ .await
+ .map(|data| (formatter.format_data)(data, &rpcenv))
}
- ApiHandler::Sync(handler) => handler,
};
-
- let params = get_request_parameters_async(info, parts, req_body, uri_param);
- let delay_unauth_time = std::time::Instant::now() + std::time::Duration::from_millis(3000);
-
- let resp = Pin::from(params)
- .and_then(move |params| {
- let mut delay = false;
-
- let resp = match (handler)(params, info, &mut rpcenv) {
- Ok(data) => (formatter.format_data)(data, &rpcenv),
- Err(err) => {
- if let Some(httperr) = err.downcast_ref::<HttpError>() {
- if httperr.code == StatusCode::UNAUTHORIZED {
- delay = true;
- }
- }
- (formatter.format_error)(err)
+ let mut resp = match result {
+ Ok(resp) => resp,
+ Err(err) => {
+ if let Some(httperr) = err.downcast_ref::<HttpError>() {
+ if httperr.code == StatusCode::UNAUTHORIZED {
+ tokio::time::sleep_until(Instant::from_std(delay_unauth_time)).await;
}
- };
-
- if info.reload_timezone {
- unsafe { tzset() };
}
+ (formatter.format_error)(err)
+ }
+ };
- if delay {
- Either::Left(delayed_response(resp, delay_unauth_time))
- } else {
- Either::Right(future::ok(resp))
- }
- })
- .or_else(move |err| {
- future::ok((formatter.format_error)(err))
- });
+ let resp = match compression {
+ Some(CompressionMethod::Deflate) => {
+ resp.headers_mut().insert(
+ header::CONTENT_ENCODING,
+ CompressionMethod::Deflate.content_encoding(),
+ );
+ resp.map(|body| {
+ Body::wrap_stream(DeflateEncoder::with_quality(
+ TryStreamExt::map_err(body, |err| {
+ proxmox::io_format_err!("error during compression: {}", err)
+ }),
+ Level::Default,
+ ))
+ })
+ }
+ None => resp,
+ };
- Box::new(resp)
+ if info.reload_timezone {
+ unsafe {
+ tzset();
+ }
+ }
+
+ Ok(resp)
}
-pub fn handle_async_api_request<Env: RpcEnvironment>(
- rpcenv: Env,
- info: &'static ApiMethod,
- formatter: &'static OutputFormatter,
+fn get_index(
+ userid: Option<Userid>,
+ csrf_token: Option<String>,
+ language: Option<String>,
+ api: &Arc<ApiConfig>,
parts: Parts,
- req_body: Body,
- uri_param: HashMap<String, String>,
-) -> ApiFuture
-{
- let handler = match info.handler {
- ApiHandler::Sync(_) => {
- panic!("fixme");
- }
- ApiHandler::Async(handler) => handler,
- };
-
- // fixme: convert parameters to Json
- let mut param_list: Vec<(String, String)> = vec![];
+) -> Response<Body> {
+ let nodename = proxmox::tools::nodename();
+ let user = userid.as_ref().map(|u| u.as_str()).unwrap_or("");
+
+ let csrf_token = csrf_token.unwrap_or_else(|| String::from(""));
+
+ let mut debug = false;
+ let mut template_file = "index";
if let Some(query_str) = parts.uri.query() {
for (k, v) in form_urlencoded::parse(query_str.as_bytes()).into_owned() {
- if k == "_dc" { continue; } // skip extjs "disable cache" parameter
- param_list.push((k, v));
+ if k == "debug" && v != "0" && v != "false" {
+ debug = true;
+ } else if k == "console" {
+ template_file = "console";
+ }
}
}
- for (k, v) in uri_param {
- param_list.push((k.clone(), v.clone()));
- }
-
- let params = match parse_parameter_strings(¶m_list, &info.parameters, true) {
- Ok(v) => v,
- Err(err) => {
- let resp = (formatter.format_error)(Error::from(err));
- return Box::new(future::ok(resp));
- }
- };
-
- match (handler)(parts, req_body, params, info, Box::new(rpcenv)) {
- Ok(future) => future,
- Err(err) => {
- let resp = (formatter.format_error)(err);
- Box::new(future::ok(resp))
+ let mut lang = String::from("");
+ if let Some(language) = language {
+ if Path::new(&format!("/usr/share/pbs-i18n/pbs-lang-{}.js", language)).exists() {
+ lang = language;
}
}
-}
-
-fn get_index(username: Option<String>, token: Option<String>) -> Response<Body> {
-
- let nodename = proxmox::tools::nodename();
- let username = username.unwrap_or_else(|| String::from(""));
- let token = token.unwrap_or_else(|| String::from(""));
-
- let setup = json!({
- "Setup": { "auth_cookie_name": "PBSAuthCookie" },
+ let data = json!({
"NodeName": nodename,
- "UserName": username,
- "CSRFPreventionToken": token,
+ "UserName": user,
+ "CSRFPreventionToken": csrf_token,
+ "language": lang,
+ "debug": debug,
});
- let index = format!(r###"
-<!DOCTYPE html>
-<html>
- <head>
- <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
- <meta http-equiv="X-UA-Compatible" content="IE=edge">
- <meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no">
- <title>Proxmox Backup Server</title>
- <link rel="icon" sizes="128x128" href="/images/logo-128.png" />
- <link rel="apple-touch-icon" sizes="128x128" href="/pve2/images/logo-128.png" />
- <link rel="stylesheet" type="text/css" href="/extjs/theme-crisp/resources/theme-crisp-all.css" />
- <link rel="stylesheet" type="text/css" href="/extjs/crisp/resources/charts-all.css" />
- <link rel="stylesheet" type="text/css" href="/fontawesome/css/font-awesome.css" />
- <script type='text/javascript'> function gettext(buf) {{ return buf; }} </script>
- <script type="text/javascript" src="/extjs/ext-all-debug.js"></script>
- <script type="text/javascript" src="/extjs/charts-debug.js"></script>
- <script type="text/javascript">
- Proxmox = {};
- </script>
- <script type="text/javascript" src="/widgettoolkit/proxmoxlib.js"></script>
- <script type="text/javascript" src="/extjs/locale/locale-en.js"></script>
- <script type="text/javascript">
- Ext.History.fieldid = 'x-history-field';
- </script>
- <script type="text/javascript" src="/js/proxmox-backup-gui.js"></script>
- </head>
- <body>
- <!-- Fields required for history management -->
- <form id="history-form" class="x-hidden">
- <input type="hidden" id="x-history-field"/>
- </form>
- </body>
-</html>
-"###, setup.to_string());
-
- Response::builder()
+ let (ct, index) = match api.render_template(template_file, &data) {
+ Ok(index) => ("text/html", index),
+ Err(err) => ("text/plain", format!("Error rendering template: {}", err)),
+ };
+
+ let mut resp = Response::builder()
.status(StatusCode::OK)
- .header(header::CONTENT_TYPE, "text/html")
+ .header(header::CONTENT_TYPE, ct)
.body(index.into())
- .unwrap()
+ .unwrap();
+
+ if let Some(userid) = userid {
+ resp.extensions_mut().insert(Authid::from((userid, None)));
+ }
+
+ resp
}
fn extension_to_content_type(filename: &Path) -> (&'static str, bool) {
-
if let Some(ext) = filename.extension().and_then(|osstr| osstr.to_str()) {
return match ext {
"css" => ("text/css", false),
("application/octet-stream", false)
}
-async fn simple_static_file_download(filename: PathBuf) -> Result<Response<Body>, Error> {
-
- let (content_type, _nocomp) = extension_to_content_type(&filename);
-
+async fn simple_static_file_download(
+ filename: PathBuf,
+ content_type: &'static str,
+ compression: Option<CompressionMethod>,
+) -> Result<Response<Body>, Error> {
use tokio::io::AsyncReadExt;
let mut file = File::open(filename)
.await
- .map_err(|err| http_err!(BAD_REQUEST, format!("File open failed: {}", err)))?;
+ .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;
let mut data: Vec<u8> = Vec::new();
- file.read_to_end(&mut data)
- .await
- .map_err(|err| http_err!(BAD_REQUEST, format!("File read failed: {}", err)))?;
- let mut response = Response::new(data.into());
+ let mut response = match compression {
+ Some(CompressionMethod::Deflate) => {
+ let mut enc = DeflateEncoder::with_quality(data, Level::Default);
+ enc.compress_vec(&mut file, CHUNK_SIZE_LIMIT as usize)
+ .await?;
+ let mut response = Response::new(enc.into_inner().into());
+ response.headers_mut().insert(
+ header::CONTENT_ENCODING,
+ CompressionMethod::Deflate.content_encoding(),
+ );
+ response
+ }
+ None => {
+ file.read_to_end(&mut data)
+ .await
+ .map_err(|err| http_err!(BAD_REQUEST, "File read failed: {}", err))?;
+ Response::new(data.into())
+ }
+ };
+
response.headers_mut().insert(
header::CONTENT_TYPE,
- header::HeaderValue::from_static(content_type));
+ header::HeaderValue::from_static(content_type),
+ );
+
Ok(response)
}
-async fn chuncked_static_file_download(filename: PathBuf) -> Result<Response<Body>, Error> {
- let (content_type, _nocomp) = extension_to_content_type(&filename);
+async fn chuncked_static_file_download(
+ filename: PathBuf,
+ content_type: &'static str,
+ compression: Option<CompressionMethod>,
+) -> Result<Response<Body>, Error> {
+ let mut resp = Response::builder()
+ .status(StatusCode::OK)
+ .header(header::CONTENT_TYPE, content_type);
let file = File::open(filename)
.await
- .map_err(|err| http_err!(BAD_REQUEST, format!("File open failed: {}", err)))?;
-
- let payload = tokio::codec::FramedRead::new(file, tokio::codec::BytesCodec::new())
- .map_ok(|bytes| hyper::Chunk::from(bytes.freeze()));
- let body = Body::wrap_stream(payload);
-
- // fixme: set other headers ?
- Ok(Response::builder()
- .status(StatusCode::OK)
- .header(header::CONTENT_TYPE, content_type)
- .body(body)
- .unwrap()
- )
+ .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;
+
+ let body = match compression {
+ Some(CompressionMethod::Deflate) => {
+ resp = resp.header(
+ header::CONTENT_ENCODING,
+ CompressionMethod::Deflate.content_encoding(),
+ );
+ Body::wrap_stream(DeflateEncoder::with_quality(
+ AsyncReaderStream::new(file),
+ Level::Default,
+ ))
+ }
+ None => Body::wrap_stream(AsyncReaderStream::new(file)),
+ };
+
+ Ok(resp.body(body).unwrap())
}
-fn handle_static_file_download(filename: PathBuf) -> ApiFuture {
+async fn handle_static_file_download(
+ filename: PathBuf,
+ compression: Option<CompressionMethod>,
+) -> Result<Response<Body>, Error> {
+ let metadata = tokio::fs::metadata(filename.clone())
+ .map_err(|err| http_err!(BAD_REQUEST, "File access problems: {}", err))
+ .await?;
- let response = tokio::fs::metadata(filename.clone())
- .map_err(|err| http_err!(BAD_REQUEST, format!("File access problems: {}", err)))
- .and_then(|metadata| async move {
- if metadata.len() < 1024*32 {
- simple_static_file_download(filename).await
- } else {
- chuncked_static_file_download(filename).await
- }
- });
+ let (content_type, nocomp) = extension_to_content_type(&filename);
+ let compression = if nocomp { None } else { compression };
- Box::new(response)
+ if metadata.len() < CHUNK_SIZE_LIMIT {
+ simple_static_file_download(filename, content_type, compression).await
+ } else {
+ chuncked_static_file_download(filename, content_type, compression).await
+ }
}
-fn extract_auth_data(headers: &http::HeaderMap) -> (Option<String>, Option<String>) {
-
- let mut ticket = None;
- if let Some(raw_cookie) = headers.get("COOKIE") {
- if let Ok(cookie) = raw_cookie.to_str() {
- ticket = tools::extract_auth_cookie(cookie, "PBSAuthCookie");
- }
+fn extract_lang_header(headers: &http::HeaderMap) -> Option<String> {
+ if let Some(Ok(cookie)) = headers.get("COOKIE").map(|v| v.to_str()) {
+ return tools::extract_cookie(cookie, "PBSLangCookie");
}
-
- let token = match headers.get("CSRFPreventionToken").map(|v| v.to_str()) {
- Some(Ok(v)) => Some(v.to_owned()),
- _ => None,
- };
-
- (ticket, token)
+ None
}
-fn check_auth(method: &hyper::Method, ticket: &Option<String>, token: &Option<String>) -> Result<String, Error> {
-
- let ticket_lifetime = tools::ticket::TICKET_LIFETIME;
-
- let username = match ticket {
- Some(ticket) => match tools::ticket::verify_rsa_ticket(public_auth_key(), "PBS", &ticket, None, -300, ticket_lifetime) {
- Ok((_age, Some(username))) => username.to_owned(),
- Ok((_, None)) => bail!("ticket without username."),
- Err(err) => return Err(err),
- }
- None => bail!("missing ticket"),
- };
-
- if method != hyper::Method::GET {
- if let Some(token) = token {
- println!("CSRF prevention token: {:?}", token);
- verify_csrf_prevention_token(csrf_secret(), &username, &token, -300, ticket_lifetime)?;
- } else {
- bail!("missing CSRF prevention token");
+// FIXME: support handling multiple compression methods
+fn extract_compression_method(headers: &http::HeaderMap) -> Option<CompressionMethod> {
+ if let Some(Ok(encodings)) = headers.get(header::ACCEPT_ENCODING).map(|v| v.to_str()) {
+ for encoding in encodings.split(&[',', ' '][..]) {
+ if let Ok(method) = encoding.parse() {
+ return Some(method);
+ }
}
}
-
- Ok(username)
+ None
}
-async fn delayed_response(
- resp: Response<Body>,
- delay_unauth_time: std::time::Instant,
+async fn handle_request(
+ api: Arc<ApiConfig>,
+ req: Request<Body>,
+ peer: &std::net::SocketAddr,
) -> Result<Response<Body>, Error> {
- tokio::timer::delay(delay_unauth_time).await;
- Ok(resp)
-}
-
-pub fn handle_request(api: Arc<ApiConfig>, req: Request<Body>) -> ApiFuture {
-
let (parts, body) = req.into_parts();
-
let method = parts.method.clone();
-
- let (path, components) = match tools::normalize_uri_path(parts.uri.path()) {
- Ok((p,c)) => (p, c),
- Err(err) => return Box::new(future::err(http_err!(BAD_REQUEST, err.to_string()))),
- };
+ let (path, components) = tools::normalize_uri_path(parts.uri.path())?;
let comp_len = components.len();
- println!("REQUEST {} {}", method, path);
- println!("COMPO {:?}", components);
+ let query = parts.uri.query().unwrap_or_default();
+ if path.len() + query.len() > MAX_URI_QUERY_LENGTH {
+ return Ok(Response::builder()
+ .status(StatusCode::URI_TOO_LONG)
+ .body("".into())
+ .unwrap());
+ }
let env_type = api.env_type();
let mut rpcenv = RestEnvironment::new(env_type);
+ rpcenv.set_client_ip(Some(*peer));
+
+ let user_info = CachedUserInfo::new()?;
+ let auth = &api.api_auth;
+
let delay_unauth_time = std::time::Instant::now() + std::time::Duration::from_millis(3000);
+ let access_forbidden_time = std::time::Instant::now() + std::time::Duration::from_millis(500);
if comp_len >= 1 && components[0] == "api2" {
-
if comp_len >= 2 {
let format = components[1];
+
let formatter = match format {
"json" => &JSON_FORMATTER,
"extjs" => &EXTJS_FORMATTER,
- _ => {
- return Box::new(future::err(http_err!(BAD_REQUEST, format!("Unsupported output format '{}'.", format))));
- }
+ _ => bail!("Unsupported output format '{}'.", format),
};
let mut uri_param = HashMap::new();
+ let api_method = api.find_method(&components[2..], method.clone(), &mut uri_param);
- if comp_len == 4 && components[2] == "access" && components[3] == "ticket" {
- // explicitly allow those calls without auth
- } else {
- let (ticket, token) = extract_auth_data(&parts.headers);
- match check_auth(&method, &ticket, &token) {
- Ok(username) => {
+ let mut auth_required = true;
+ if let Some(api_method) = api_method {
+ if let Permission::World = *api_method.access.permission {
+ auth_required = false; // no auth for endpoints with World permission
+ }
+ }
- // fixme: check permissions
+ if auth_required {
+ match auth.check_auth(&parts.headers, &method, &user_info) {
+ Ok(authid) => rpcenv.set_auth_id(Some(authid.to_string())),
+ Err(auth_err) => {
+ let err = match auth_err {
+ AuthError::Generic(err) => err,
+ AuthError::NoData => {
+ format_err!("no authentication credentials provided.")
+ }
+ };
+ let peer = peer.ip();
+ auth_logger()?.log(format!(
+ "authentication failure; rhost={} msg={}",
+ peer, err
+ ));
- rpcenv.set_user(Some(username));
- }
- Err(err) => {
// always delay unauthorized calls by 3 seconds (from start of request)
- let err = http_err!(UNAUTHORIZED, format!("permission check failed - {}", err));
- return Box::new(
- delayed_response((formatter.format_error)(err), delay_unauth_time)
- );
+ let err = http_err!(UNAUTHORIZED, "authentication failed - {}", err);
+ tokio::time::sleep_until(Instant::from_std(delay_unauth_time)).await;
+ return Ok((formatter.format_error)(err));
}
}
}
- match api.find_method(&components[2..], method, &mut uri_param) {
+ match api_method {
None => {
- let err = http_err!(NOT_FOUND, "Path not found.".to_string());
- return Box::new(future::ok((formatter.format_error)(err)));
+ let err = http_err!(NOT_FOUND, "Path '{}' not found.", path);
+ return Ok((formatter.format_error)(err));
}
Some(api_method) => {
- if api_method.protected && env_type == RpcEnvironmentType::PUBLIC {
- return proxy_protected_request(api_method, parts, body);
+ let auth_id = rpcenv.get_auth_id();
+ if !check_api_permission(
+ api_method.access.permission,
+ auth_id.as_deref(),
+ &uri_param,
+ user_info.as_ref(),
+ ) {
+ let err = http_err!(FORBIDDEN, "permission check failed");
+ tokio::time::sleep_until(Instant::from_std(access_forbidden_time)).await;
+ return Ok((formatter.format_error)(err));
+ }
+
+ let result = if api_method.protected && env_type == RpcEnvironmentType::PUBLIC {
+ proxy_protected_request(api_method, parts, body, peer).await
} else {
- match api_method.handler {
- ApiHandler::Sync(_) => {
- return handle_sync_api_request(rpcenv, api_method, formatter, parts, body, uri_param);
- }
- ApiHandler::Async(_) => {
- return handle_async_api_request(rpcenv, api_method, formatter, parts, body, uri_param);
- }
- }
+ handle_api_request(rpcenv, api_method, formatter, parts, body, uri_param)
+ .await
+ };
+
+ let mut response = match result {
+ Ok(resp) => resp,
+ Err(err) => (formatter.format_error)(err),
+ };
+
+ if let Some(auth_id) = auth_id {
+ let auth_id: Authid = auth_id.parse()?;
+ response.extensions_mut().insert(auth_id);
}
+
+ return Ok(response);
}
}
}
// not Auth required for accessing files!
if method != hyper::Method::GET {
- return Box::new(future::err(http_err!(BAD_REQUEST, "Unsupported method".to_string())));
+ bail!("Unsupported HTTP method {}", method);
}
if comp_len == 0 {
- let (ticket, token) = extract_auth_data(&parts.headers);
- if ticket != None {
- match check_auth(&method, &ticket, &token) {
- Ok(username) => {
- let new_token = assemble_csrf_prevention_token(csrf_secret(), &username);
- return Box::new(future::ok(get_index(Some(username), Some(new_token))));
- }
- _ => {
- return Box::new(delayed_response(get_index(None, None), delay_unauth_time));
+ let language = extract_lang_header(&parts.headers);
+ match auth.check_auth(&parts.headers, &method, &user_info) {
+ Ok(auth_id) => {
+ if !auth_id.is_token() {
+ let userid = auth_id.user();
+ let new_csrf_token = assemble_csrf_prevention_token(csrf_secret(), userid);
+ return Ok(get_index(
+ Some(userid.clone()),
+ Some(new_csrf_token),
+ language,
+ &api,
+ parts,
+ ));
}
}
- } else {
- return Box::new(future::ok(get_index(None, None)));
+ Err(AuthError::Generic(_)) => {
+ tokio::time::sleep_until(Instant::from_std(delay_unauth_time)).await;
+ }
+ Err(AuthError::NoData) => {}
}
+ return Ok(get_index(None, None, language, &api, parts));
} else {
let filename = api.find_alias(&components);
- return handle_static_file_download(filename);
+ let compression = extract_compression_method(&parts.headers);
+ return handle_static_file_download(filename, compression).await;
}
}
- Box::new(future::err(http_err!(NOT_FOUND, "Path not found.".to_string())))
+ Err(http_err!(NOT_FOUND, "Path '{}' not found.", path))
}