git.proxmox.com Git - proxmox-backup.git/blobdiff - src/server/rest.rs
server/rest: add ApiAuth trait to make user auth generic
[proxmox-backup.git] / src / server / rest.rs
index 912327f04b36f436cf71bcd3259f127c52f8d275..c482bab2b57b3cf04239abff5fb87d09815f0d62 100644 (file)
@@ -3,61 +3,68 @@ use std::future::Future;
 use std::hash::BuildHasher;
 use std::path::{Path, PathBuf};
 use std::pin::Pin;
-use std::sync::Arc;
+use std::sync::{Arc, Mutex};
 use std::task::{Context, Poll};
 
 use anyhow::{bail, format_err, Error};
 use futures::future::{self, FutureExt, TryFutureExt};
 use futures::stream::TryStreamExt;
-use hyper::header;
+use hyper::body::HttpBody;
+use hyper::header::{self, HeaderMap};
 use hyper::http::request::Parts;
 use hyper::{Body, Request, Response, StatusCode};
+use lazy_static::lazy_static;
+use regex::Regex;
 use serde_json::{json, Value};
 use tokio::fs::File;
 use tokio::time::Instant;
 use url::form_urlencoded;
 
-use proxmox::http_err;
+use proxmox::api::schema::{
+    parse_parameter_strings, parse_simple_value, verify_json_object, ObjectSchemaType,
+    ParameterSchema,
+};
 use proxmox::api::{
-    ApiHandler,
-    ApiMethod,
-    HttpError,
-    Permission,
-    RpcEnvironment,
+    check_api_permission, ApiHandler, ApiMethod, HttpError, Permission, RpcEnvironment,
     RpcEnvironmentType,
-    check_api_permission,
-};
-use proxmox::api::schema::{
-    ObjectSchema,
-    parse_parameter_strings,
-    parse_simple_value,
-    verify_json_object,
 };
+use proxmox::http_err;
 
+use super::auth::AuthError;
 use super::environment::RestEnvironment;
 use super::formatter::*;
 use super::ApiConfig;
 
+use crate::api2::types::{Authid, Userid};
 use crate::auth_helpers::*;
-use crate::api2::types::Userid;
-use crate::tools;
-use crate::tools::ticket::Ticket;
 use crate::config::cached_user_info::CachedUserInfo;
+use crate::tools;
+use crate::tools::compression::{CompressionMethod, DeflateEncoder, Level};
+use crate::tools::AsyncReaderStream;
+use crate::tools::FileLogger;
 
-extern "C"  { fn tzset(); }
+extern "C" {
+    fn tzset();
+}
 
 pub struct RestServer {
     pub api_config: Arc<ApiConfig>,
 }
 
-impl RestServer {
+const MAX_URI_QUERY_LENGTH: usize = 3072;
+const CHUNK_SIZE_LIMIT: u64 = 32 * 1024;
 
+impl RestServer {
     pub fn new(api_config: ApiConfig) -> Self {
-        Self { api_config: Arc::new(api_config) }
+        Self {
+            api_config: Arc::new(api_config),
+        }
     }
 }
 
-impl tower_service::Service<&tokio_openssl::SslStream<tokio::net::TcpStream>> for RestServer {
+impl tower_service::Service<&Pin<Box<tokio_openssl::SslStream<tokio::net::TcpStream>>>>
+    for RestServer
+{
     type Response = ApiService;
     type Error = Error;
     type Future = Pin<Box<dyn Future<Output = Result<ApiService, Error>> + Send>>;
@@ -66,14 +73,17 @@ impl tower_service::Service<&tokio_openssl::SslStream<tokio::net::TcpStream>> fo
         Poll::Ready(Ok(()))
     }
 
-    fn call(&mut self, ctx: &tokio_openssl::SslStream<tokio::net::TcpStream>) -> Self::Future {
+    fn call(
+        &mut self,
+        ctx: &Pin<Box<tokio_openssl::SslStream<tokio::net::TcpStream>>>,
+    ) -> Self::Future {
         match ctx.get_ref().peer_addr() {
-            Err(err) => {
-                future::err(format_err!("unable to get peer address - {}", err)).boxed()
-            }
-            Ok(peer) => {
-                future::ok(ApiService { peer, api_config: self.api_config.clone() }).boxed()
-            }
+            Err(err) => future::err(format_err!("unable to get peer address - {}", err)).boxed(),
+            Ok(peer) => future::ok(ApiService {
+                peer,
+                api_config: self.api_config.clone(),
+            })
+            .boxed(),
         }
     }
 }
@@ -89,29 +99,57 @@ impl tower_service::Service<&tokio::net::TcpStream> for RestServer {
 
     fn call(&mut self, ctx: &tokio::net::TcpStream) -> Self::Future {
         match ctx.peer_addr() {
-            Err(err) => {
-                future::err(format_err!("unable to get peer address - {}", err)).boxed()
-            }
-            Ok(peer) => {
-                future::ok(ApiService { peer, api_config: self.api_config.clone() }).boxed()
-            }
+            Err(err) => future::err(format_err!("unable to get peer address - {}", err)).boxed(),
+            Ok(peer) => future::ok(ApiService {
+                peer,
+                api_config: self.api_config.clone(),
+            })
+            .boxed(),
         }
     }
 }
 
+impl tower_service::Service<&tokio::net::UnixStream> for RestServer {
+    type Response = ApiService;
+    type Error = Error;
+    type Future = Pin<Box<dyn Future<Output = Result<ApiService, Error>> + Send>>;
+
+    fn poll_ready(&mut self, _cx: &mut Context) -> Poll<Result<(), Self::Error>> {
+        Poll::Ready(Ok(()))
+    }
+
+    fn call(&mut self, _ctx: &tokio::net::UnixStream) -> Self::Future {
+        // TODO: Find a way to actually represent the vsock peer in the ApiService struct - for now
+        // it doesn't really matter, so just use a fake IP address
+        let fake_peer = "0.0.0.0:807".parse().unwrap();
+        future::ok(ApiService {
+            peer: fake_peer,
+            api_config: self.api_config.clone(),
+        })
+        .boxed()
+    }
+}
+
 pub struct ApiService {
     pub peer: std::net::SocketAddr,
     pub api_config: Arc<ApiConfig>,
 }
 
 fn log_response(
+    logfile: Option<&Arc<Mutex<FileLogger>>>,
     peer: &std::net::SocketAddr,
     method: hyper::Method,
-    path: &str,
+    path_query: &str,
     resp: &Response<Body>,
+    user_agent: Option<String>,
 ) {
+    if resp.extensions().get::<NoLogExtension>().is_some() {
+        return;
+    };
 
-    if resp.extensions().get::<NoLogExtension>().is_some() { return; };
+    // we also log URL-too-long requests, so avoid messages bigger than PIPE_BUF (4k on Linux)
+    // to profit from atomicity guarantees for O_APPEND opened logfiles
+    let path = &path_query[..MAX_URI_QUERY_LENGTH.min(path_query.len())];
 
     let status = resp.status();
 
@@ -123,57 +161,116 @@ fn log_response(
             message = &data.0;
         }
 
-        log::error!("{} {}: {} {}: [client {}] {}", method.as_str(), path, status.as_str(), reason, peer, message);
+        log::error!(
+            "{} {}: {} {}: [client {}] {}",
+            method.as_str(),
+            path,
+            status.as_str(),
+            reason,
+            peer,
+            message
+        );
     }
+    if let Some(logfile) = logfile {
+        let auth_id = match resp.extensions().get::<Authid>() {
+            Some(auth_id) => auth_id.to_string(),
+            None => "-".to_string(),
+        };
+        let now = proxmox::tools::time::epoch_i64();
+        // time format which apache/nginx use (by default), copied from pve-http-server
+        let datetime = proxmox::tools::time::strftime_local("%d/%m/%Y:%H:%M:%S %z", now)
+            .unwrap_or_else(|_| "-".to_string());
+
+        logfile.lock().unwrap().log(format!(
+            "{} - {} [{}] \"{} {}\" {} {} {}",
+            peer.ip(),
+            auth_id,
+            datetime,
+            method.as_str(),
+            path,
+            status.as_str(),
+            resp.body().size_hint().lower(),
+            user_agent.unwrap_or_else(|| "-".to_string()),
+        ));
+    }
+}
+pub fn auth_logger() -> Result<FileLogger, Error> {
+    let logger_options = tools::FileLogOptions {
+        append: true,
+        prefix_time: true,
+        owned_by_backup: true,
+        ..Default::default()
+    };
+    FileLogger::new(crate::buildcfg::API_AUTH_LOG_FN, logger_options)
+}
+
+fn get_proxied_peer(headers: &HeaderMap) -> Option<std::net::SocketAddr> {
+    lazy_static! {
+        static ref RE: Regex = Regex::new(r#"for="([^"]+)""#).unwrap();
+    }
+    let forwarded = headers.get(header::FORWARDED)?.to_str().ok()?;
+    let capture = RE.captures(&forwarded)?;
+    let rhost = capture.get(1)?.as_str();
+
+    rhost.parse().ok()
+}
+
+fn get_user_agent(headers: &HeaderMap) -> Option<String> {
+    let agent = headers.get(header::USER_AGENT)?.to_str();
+    agent
+        .map(|s| {
+            let mut s = s.to_owned();
+            s.truncate(128);
+            s
+        })
+        .ok()
 }
 
 impl tower_service::Service<Request<Body>> for ApiService {
     type Response = Response<Body>;
     type Error = Error;
-    type Future = Pin<Box<dyn Future<Output = Result<Response<Body>, Self::Error>> + Send>>;
+    #[allow(clippy::type_complexity)]
+    type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;
 
     fn poll_ready(&mut self, _cx: &mut Context) -> Poll<Result<(), Self::Error>> {
         Poll::Ready(Ok(()))
     }
 
     fn call(&mut self, req: Request<Body>) -> Self::Future {
-        let path = req.uri().path().to_owned();
+        let path = req.uri().path_and_query().unwrap().as_str().to_owned();
         let method = req.method().clone();
+        let user_agent = get_user_agent(req.headers());
 
         let config = Arc::clone(&self.api_config);
-        let peer = self.peer;
+        let peer = match get_proxied_peer(req.headers()) {
+            Some(proxied_peer) => proxied_peer,
+            None => self.peer,
+        };
         async move {
-            match handle_request(config, req).await {
-                Ok(res) => {
-                    log_response(&peer, method, &path, &res);
-                    Ok::<_, Self::Error>(res)
-                }
+            let response = match handle_request(Arc::clone(&config), req, &peer).await {
+                Ok(response) => response,
                 Err(err) => {
-                    if let Some(apierr) = err.downcast_ref::<HttpError>() {
-                        let mut resp = Response::new(Body::from(apierr.message.clone()));
-                        *resp.status_mut() = apierr.code;
-                        log_response(&peer, method, &path, &resp);
-                        Ok(resp)
-                    } else {
-                        let mut resp = Response::new(Body::from(err.to_string()));
-                        *resp.status_mut() = StatusCode::BAD_REQUEST;
-                        log_response(&peer, method, &path, &resp);
-                        Ok(resp)
-                    }
+                    let (err, code) = match err.downcast_ref::<HttpError>() {
+                        Some(apierr) => (apierr.message.clone(), apierr.code),
+                        _ => (err.to_string(), StatusCode::BAD_REQUEST),
+                    };
+                    Response::builder().status(code).body(err.into())?
                 }
-            }
+            };
+            let logger = config.get_file_log();
+            log_response(logger, &peer, method, &path, &response, user_agent);
+            Ok(response)
         }
         .boxed()
     }
 }
 
 fn parse_query_parameters<S: 'static + BuildHasher + Send>(
-    param_schema: &ObjectSchema,
+    param_schema: ParameterSchema,
     form: &str, // x-www-form-urlencoded body data
     parts: &Parts,
     uri_param: &HashMap<String, String, S>,
 ) -> Result<Value, Error> {
-
     let mut param_list: Vec<(String, String)> = vec![];
 
     if !form.is_empty() {
@@ -184,7 +281,9 @@ fn parse_query_parameters<S: 'static + BuildHasher + Send>(
 
     if let Some(query_str) = parts.uri.query() {
         for (k, v) in form_urlencoded::parse(query_str.as_bytes()).into_owned() {
-            if k == "_dc" { continue; } // skip extjs "disable cache" parameter
+            if k == "_dc" {
+                continue;
+            } // skip extjs "disable cache" parameter
             param_list.push((k, v));
         }
     }
@@ -199,12 +298,11 @@ fn parse_query_parameters<S: 'static + BuildHasher + Send>(
 }
 
 async fn get_request_parameters<S: 'static + BuildHasher + Send>(
-    param_schema: &ObjectSchema,
+    param_schema: ParameterSchema,
     parts: Parts,
     req_body: Body,
     uri_param: HashMap<String, String, S>,
 ) -> Result<Value, Error> {
-
     let mut is_json = false;
 
     if let Some(value) = parts.headers.get(header::CONTENT_TYPE) {
@@ -219,19 +317,22 @@ async fn get_request_parameters<S: 'static + BuildHasher + Send>(
         }
     }
 
-    let body = req_body
-        .map_err(|err| http_err!(BAD_REQUEST, "Promlems reading request body: {}", err))
-        .try_fold(Vec::new(), |mut acc, chunk| async move {
-            if acc.len() + chunk.len() < 64*1024 { //fimxe: max request body size?
-                acc.extend_from_slice(&*chunk);
-                Ok(acc)
-            } else {
-                Err(http_err!(BAD_REQUEST, "Request body too large"))
-            }
-        }).await?;
+    let body = TryStreamExt::map_err(req_body, |err| {
+        http_err!(BAD_REQUEST, "Problems reading request body: {}", err)
+    })
+    .try_fold(Vec::new(), |mut acc, chunk| async move {
+        // FIXME: max request body size?
+        if acc.len() + chunk.len() < 64 * 1024 {
+            acc.extend_from_slice(&*chunk);
+            Ok(acc)
+        } else {
+            Err(http_err!(BAD_REQUEST, "Request body too large"))
+        }
+    })
+    .await?;
 
-    let utf8_data = std::str::from_utf8(&body)
-        .map_err(|err| format_err!("Request body not uft8: {}", err))?;
+    let utf8_data =
+        std::str::from_utf8(&body).map_err(|err| format_err!("Request body not uft8: {}", err))?;
 
     if is_json {
         let mut params: Value = serde_json::from_str(utf8_data)?;
@@ -240,7 +341,7 @@ async fn get_request_parameters<S: 'static + BuildHasher + Send>(
                 params[&k] = parse_simple_value(&v, prop_schema)?;
             }
         }
-        verify_json_object(&params, param_schema)?;
+        verify_json_object(&params, &param_schema)?;
         return Ok(params);
     } else {
         parse_query_parameters(param_schema, utf8_data, &parts, &uri_param)
@@ -253,8 +354,8 @@ async fn proxy_protected_request(
     info: &'static ApiMethod,
     mut parts: Parts,
     req_body: Body,
+    peer: &std::net::SocketAddr,
 ) -> Result<Response<Body>, Error> {
-
     let mut uri_parts = parts.uri.clone().into_parts();
 
     uri_parts.scheme = Some(http::uri::Scheme::HTTP);
@@ -263,7 +364,11 @@ async fn proxy_protected_request(
 
     parts.uri = new_uri;
 
-    let request = Request::from_parts(parts, req_body);
+    let mut request = Request::from_parts(parts, req_body);
+    request.headers_mut().insert(
+        header::FORWARDED,
+        format!("for=\"{}\";", peer).parse().unwrap(),
+    );
 
     let reload_timezone = info.reload_timezone;
 
@@ -276,7 +381,11 @@ async fn proxy_protected_request(
         })
         .await?;
 
-    if reload_timezone { unsafe { tzset(); } }
+    if reload_timezone {
+        unsafe {
+            tzset();
+        }
+    }
 
     Ok(resp)
 }
@@ -289,8 +398,8 @@ pub async fn handle_api_request<Env: RpcEnvironment, S: 'static + BuildHasher +
     req_body: Body,
     uri_param: HashMap<String, String, S>,
 ) -> Result<Response<Body>, Error> {
-
     let delay_unauth_time = std::time::Instant::now() + std::time::Duration::from_millis(3000);
+    let compression = extract_compression_method(&parts.headers);
 
     let result = match info.handler {
         ApiHandler::AsyncHttp(handler) => {
@@ -298,47 +407,69 @@ pub async fn handle_api_request<Env: RpcEnvironment, S: 'static + BuildHasher +
             (handler)(parts, req_body, params, info, Box::new(rpcenv)).await
         }
         ApiHandler::Sync(handler) => {
-            let params = get_request_parameters(info.parameters, parts, req_body, uri_param).await?;
-            (handler)(params, info, &mut rpcenv)
-                .map(|data| (formatter.format_data)(data, &rpcenv))
+            let params =
+                get_request_parameters(info.parameters, parts, req_body, uri_param).await?;
+            (handler)(params, info, &mut rpcenv).map(|data| (formatter.format_data)(data, &rpcenv))
         }
         ApiHandler::Async(handler) => {
-            let params = get_request_parameters(info.parameters, parts, req_body, uri_param).await?;
+            let params =
+                get_request_parameters(info.parameters, parts, req_body, uri_param).await?;
             (handler)(params, info, &mut rpcenv)
                 .await
                 .map(|data| (formatter.format_data)(data, &rpcenv))
         }
     };
 
-    let resp = match result {
+    let mut resp = match result {
         Ok(resp) => resp,
         Err(err) => {
             if let Some(httperr) = err.downcast_ref::<HttpError>() {
                 if httperr.code == StatusCode::UNAUTHORIZED {
-                    tokio::time::delay_until(Instant::from_std(delay_unauth_time)).await;
+                    tokio::time::sleep_until(Instant::from_std(delay_unauth_time)).await;
                 }
             }
             (formatter.format_error)(err)
         }
     };
 
-    if info.reload_timezone { unsafe { tzset(); } }
+    let resp = match compression {
+        Some(CompressionMethod::Deflate) => {
+            resp.headers_mut().insert(
+                header::CONTENT_ENCODING,
+                CompressionMethod::Deflate.content_encoding(),
+            );
+            resp.map(|body| {
+                Body::wrap_stream(DeflateEncoder::with_quality(
+                    body.map_err(|err| {
+                        proxmox::io_format_err!("error during compression: {}", err)
+                    }),
+                    Level::Default,
+                ))
+            })
+        }
+        None => resp,
+    };
+
+    if info.reload_timezone {
+        unsafe {
+            tzset();
+        }
+    }
 
     Ok(resp)
 }
 
 fn get_index(
     userid: Option<Userid>,
-    token: Option<String>,
+    csrf_token: Option<String>,
     language: Option<String>,
     api: &Arc<ApiConfig>,
     parts: Parts,
-) ->  Response<Body> {
-
+) -> Response<Body> {
     let nodename = proxmox::tools::nodename();
-    let userid = userid.as_ref().map(|u| u.as_str()).unwrap_or("");
+    let user = userid.as_ref().map(|u| u.as_str()).unwrap_or("");
 
-    let token = token.unwrap_or_else(|| String::from(""));
+    let csrf_token = csrf_token.unwrap_or_else(|| String::from(""));
 
     let mut debug = false;
     let mut template_file = "index";
@@ -362,28 +493,32 @@ fn get_index(
 
     let data = json!({
         "NodeName": nodename,
-        "UserName": userid,
-        "CSRFPreventionToken": token,
+        "UserName": user,
+        "CSRFPreventionToken": csrf_token,
         "language": lang,
         "debug": debug,
+        "enableTapeUI": api.enable_tape_ui,
     });
 
     let (ct, index) = match api.render_template(template_file, &data) {
         Ok(index) => ("text/html", index),
-        Err(err) => {
-            ("text/plain", format!("Error rendering template: {}", err))
-        }
+        Err(err) => ("text/plain", format!("Error rendering template: {}", err)),
     };
 
-    Response::builder()
+    let mut resp = Response::builder()
         .status(StatusCode::OK)
         .header(header::CONTENT_TYPE, ct)
         .body(index.into())
-        .unwrap()
+        .unwrap();
+
+    if let Some(userid) = userid {
+        resp.extensions_mut().insert(Authid::from((userid, None)));
+    }
+
+    resp
 }
 
 fn extension_to_content_type(filename: &Path) -> (&'static str, bool) {
-
     if let Some(ext) = filename.extension().and_then(|osstr| osstr.to_str()) {
         return match ext {
             "css" => ("text/css", false),
@@ -411,10 +546,11 @@ fn extension_to_content_type(filename: &Path) -> (&'static str, bool) {
     ("application/octet-stream", false)
 }
 
-async fn simple_static_file_download(filename: PathBuf) -> Result<Response<Body>, Error> {
-
-    let (content_type, _nocomp) = extension_to_content_type(&filename);
-
+async fn simple_static_file_download(
+    filename: PathBuf,
+    content_type: &'static str,
+    compression: Option<CompressionMethod>,
+) -> Result<Response<Body>, Error> {
     use tokio::io::AsyncReadExt;
 
     let mut file = File::open(filename)
@@ -422,126 +558,139 @@ async fn simple_static_file_download(filename: PathBuf) -> Result<Response<Body>
         .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;
 
     let mut data: Vec<u8> = Vec::new();
-    file.read_to_end(&mut data)
-        .await
-        .map_err(|err| http_err!(BAD_REQUEST, "File read failed: {}", err))?;
 
-    let mut response = Response::new(data.into());
+    let mut response = match compression {
+        Some(CompressionMethod::Deflate) => {
+            let mut enc = DeflateEncoder::with_quality(data, Level::Default);
+            enc.compress_vec(&mut file, CHUNK_SIZE_LIMIT as usize).await?;
+            let mut response = Response::new(enc.into_inner().into());
+            response.headers_mut().insert(
+                header::CONTENT_ENCODING,
+                CompressionMethod::Deflate.content_encoding(),
+            );
+            response
+        }
+        None => {
+            file.read_to_end(&mut data)
+                .await
+                .map_err(|err| http_err!(BAD_REQUEST, "File read failed: {}", err))?;
+            Response::new(data.into())
+        }
+    };
+
     response.headers_mut().insert(
         header::CONTENT_TYPE,
-        header::HeaderValue::from_static(content_type));
+        header::HeaderValue::from_static(content_type),
+    );
+
     Ok(response)
 }
 
-async fn chuncked_static_file_download(filename: PathBuf) -> Result<Response<Body>, Error> {
-    let (content_type, _nocomp) = extension_to_content_type(&filename);
+async fn chuncked_static_file_download(
+    filename: PathBuf,
+    content_type: &'static str,
+    compression: Option<CompressionMethod>,
+) -> Result<Response<Body>, Error> {
+    let mut resp = Response::builder()
+        .status(StatusCode::OK)
+        .header(header::CONTENT_TYPE, content_type);
 
     let file = File::open(filename)
         .await
         .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;
 
-    let payload = tokio_util::codec::FramedRead::new(file, tokio_util::codec::BytesCodec::new())
-        .map_ok(|bytes| hyper::body::Bytes::from(bytes.freeze()));
-    let body = Body::wrap_stream(payload);
-
-    // fixme: set other headers ?
-    Ok(Response::builder()
-       .status(StatusCode::OK)
-       .header(header::CONTENT_TYPE, content_type)
-       .body(body)
-       .unwrap()
-    )
-}
+    let body = match compression {
+        Some(CompressionMethod::Deflate) => {
+            resp = resp.header(
+                header::CONTENT_ENCODING,
+                CompressionMethod::Deflate.content_encoding(),
+            );
+            Body::wrap_stream(DeflateEncoder::with_quality(
+                AsyncReaderStream::new(file),
+                Level::Default,
+            ))
+        }
+        None => Body::wrap_stream(AsyncReaderStream::new(file)),
+    };
 
-async fn handle_static_file_download(filename: PathBuf) ->  Result<Response<Body>, Error> {
+    Ok(resp.body(body).unwrap())
+}
 
+async fn handle_static_file_download(
+    filename: PathBuf,
+    compression: Option<CompressionMethod>,
+) -> Result<Response<Body>, Error> {
     let metadata = tokio::fs::metadata(filename.clone())
         .map_err(|err| http_err!(BAD_REQUEST, "File access problems: {}", err))
         .await?;
 
-    if metadata.len() < 1024*32 {
-        simple_static_file_download(filename).await
+    let (content_type, nocomp) = extension_to_content_type(&filename);
+    let compression = if nocomp { None } else { compression };
+
+    if metadata.len() < CHUNK_SIZE_LIMIT {
+        simple_static_file_download(filename, content_type, compression).await
     } else {
-        chuncked_static_file_download(filename).await
+        chuncked_static_file_download(filename, content_type, compression).await
     }
 }
 
-fn extract_auth_data(headers: &http::HeaderMap) -> (Option<String>, Option<String>, Option<String>) {
-
-    let mut ticket = None;
-    let mut language = None;
-    if let Some(raw_cookie) = headers.get("COOKIE") {
-        if let Ok(cookie) = raw_cookie.to_str() {
-            ticket = tools::extract_cookie(cookie, "PBSAuthCookie");
-            language = tools::extract_cookie(cookie, "PBSLangCookie");
-        }
+fn extract_lang_header(headers: &http::HeaderMap) -> Option<String> {
+    if let Some(Ok(cookie)) = headers.get("COOKIE").map(|v| v.to_str()) {
+        return tools::extract_cookie(cookie, "PBSLangCookie");
     }
-
-    let token = match headers.get("CSRFPreventionToken").map(|v| v.to_str()) {
-        Some(Ok(v)) => Some(v.to_owned()),
-        _ => None,
-    };
-
-    (ticket, token, language)
+    None
 }
 
-fn check_auth(
-    method: &hyper::Method,
-    ticket: &Option<String>,
-    token: &Option<String>,
-    user_info: &CachedUserInfo,
-) -> Result<Userid, Error> {
-    let ticket_lifetime = tools::ticket::TICKET_LIFETIME;
-
-    let ticket = ticket.as_ref().map(String::as_str);
-    let userid: Userid = Ticket::parse(&ticket.ok_or_else(|| format_err!("missing ticket"))?)?
-        .verify_with_time_frame(public_auth_key(), "PBS", None, -300..ticket_lifetime)?;
-
-    if !user_info.is_active_user(&userid) {
-        bail!("user account disabled or expired.");
-    }
-
-    if method != hyper::Method::GET {
-        if let Some(token) = token {
-            verify_csrf_prevention_token(csrf_secret(), &userid, &token, -300, ticket_lifetime)?;
-        } else {
-            bail!("missing CSRF prevention token");
+// FIXME: support handling multiple compression methods
+fn extract_compression_method(headers: &http::HeaderMap) -> Option<CompressionMethod> {
+    if let Some(Ok(encodings)) = headers.get(header::ACCEPT_ENCODING).map(|v| v.to_str()) {
+        for encoding in encodings.split(&[',', ' '][..]) {
+            if let Ok(method) = encoding.parse() {
+                return Some(method);
+            }
         }
     }
-
-    Ok(userid)
+    None
 }
 
-pub async fn handle_request(api: Arc<ApiConfig>, req: Request<Body>) -> Result<Response<Body>, Error> {
-
+async fn handle_request(
+    api: Arc<ApiConfig>,
+    req: Request<Body>,
+    peer: &std::net::SocketAddr,
+) -> Result<Response<Body>, Error> {
     let (parts, body) = req.into_parts();
-
     let method = parts.method.clone();
     let (path, components) = tools::normalize_uri_path(parts.uri.path())?;
 
     let comp_len = components.len();
 
-    //println!("REQUEST {} {}", method, path);
-    //println!("COMPO {:?}", components);
+    let query = parts.uri.query().unwrap_or_default();
+    if path.len() + query.len() > MAX_URI_QUERY_LENGTH {
+        return Ok(Response::builder()
+            .status(StatusCode::URI_TOO_LONG)
+            .body("".into())
+            .unwrap());
+    }
 
     let env_type = api.env_type();
     let mut rpcenv = RestEnvironment::new(env_type);
 
+    rpcenv.set_client_ip(Some(*peer));
+
     let user_info = CachedUserInfo::new()?;
+    let auth = &api.api_auth;
 
     let delay_unauth_time = std::time::Instant::now() + std::time::Duration::from_millis(3000);
     let access_forbidden_time = std::time::Instant::now() + std::time::Duration::from_millis(500);
 
     if comp_len >= 1 && components[0] == "api2" {
-
         if comp_len >= 2 {
-
             let format = components[1];
 
             let formatter = match format {
                 "json" => &JSON_FORMATTER,
                 "extjs" => &EXTJS_FORMATTER,
-                _ =>  bail!("Unsupported output format '{}'.", format),
+                _ => bail!("Unsupported output format '{}'.", format),
             };
 
             let mut uri_param = HashMap::new();
@@ -555,13 +704,24 @@ pub async fn handle_request(api: Arc<ApiConfig>, req: Request<Body>) -> Result<R
             }
 
             if auth_required {
-                let (ticket, token, _) = extract_auth_data(&parts.headers);
-                match check_auth(&method, &ticket, &token, &user_info) {
-                    Ok(userid) => rpcenv.set_user(Some(userid.to_string())),
-                    Err(err) => {
+                match auth.check_auth(&parts.headers, &method, &user_info) {
+                    Ok(authid) => rpcenv.set_auth_id(Some(authid.to_string())),
+                    Err(auth_err) => {
+                        let err = match auth_err {
+                            AuthError::Generic(err) => err,
+                            AuthError::NoData => {
+                                format_err!("no authentication credentials provided.")
+                            }
+                        };
+                        let peer = peer.ip();
+                        auth_logger()?.log(format!(
+                            "authentication failure; rhost={} msg={}",
+                            peer, err
+                        ));
+
                         // always delay unauthorized calls by 3 seconds (from start of request)
                         let err = http_err!(UNAUTHORIZED, "authentication failed - {}", err);
-                        tokio::time::delay_until(Instant::from_std(delay_unauth_time)).await;
+                        tokio::time::sleep_until(Instant::from_std(delay_unauth_time)).await;
                         return Ok((formatter.format_error)(err));
                     }
                 }
@@ -573,28 +733,40 @@ pub async fn handle_request(api: Arc<ApiConfig>, req: Request<Body>) -> Result<R
                     return Ok((formatter.format_error)(err));
                 }
                 Some(api_method) => {
-                    let user = rpcenv.get_user();
-                    if !check_api_permission(api_method.access.permission, user.as_deref(), &uri_param, user_info.as_ref()) {
+                    let auth_id = rpcenv.get_auth_id();
+                    if !check_api_permission(
+                        api_method.access.permission,
+                        auth_id.as_deref(),
+                        &uri_param,
+                        user_info.as_ref(),
+                    ) {
                         let err = http_err!(FORBIDDEN, "permission check failed");
-                        tokio::time::delay_until(Instant::from_std(access_forbidden_time)).await;
+                        tokio::time::sleep_until(Instant::from_std(access_forbidden_time)).await;
                         return Ok((formatter.format_error)(err));
                     }
 
                     let result = if api_method.protected && env_type == RpcEnvironmentType::PUBLIC {
-                        proxy_protected_request(api_method, parts, body).await
+                        proxy_protected_request(api_method, parts, body, peer).await
                     } else {
-                        handle_api_request(rpcenv, api_method, formatter, parts, body, uri_param).await
+                        handle_api_request(rpcenv, api_method, formatter, parts, body, uri_param)
+                            .await
                     };
 
-                    if let Err(err) = result {
-                        return Ok((formatter.format_error)(err));
+                    let mut response = match result {
+                        Ok(resp) => resp,
+                        Err(err) => (formatter.format_error)(err),
+                    };
+
+                    if let Some(auth_id) = auth_id {
+                        let auth_id: Authid = auth_id.parse()?;
+                        response.extensions_mut().insert(auth_id);
                     }
-                    return result;
+
+                    return Ok(response);
                 }
             }
-
         }
-     } else {
+    } else {
         // not Auth required for accessing files!
 
         if method != hyper::Method::GET {
@@ -602,24 +774,31 @@ pub async fn handle_request(api: Arc<ApiConfig>, req: Request<Body>) -> Result<R
         }
 
         if comp_len == 0 {
-            let (ticket, token, language) = extract_auth_data(&parts.headers);
-            if ticket != None {
-                match check_auth(&method, &ticket, &token, &user_info) {
-                    Ok(userid) => {
-                        let new_token = assemble_csrf_prevention_token(csrf_secret(), &userid);
-                        return Ok(get_index(Some(userid), Some(new_token), language, &api, parts));
-                    }
-                    _ => {
-                        tokio::time::delay_until(Instant::from_std(delay_unauth_time)).await;
-                        return Ok(get_index(None, None, language, &api, parts));
+            let language = extract_lang_header(&parts.headers);
+            match auth.check_auth(&parts.headers, &method, &user_info) {
+                Ok(auth_id) => {
+                    if !auth_id.is_token() {
+                        let userid = auth_id.user();
+                        let new_csrf_token = assemble_csrf_prevention_token(csrf_secret(), userid);
+                        return Ok(get_index(
+                            Some(userid.clone()),
+                            Some(new_csrf_token),
+                            language,
+                            &api,
+                            parts,
+                        ));
                     }
                 }
-            } else {
-                return Ok(get_index(None, None, language, &api, parts));
+                Err(AuthError::Generic(_)) => {
+                    tokio::time::sleep_until(Instant::from_std(delay_unauth_time)).await;
+                }
+                Err(AuthError::NoData) => {}
             }
+            return Ok(get_index(None, None, language, &api, parts));
         } else {
             let filename = api.find_alias(&components);
-            return handle_static_file_download(filename).await;
+            let compression = extract_compression_method(&parts.headers);
+            return handle_static_file_download(filename, compression).await;
         }
     }