[pbs-devel] [PATCH proxmox-backup v2 5/5] server/rest: compress static files
Thomas Lamprecht
t.lamprecht at proxmox.com
Fri Apr 2 14:32:41 CEST 2021
On 01.04.21 16:11, Dominik Csapak wrote:
> compress them on the fly
>
> Signed-off-by: Dominik Csapak <d.csapak at proxmox.com>
> ---
> src/server/rest.rs | 93 ++++++++++++++++++++++++++++++++--------------
> 1 file changed, 66 insertions(+), 27 deletions(-)
>
> diff --git a/src/server/rest.rs b/src/server/rest.rs
> index 357d1b81..6b1bb0cb 100644
> --- a/src/server/rest.rs
> +++ b/src/server/rest.rs
> @@ -40,6 +40,7 @@ use crate::auth_helpers::*;
> use crate::config::cached_user_info::CachedUserInfo;
> use crate::tools;
> use crate::tools::compression::{CompressionMethod, DeflateEncoder, Level};
> +use crate::tools::AsyncReaderStream;
> use crate::tools::FileLogger;
>
> extern "C" {
> @@ -432,16 +433,18 @@ pub async fn handle_api_request<Env: RpcEnvironment, S: 'static + BuildHasher +
>
> let resp = match compression {
> Some(CompressionMethod::Deflate) => {
> - resp.headers_mut()
> - .insert(header::CONTENT_ENCODING, CompressionMethod::Deflate.content_encoding());
> - resp.map(|body|
> + resp.headers_mut().insert(
> + header::CONTENT_ENCODING,
> + CompressionMethod::Deflate.content_encoding(),
> + );
> + resp.map(|body| {
> Body::wrap_stream(DeflateEncoder::with_quality(
> body.map_err(|err| {
> proxmox::io_format_err!("error during compression: {}", err)
> }),
> Level::Fastest,
> - )),
> - )
> + ))
> + })
> }
> Some(_other) => {
> // fixme: implement other compression algorithms
> @@ -546,9 +549,11 @@ fn extension_to_content_type(filename: &Path) -> (&'static str, bool) {
> ("application/octet-stream", false)
> }
>
> -async fn simple_static_file_download(filename: PathBuf) -> Result<Response<Body>, Error> {
> - let (content_type, _nocomp) = extension_to_content_type(&filename);
> -
> +async fn simple_static_file_download(
> + filename: PathBuf,
> + content_type: &'static str,
> + compression: Option<CompressionMethod>,
> +) -> Result<Response<Body>, Error> {
> use tokio::io::AsyncReadExt;
>
> let mut file = File::open(filename)
> @@ -556,46 +561,79 @@ async fn simple_static_file_download(filename: PathBuf) -> Result<Response<Body>
> .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;
>
> let mut data: Vec<u8> = Vec::new();
> - file.read_to_end(&mut data)
> - .await
> - .map_err(|err| http_err!(BAD_REQUEST, "File read failed: {}", err))?;
>
> - let mut response = Response::new(data.into());
> + let mut response = match compression {
> + Some(CompressionMethod::Deflate) => {
> + let mut enc = DeflateEncoder::with_quality(data, Level::Fastest);
> + enc.compress_vec(&mut file, 32 * 1024).await?;
As discussed off-list, that value should really go into a `const CHUNK_RESPONSE_LIMIT` or the like.
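E.g., something along these lines (untested sketch, the exact name/type is just a suggestion):

    /// Static files smaller than this are read (and possibly compressed)
    /// into a single response body; bigger files get streamed chunk-wise.
    const CHUNK_RESPONSE_LIMIT: u64 = 32 * 1024;

and then here:

    enc.compress_vec(&mut file, CHUNK_RESPONSE_LIMIT as usize).await?;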
> + let mut response = Response::new(enc.into_inner().into());
> + response.headers_mut().insert(
> + header::CONTENT_ENCODING,
> + CompressionMethod::Deflate.content_encoding(),
> + );
> + response
> + }
> + Some(_) | None => {
> + file.read_to_end(&mut data)
> + .await
> + .map_err(|err| http_err!(BAD_REQUEST, "File read failed: {}", err))?;
> + Response::new(data.into())
> + }
> + };
> +
> response.headers_mut().insert(
> header::CONTENT_TYPE,
> header::HeaderValue::from_static(content_type),
> );
> +
> Ok(response)
> }
>
> -async fn chuncked_static_file_download(filename: PathBuf) -> Result<Response<Body>, Error> {
> - let (content_type, _nocomp) = extension_to_content_type(&filename);
> +async fn chuncked_static_file_download(
> + filename: PathBuf,
> + content_type: &'static str,
> + compression: Option<CompressionMethod>,
> +) -> Result<Response<Body>, Error> {
> + let mut resp = Response::builder()
> + .status(StatusCode::OK)
> + .header(header::CONTENT_TYPE, content_type);
>
> let file = File::open(filename)
> .await
> .map_err(|err| http_err!(BAD_REQUEST, "File open failed: {}", err))?;
>
> - let payload = tokio_util::codec::FramedRead::new(file, tokio_util::codec::BytesCodec::new())
> - .map_ok(|bytes| bytes.freeze());
> - let body = Body::wrap_stream(payload);
> + let body = match compression {
> + Some(CompressionMethod::Deflate) => {
> + resp = resp.header(
> + header::CONTENT_ENCODING,
> + CompressionMethod::Deflate.content_encoding(),
> + );
> + Body::wrap_stream(DeflateEncoder::with_quality(
> + AsyncReaderStream::new(file),
> + Level::Fastest,
> + ))
> + }
> + Some(_) | None => Body::wrap_stream(AsyncReaderStream::new(file)),
> + };
>
> - // FIXME: set other headers ?
> - Ok(Response::builder()
> - .status(StatusCode::OK)
> - .header(header::CONTENT_TYPE, content_type)
> - .body(body)
> - .unwrap())
> + Ok(resp.body(body).unwrap())
> }
>
> -async fn handle_static_file_download(filename: PathBuf) -> Result<Response<Body>, Error> {
> +async fn handle_static_file_download(
> + filename: PathBuf,
> + compression: Option<CompressionMethod>,
> +) -> Result<Response<Body>, Error> {
> let metadata = tokio::fs::metadata(filename.clone())
> .map_err(|err| http_err!(BAD_REQUEST, "File access problems: {}", err))
> .await?;
>
> + let (content_type, nocomp) = extension_to_content_type(&filename);
> + let compression = if nocomp { None } else { compression };
> +
> if metadata.len() < 1024 * 32 {
The const from above could then be reused here, too.
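i.e., roughly:

    if metadata.len() < CHUNK_RESPONSE_LIMIT {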
> - simple_static_file_download(filename).await
> + simple_static_file_download(filename, content_type, compression).await
> } else {
> - chuncked_static_file_download(filename).await
> + chuncked_static_file_download(filename, content_type, compression).await
> }
> }
>
> @@ -773,7 +811,8 @@ async fn handle_request(
> }
> } else {
> let filename = api.find_alias(&components);
> - return handle_static_file_download(filename).await;
> + let compression = extract_compression_method(&parts.headers);
> + return handle_static_file_download(filename, compression).await;
> }
> }
>
>