[pbs-devel] [PATCH proxmox-backup 2/2] api2/admin/datastore/pxar_file_download: download directory as zip
Dominik Csapak
d.csapak at proxmox.com
Tue Oct 13 11:50:42 CEST 2020
by using the new ZipEncoderStream and recursively adding files to it.
the zip only contains normal files and hardlinks (by simply copying the
content); empty directories, symlinks, etc. are not included
Signed-off-by: Dominik Csapak <d.csapak at proxmox.com>
---
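[not part of the patch, just a note for reviewers: the directory case below is
async recursion feeding a tokio mpsc channel whose receiving end becomes the
streamed HTTP body. The minimal sketch here shows that shape with a
hypothetical in-memory Node tree and plain String entries instead of the real
pxar Accessor/FileEntry and zip::File types, so it only needs tokio and is not
the actual implementation:

use std::future::Future;
use std::pin::Pin;

use tokio::sync::mpsc::{channel, Sender};

// hypothetical stand-in for the pxar directory tree
enum Node {
    File(String),
    Dir(Vec<Node>),
}

// async recursion needs an explicitly boxed future, same as recurse_files()
// in the patch: the function returns Pin<Box<dyn Future<...>>> and re-boxes
// itself for every directory level it descends into
fn walk(
    mut sender: Sender<String>,
    node: Node,
) -> Pin<Box<dyn Future<Output = Result<Sender<String>, String>> + Send>> {
    Box::pin(async move {
        match node {
            Node::File(name) => {
                // hand the entry over to the consuming side of the channel
                sender
                    .send(name)
                    .await
                    .map_err(|err| format!("could not send entry: {}", err))?;
            }
            Node::Dir(children) => {
                for child in children {
                    // thread the sender through the recursion, like the
                    // (sender, decoder) pair in the patch
                    sender = walk(sender, child).await?;
                }
            }
        }
        Ok(sender)
    })
}

#[tokio::main]
async fn main() {
    let tree = Node::Dir(vec![
        Node::File("a.txt".to_string()),
        Node::Dir(vec![Node::File("sub/b.txt".to_string())]),
    ]);

    let (sender, mut receiver) = channel(100);

    // producer: recurse in a background task, as spawn_internal_task does
    // in the patch
    tokio::spawn(async move {
        let _ = walk(sender, tree).await;
    });

    // consumer: in the patch the receiver is wrapped by ZipEncoderStream and
    // handed to Body::wrap_stream; here we just drain it
    while let Some(entry) = receiver.recv().await {
        println!("would add to zip: {}", entry);
    }
}

threading the sender (and, in the patch, also the decoder) through the return
value keeps a single owned handle moving through the recursion instead of
cloning it at every directory level.]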
src/api2/admin/datastore.rs | 132 +++++++++++++++++++++++++++++++-----
www/window/FileBrowser.js | 8 +++
2 files changed, 123 insertions(+), 17 deletions(-)
diff --git a/src/api2/admin/datastore.rs b/src/api2/admin/datastore.rs
index c260b62d..3f22ffe7 100644
--- a/src/api2/admin/datastore.rs
+++ b/src/api2/admin/datastore.rs
@@ -2,12 +2,15 @@ use std::collections::{HashSet, HashMap};
use std::ffi::OsStr;
use std::os::unix::ffi::OsStrExt;
use std::sync::{Arc, Mutex};
+use std::pin::Pin;
+use std::path::PathBuf;
use anyhow::{bail, format_err, Error};
use futures::*;
use hyper::http::request::Parts;
use hyper::{header, Body, Response, StatusCode};
use serde_json::{json, Value};
+use tokio::sync::mpsc::Sender;
use proxmox::api::{
api, ApiResponseFuture, ApiHandler, ApiMethod, Router,
@@ -19,7 +22,7 @@ use proxmox::tools::fs::{replace_file, CreateOptions};
use proxmox::try_block;
use proxmox::{http_err, identity, list_subdirs_api_method, sortable};
-use pxar::accessor::aio::Accessor;
+use pxar::accessor::aio::{Accessor, FileContents, FileEntry};
use pxar::EntryKind;
use crate::api2::types::*;
@@ -29,7 +32,15 @@ use crate::config::datastore;
use crate::config::cached_user_info::CachedUserInfo;
use crate::server::WorkerTask;
-use crate::tools::{self, AsyncReaderStream, WrappedReaderStream};
+use crate::tools::{
+ self,
+ AsyncReaderStream,
+ WrappedReaderStream,
+ zip::{
+ self,
+ ZipEncoderStream,
+ }
+};
use crate::config::acl::{
PRIV_DATASTORE_AUDIT,
PRIV_DATASTORE_MODIFY,
@@ -1243,6 +1254,68 @@ fn catalog(
Ok(res.into())
}
+fn recurse_files<T: 'static>(
+ mut sender: Sender<zip::File<FileContents<T>>>,
+ mut decoder: Accessor<T>,
+ prefix: PathBuf,
+ file: FileEntry<T>,
+) -> Pin<
+ Box<
+ dyn Future<Output = Result<(Sender<zip::File<FileContents<T>>>, Accessor<T>), Error>>
+ + Send
+ + 'static,
+ >,
+>
+where
+ T: Clone + pxar::accessor::ReadAt + Unpin + Send + Sync,
+{
+ Box::pin(async move {
+ let metadata = file.entry().metadata();
+ let path = file.entry().path().strip_prefix(&prefix)?.to_path_buf();
+
+ match file.kind() {
+ EntryKind::File { .. } => {
+ let entry = (
+ path,
+ metadata.stat.mtime.secs,
+ metadata.stat.mode as u16,
+ file.contents().await?,
+ );
+ sender
+ .send(entry)
+ .await
+ .map_err(|err| format_err!("could not send file entry: {}", err))?;
+ }
+ EntryKind::Hardlink(_) => {
+ let realfile = decoder.follow_hardlink(&file).await?;
+ let entry = (
+ path,
+ metadata.stat.mtime.secs,
+ metadata.stat.mode as u16,
+ realfile.contents().await?,
+ );
+ sender
+ .send(entry)
+ .await
+ .map_err(|err| format_err!("could not send file entry: {}", err))?;
+ }
+ EntryKind::Directory => {
+ let dir = file.enter_directory().await?;
+ let mut readdir = dir.read_dir();
+ while let Some(entry) = readdir.next().await {
+ let entry = entry?.decode_entry().await?;
+ let (sender_tmp, decoder_tmp) = recurse_files(sender, decoder, prefix.clone(), entry).await?;
+ sender = sender_tmp;
+ decoder = decoder_tmp;
+ }
+ }
+ _ => {} // ignore all else
+ };
+
+ Ok((sender, decoder))
+ })
+}
+
#[sortable]
pub const API_METHOD_PXAR_FILE_DOWNLOAD: ApiMethod = ApiMethod::new(
&ApiHandler::AsyncHttp(&pxar_file_download),
@@ -1325,22 +1398,47 @@ fn pxar_file_download(
.lookup(OsStr::from_bytes(file_path)).await?
.ok_or(format_err!("error opening '{:?}'", file_path))?;
- let file = match file.kind() {
- EntryKind::File { .. } => file,
- EntryKind::Hardlink(_) => {
- decoder.follow_hardlink(&file).await?
- },
- // TODO symlink
- other => bail!("cannot download file of type {:?}", other),
- };
+ let body = match file.kind() {
+ EntryKind::File { .. } =>
+ Body::wrap_stream(
+ AsyncReaderStream::new(file.contents().await?)
+ .map_err(move |err| {
+ eprintln!("error during streaming of file '{:?}' - {}", filepath, err);
+ err
+ })
+ ),
+ EntryKind::Hardlink(_) =>
+ Body::wrap_stream(
+ AsyncReaderStream::new(decoder.follow_hardlink(&file).await?.contents().await?)
+ .map_err(move |err| {
+ eprintln!("error during streaming of hardlink '{:?}' - {}", filepath, err);
+ err
+ })
+ ),
+ EntryKind::Directory => {
+ let (sender, receiver) = tokio::sync::mpsc::channel(100);
+ let mut prefix = PathBuf::new();
+ let mut components = file.entry().path().components();
+            components.next_back(); // discard the last component
+ for comp in components {
+ prefix.push(comp);
+ }
- let body = Body::wrap_stream(
- AsyncReaderStream::new(file.contents().await?)
- .map_err(move |err| {
- eprintln!("error during streaming of '{:?}' - {}", filepath, err);
- err
- })
- );
+ crate::server::spawn_internal_task(async move {
+ let _ = recurse_files(sender, decoder, prefix, file).await?;
+ Ok::<(), Error>(())
+ });
+
+ Body::wrap_stream(
+ ZipEncoderStream::new(receiver)
+ .map_err(move |err| {
+ eprintln!("error during streaming of zip '{:?}' - {}", filepath, err);
+ err
+ })
+ )
+ }
+ other => bail!("cannot download file of type {:?}", other)
+ };
// fixme: set other headers ?
Ok(Response::builder()
diff --git a/www/window/FileBrowser.js b/www/window/FileBrowser.js
index 2ac50e1a..01b5d79b 100644
--- a/www/window/FileBrowser.js
+++ b/www/window/FileBrowser.js
@@ -87,6 +87,9 @@ Ext.define("PBS.window.FileBrowser", {
};
params.filepath = data.filepath;
atag.download = data.text;
+ if (data.type === 'd') {
+ atag.download += ".zip";
+ }
atag.href = me
.buildUrl(`/api2/json/admin/datastore/${view.datastore}/pxar-file-download`, params);
atag.click();
@@ -106,6 +109,11 @@ Ext.define("PBS.window.FileBrowser", {
case 'f':
canDownload = true;
break;
+ case 'd':
+ if (data.depth > 1) {
+ canDownload = true;
+ }
+ break;
default: break;
}
--
2.20.1