[pbs-devel] [RFC proxmox-backup 16/39] api: backup: conditionally upload chunks to S3 object store backend
Christian Ebner
c.ebner at proxmox.com
Mon May 19 13:46:17 CEST 2025
Upload fixed and dynamically sized chunks to either the filesystem or
the S3 object store, depending on the configured backend.
Signed-off-by: Christian Ebner <c.ebner at proxmox.com>
---
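Review note: the core of the new S3 path is how the put result maps to
the chunk's duplicate status. A standalone sketch of that mapping (types
are mocked here for illustration; the real PutObjectResponse lives in
pbs_s3_client):

    // Sketch only: mirrors the match in upload_to_backend() below.
    enum PutObjectResponse {
        // Object already exists on the S3 backend.
        PreconditionFailed,
        // A concurrent operation on the same object is in flight.
        NeedsRetry,
        // Upload went through; payload is the response content.
        Success(String),
    }

    fn duplicate_status(response: PutObjectResponse) -> Result<bool, String> {
        match response {
            // Chunk was stored before: register it as a duplicate.
            PutObjectResponse::PreconditionFailed => Ok(true),
            // Client has to re-upload once the concurrent operation ends.
            PutObjectResponse::NeedsRetry => {
                Err("concurrent operation, reupload required".into())
            }
            // Freshly uploaded chunk, not previously known.
            PutObjectResponse::Success(_content) => Ok(false),
        }
    }
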
src/api2/backup/upload_chunk.rs | 45 ++++++++++++++++++++++-----------
1 file changed, 30 insertions(+), 15 deletions(-)
diff --git a/src/api2/backup/upload_chunk.rs b/src/api2/backup/upload_chunk.rs
index 20259660a..59f9ca558 100644
--- a/src/api2/backup/upload_chunk.rs
+++ b/src/api2/backup/upload_chunk.rs
@@ -15,7 +15,8 @@ use proxmox_sortable_macro::sortable;
use pbs_api_types::{BACKUP_ARCHIVE_NAME_SCHEMA, CHUNK_DIGEST_SCHEMA};
use pbs_datastore::file_formats::{DataBlobHeader, EncryptedDataBlobHeader};
-use pbs_datastore::{DataBlob, DataStore};
+use pbs_datastore::{DataBlob, DataStore, DatastoreBackend};
+use pbs_s3_client::PutObjectResponse;
use pbs_tools::json::{required_integer_param, required_string_param};
use super::environment::*;
@@ -153,16 +154,10 @@ fn upload_fixed_chunk(
) -> ApiResponseFuture {
async move {
let wid = required_integer_param(&param, "wid")? as usize;
- let size = required_integer_param(&param, "size")? as u32;
- let encoded_size = required_integer_param(&param, "encoded-size")? as u32;
-
- let digest_str = required_string_param(&param, "digest")?;
- let digest = <[u8; 32]>::from_hex(digest_str)?;
-
let env: &BackupEnvironment = rpcenv.as_ref();
let (digest, size, compressed_size, is_duplicate) =
- UploadChunk::new(req_body, env.datastore.clone(), digest, size, encoded_size).await?;
+ upload_to_backend(req_body, param, env.datastore.clone(), &env.backend).await?;
env.register_fixed_chunk(wid, digest, size, compressed_size, is_duplicate)?;
let digest_str = hex::encode(digest);
@@ -222,16 +217,10 @@ fn upload_dynamic_chunk(
) -> ApiResponseFuture {
async move {
let wid = required_integer_param(&param, "wid")? as usize;
- let size = required_integer_param(&param, "size")? as u32;
- let encoded_size = required_integer_param(&param, "encoded-size")? as u32;
-
- let digest_str = required_string_param(&param, "digest")?;
- let digest = <[u8; 32]>::from_hex(digest_str)?;
-
let env: &BackupEnvironment = rpcenv.as_ref();
let (digest, size, compressed_size, is_duplicate) =
- UploadChunk::new(req_body, env.datastore.clone(), digest, size, encoded_size).await?;
+ upload_to_backend(req_body, param, env.datastore.clone(), &env.backend).await?;
env.register_dynamic_chunk(wid, digest, size, compressed_size, is_duplicate)?;
let digest_str = hex::encode(digest);
@@ -243,6 +232,32 @@ fn upload_dynamic_chunk(
.boxed()
}
+async fn upload_to_backend(
+ req_body: Body,
+ param: Value,
+ datastore: Arc<DataStore>,
+ backend: &DatastoreBackend,
+) -> Result<([u8; 32], u32, u32, bool), Error> {
+ let size = required_integer_param(&param, "size")? as u32;
+ let encoded_size = required_integer_param(&param, "encoded-size")? as u32;
+ let digest_str = required_string_param(&param, "digest")?;
+ let digest = <[u8; 32]>::from_hex(digest_str)?;
+
+ match backend {
+ DatastoreBackend::Filesystem => {
+ UploadChunk::new(req_body, datastore, digest, size, encoded_size).await
+ }
+ DatastoreBackend::S3(s3_client) => {
+ let is_duplicate = match s3_client.put_object(digest.into(), req_body).await? {
+ PutObjectResponse::PreconditionFailed => true,
+ PutObjectResponse::NeedsRetry => bail!("concurrent operation, reupload required"),
+ PutObjectResponse::Success(_content) => false,
+ };
+ Ok((digest, size, encoded_size, is_duplicate))
+ }
+ }
+}
+
pub const API_METHOD_UPLOAD_SPEEDTEST: ApiMethod = ApiMethod::new(
&ApiHandler::AsyncHttp(&upload_speedtest),
&ObjectSchema::new("Test upload speed.", &[]),
--
2.39.5
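Side note, not part of the patch: the PreconditionFailed variant suggests
the client issues a conditional put, i.e. the upload only succeeds if no
object exists under that key yet. A rough HTTP-level illustration of that
idea (the header and the status-code mapping are assumptions made for this
sketch, not taken from pbs_s3_client):

    use http::{Method, Request};

    // Hypothetical: build a conditional PUT for a chunk object. With
    // "If-None-Match: *" the server only accepts the write if no object
    // exists under that key; otherwise it answers 412 Precondition Failed.
    fn conditional_put(url: &str, body: Vec<u8>) -> Request<Vec<u8>> {
        Request::builder()
            .method(Method::PUT)
            .uri(url)
            .header("If-None-Match", "*")
            .body(body)
            .expect("valid request")
    }

    // Assumed mapping to the patch's response variants:
    //   2xx -> PutObjectResponse::Success(..)        (chunk is new)
    //   412 -> PutObjectResponse::PreconditionFailed (duplicate chunk)
    //   409 -> PutObjectResponse::NeedsRetry         (concurrent upload)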