[pbs-devel] [PATCH proxmox-backup v2 06/12] local store cache: refactor fetch and insert of chunks for s3 backend
Christian Ebner
c.ebner at proxmox.com
Wed Oct 8 17:21:19 CEST 2025
Move the common logic for fetching a chunk via the s3 backend into a
shared helper, covering both the case where no local chunk marker file
is found and the case where the marker file has no content. This is in
preparation for further restructuring in the following patches.
Signed-off-by: Christian Ebner <c.ebner at proxmox.com>
---
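Not part of the patch itself: below is an illustrative, self-contained
sketch of the pattern being factored out, for readers not following the
full series. The trait and struct names (RemoteStore, InMemoryRemote,
LocalStore) are simplified stand-ins, not the actual pbs-datastore or
proxmox-s3-client API; the real helper is async and works on a DataBlob
and an S3Client, as shown in the diff below.

use std::collections::HashMap;

/// Stand-in for the remote (s3) object store.
trait RemoteStore {
    /// Return the object contents, or None if the key does not exist.
    fn get_object(&self, key: &str) -> Result<Option<Vec<u8>>, String>;
}

/// Trivial in-memory backend so the example runs on its own.
struct InMemoryRemote {
    objects: HashMap<String, Vec<u8>>,
}

impl RemoteStore for InMemoryRemote {
    fn get_object(&self, key: &str) -> Result<Option<Vec<u8>>, String> {
        Ok(self.objects.get(key).cloned())
    }
}

/// Stand-in for the local chunk store / LRU cache.
struct LocalStore {
    chunks: HashMap<[u8; 32], Vec<u8>>,
}

/// The shared step: both the "chunk file missing" and the "empty marker
/// file" paths call this one helper instead of duplicating the fetch logic.
fn fetch_and_insert<R: RemoteStore>(
    remote: &R,
    local: &mut LocalStore,
    digest: &[u8; 32],
) -> Result<Vec<u8>, String> {
    // Hex-encode the digest as object key (the real code derives it via
    // crate::s3::object_key_from_digest).
    let key: String = digest.iter().map(|b| format!("{b:02x}")).collect();
    match remote.get_object(&key)? {
        None => Err(format!("could not fetch object with key {key}")),
        Some(data) => {
            // Re-insert locally so the next access is served from disk.
            local.chunks.insert(*digest, data.clone());
            Ok(data)
        }
    }
}

fn main() {
    let digest = [0u8; 32];
    let key: String = digest.iter().map(|b| format!("{b:02x}")).collect();
    let remote = InMemoryRemote {
        objects: HashMap::from([(key, b"chunk data".to_vec())]),
    };
    let mut local = LocalStore { chunks: HashMap::new() };
    let chunk = fetch_and_insert(&remote, &mut local, &digest).unwrap();
    assert_eq!(chunk, b"chunk data".to_vec());
    assert!(local.chunks.contains_key(&digest));
}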
.../src/local_datastore_lru_cache.rs | 47 +++++++++----------
1 file changed, 23 insertions(+), 24 deletions(-)
diff --git a/pbs-datastore/src/local_datastore_lru_cache.rs b/pbs-datastore/src/local_datastore_lru_cache.rs
index 7d0b3e114..ea92bc9b3 100644
--- a/pbs-datastore/src/local_datastore_lru_cache.rs
+++ b/pbs-datastore/src/local_datastore_lru_cache.rs
@@ -115,18 +115,8 @@ impl LocalDatastoreLruCache {
// Expected chunk to be present since LRU cache has it, but it is missing
// locally, try to fetch again
if err.kind() == std::io::ErrorKind::NotFound {
- let object_key = crate::s3::object_key_from_digest(digest)?;
- match cacher.client.get_object(object_key).await? {
- None => {
- bail!("could not fetch object with key {}", hex::encode(digest))
- }
- Some(response) => {
- let bytes = response.content.collect().await?.to_bytes();
- let chunk = DataBlob::from_raw(bytes.to_vec())?;
- self.store.insert_chunk(&chunk, digest)?;
- return Ok(Some(chunk));
- }
- }
+ let chunk = self.fetch_and_insert(cacher.client.clone(), digest).await?;
+ return Ok(Some(chunk));
} else {
return Err(Error::from(err));
}
@@ -138,18 +128,8 @@ impl LocalDatastoreLruCache {
use std::io::Seek;
// Check if file is empty marker file, try fetching content if so
if file.seek(std::io::SeekFrom::End(0))? == 0 {
- let object_key = crate::s3::object_key_from_digest(digest)?;
- match cacher.client.get_object(object_key).await? {
- None => {
- bail!("could not fetch object with key {}", hex::encode(digest))
- }
- Some(response) => {
- let bytes = response.content.collect().await?.to_bytes();
- let chunk = DataBlob::from_raw(bytes.to_vec())?;
- self.store.insert_chunk(&chunk, digest)?;
- return Ok(Some(chunk));
- }
- }
+ let chunk = self.fetch_and_insert(cacher.client.clone(), digest).await?;
+ return Ok(Some(chunk));
} else {
return Err(err);
}
@@ -165,4 +145,23 @@ impl LocalDatastoreLruCache {
pub fn contains(&self, digest: &[u8; 32]) -> bool {
self.cache.contains(*digest)
}
+
+ async fn fetch_and_insert(
+ &self,
+ client: Arc<S3Client>,
+ digest: &[u8; 32],
+ ) -> Result<DataBlob, Error> {
+ let object_key = crate::s3::object_key_from_digest(digest)?;
+ match client.get_object(object_key).await? {
+ None => {
+ bail!("could not fetch object with key {}", hex::encode(digest))
+ }
+ Some(response) => {
+ let bytes = response.content.collect().await?.to_bytes();
+ let chunk = DataBlob::from_raw(bytes.to_vec())?;
+ self.store.insert_chunk(&chunk, digest)?;
+ Ok(chunk)
+ }
+ }
+ }
}
--
2.47.3