[pbs-devel] [PATCH proxmox-backup v2 5/9] syncjob: use do_sync_job also for scheduled sync jobs
Dominik Csapak
d.csapak at proxmox.com
Tue Aug 11 11:57:20 CEST 2020
Signed-off-by: Dominik Csapak <d.csapak@proxmox.com>
---
changes from v1:
* drop the hunks for the manual sync path, as those were already handled in the previous patches
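
For context, a minimal, self-contained sketch of the new call shape follows. The real do_sync_job lives in src/api2/pull.rs; its signature is only inferred here from the call site in the last hunk, SyncJobConfig and Userid are stand-in types, and the userid value is a placeholder for what Userid::backup_userid() returns, so treat this as an illustration rather than the actual implementation (assumes the anyhow crate, as in the real codebase):

use anyhow::Error;

// Stand-in for proxmox_backup::config::sync::SyncJobConfig; only the
// fields referenced below are sketched.
struct SyncJobConfig {
    store: String,
    remote: String,
    remote_store: String,
    remove_vanished: Option<bool>,
}

// Stand-in for the Userid type from the api2 types module.
#[derive(Debug)]
struct Userid(String);

// Assumed shape of do_sync_job, inferred from the call in the hunk below:
// it queues the sync as a worker task and returns the task's UPID string.
// The real implementation spawns a WorkerTask and runs pull_store(); this
// stub only logs what it would do so the sketch compiles on its own.
fn do_sync_job(
    id: &str,
    job_config: SyncJobConfig,
    userid: &Userid,
    schedule: Option<String>,
) -> Result<String, Error> {
    println!(
        "sync '{}' from '{}/{}' (schedule: {:?}, owner: {:?}, remove vanished: {})",
        job_config.store,
        job_config.remote,
        job_config.remote_store,
        schedule,
        userid,
        job_config.remove_vanished.unwrap_or(true),
    );
    Ok(format!("UPID:sketch:{}", id))
}

// Mirrors the new call site in schedule_datastore_sync_jobs(): the
// scheduler only hands the parsed job config over and logs a failure.
fn schedule_one(job_id: &str, job_config: SyncJobConfig, event_str: String) {
    let userid = Userid("backup@pam".to_string()); // placeholder owner

    if let Err(err) = do_sync_job(job_id, job_config, &userid, Some(event_str)) {
        eprintln!("unable to start datastore sync job {} - {}", job_id, err);
    }
}

fn main() {
    let job_config = SyncJobConfig {
        store: "local-store".to_string(),
        remote: "remote1".to_string(),
        remote_store: "remote-store".to_string(),
        remove_vanished: None,
    };
    schedule_one("job1", job_config, "daily".to_string());
}

With the datastore/remote lookups and the WorkerTask/HttpClient setup moved into do_sync_job, the scheduler loop only parses the schedule and hands the job over, which is what allows the large deletion in the hunk below.
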
src/bin/proxmox-backup-proxy.rs | 66 +++------------------------------
1 file changed, 5 insertions(+), 61 deletions(-)
diff --git a/src/bin/proxmox-backup-proxy.rs b/src/bin/proxmox-backup-proxy.rs
index 3f7bf3ec..911f1057 100644
--- a/src/bin/proxmox-backup-proxy.rs
+++ b/src/bin/proxmox-backup-proxy.rs
@@ -18,6 +18,8 @@ use proxmox_backup::server::{ApiConfig, rest::*};
use proxmox_backup::auth_helpers::*;
use proxmox_backup::tools::disks::{ DiskManage, zfs_pool_stats };
+use proxmox_backup::api2::pull::do_sync_job;
+
fn main() {
proxmox_backup::tools::setup_safe_path_env();
@@ -472,10 +474,7 @@ async fn schedule_datastore_prune() {
async fn schedule_datastore_sync_jobs() {
use proxmox_backup::{
- backup::DataStore,
- client::{ HttpClient, HttpClientOptions, BackupRepository, pull::pull_store },
- server::{ WorkerTask },
- config::{ sync::{self, SyncJobConfig}, remote::{self, Remote} },
+ config::{ sync::{self, SyncJobConfig}},
tools::systemd::time::{ parse_calendar_event, compute_next_event },
};
@@ -487,14 +486,6 @@ async fn schedule_datastore_sync_jobs() {
Ok((config, _digest)) => config,
};
- let remote_config = match remote::config() {
- Err(err) => {
- eprintln!("unable to read remote config - {}", err);
- return;
- }
- Ok((config, _digest)) => config,
- };
-
for (job_id, (_, job_config)) in config.sections {
let job_config: SyncJobConfig = match serde_json::from_value(job_config) {
Ok(c) => c,
@@ -550,57 +541,10 @@ async fn schedule_datastore_sync_jobs() {
};
if next > now { continue; }
-
- let job_id2 = job_id.clone();
-
- let tgt_store = match DataStore::lookup_datastore(&job_config.store) {
- Ok(datastore) => datastore,
- Err(err) => {
- eprintln!("lookup_datastore '{}' failed - {}", job_config.store, err);
- continue;
- }
- };
-
- let remote: Remote = match remote_config.lookup("remote", &job_config.remote) {
- Ok(remote) => remote,
- Err(err) => {
- eprintln!("remote_config lookup failed: {}", err);
- continue;
- }
- };
-
let userid = Userid::backup_userid().clone();
- let delete = job_config.remove_vanished.unwrap_or(true);
-
- if let Err(err) = WorkerTask::spawn(
- worker_type,
- Some(job_id.clone()),
- userid.clone(),
- false,
- move |worker| async move {
- worker.log(format!("Starting datastore sync job '{}'", job_id));
- worker.log(format!("task triggered by schedule '{}'", event_str));
- worker.log(format!("Sync datastore '{}' from '{}/{}'",
- job_config.store, job_config.remote, job_config.remote_store));
-
- let options = HttpClientOptions::new()
- .password(Some(remote.password.clone()))
- .fingerprint(remote.fingerprint.clone());
-
- let client = HttpClient::new(&remote.host, &remote.userid, options)?;
- let _auth_info = client.login() // make sure we can auth
- .await
- .map_err(|err| format_err!("remote connection to '{}' failed - {}", remote.host, err))?;
-
- let src_repo = BackupRepository::new(Some(remote.userid), Some(remote.host), job_config.remote_store);
-
- pull_store(&worker, &client, &src_repo, tgt_store, delete, userid).await?;
-
- Ok(())
- }
- ) {
- eprintln!("unable to start datastore sync job {} - {}", job_id2, err);
+ if let Err(err) = do_sync_job(&job_id, job_config, &userid, Some(event_str)) {
+ eprintln!("unable to start datastore sync job {} - {}", &job_id, err);
}
}
}
--
2.20.1