mirror of https://git.proxmox.com/git/proxmox-backup
client: backup: conditionally write catalog for file level backups
Only write the catalog when using the regular backup mode; do not write it when using the split archive mode.

Signed-off-by: Christian Ebner <c.ebner@proxmox.com>
parent 4dd816b343
commit c0302805c4
@@ -15,7 +15,7 @@ use nix::sys::stat::Mode;
 use proxmox_async::blocking::TokioWriterAdapter;
 use proxmox_io::StdChannelWriter;
 
-use pbs_datastore::catalog::CatalogWriter;
+use pbs_datastore::catalog::{BackupCatalogWriter, CatalogWriter};
 
 use crate::inject_reused_chunks::InjectChunks;
 use crate::pxar::create::PxarWriters;
@@ -42,7 +42,7 @@ impl Drop for PxarBackupStream {
 impl PxarBackupStream {
     pub fn new<W: Write + Send + 'static>(
         dir: Dir,
-        catalog: Arc<Mutex<CatalogWriter<W>>>,
+        catalog: Option<Arc<Mutex<CatalogWriter<W>>>>,
         options: crate::pxar::PxarCreateOptions,
         boundaries: Option<mpsc::Sender<InjectChunks>>,
         separate_payload_stream: bool,
@@ -82,7 +82,10 @@ impl PxarBackupStream {
         let handler = async move {
             if let Err(err) = crate::pxar::create_archive(
                 dir,
-                PxarWriters::new(writer, Some(catalog)),
+                PxarWriters::new(
+                    writer,
+                    catalog.map(|c| c as Arc<Mutex<dyn BackupCatalogWriter + Send>>),
+                ),
                 crate::pxar::Flags::DEFAULT,
                 move |path| {
                     log::debug!("{:?}", path);
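Since the catalog is now optional, the call site can no longer pass `Some(catalog)` directly; the concrete `Arc<Mutex<CatalogWriter<W>>>` has to be mapped into the trait object `Arc<Mutex<dyn BackupCatalogWriter + Send>>` expected by `PxarWriters::new`. A minimal, self-contained sketch of that unsizing coercion, using a stand-in trait and writer rather than the pbs_datastore types:

    use std::sync::{Arc, Mutex};

    // Stand-in trait and concrete writer; the real code uses pbs_datastore's
    // BackupCatalogWriter trait and CatalogWriter<W> type instead.
    trait CatalogWrite {
        fn start_directory(&mut self, name: &str);
    }

    struct PlainCatalog {
        entries: Vec<String>,
    }

    impl CatalogWrite for PlainCatalog {
        fn start_directory(&mut self, name: &str) {
            self.entries.push(format!("dir: {name}"));
        }
    }

    fn main() {
        let concrete: Option<Arc<Mutex<PlainCatalog>>> =
            Some(Arc::new(Mutex::new(PlainCatalog { entries: Vec::new() })));

        // The unsizing cast to the trait object is written out explicitly inside
        // the closure, mirroring
        // `catalog.map(|c| c as Arc<Mutex<dyn BackupCatalogWriter + Send>>)`.
        let dynamic: Option<Arc<Mutex<dyn CatalogWrite + Send>>> =
            concrete.map(|c| c as Arc<Mutex<dyn CatalogWrite + Send>>);

        if let Some(catalog) = dynamic {
            catalog.lock().unwrap().start_directory("etc");
        }
    }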
@@ -122,7 +125,7 @@ impl PxarBackupStream {
 
     pub fn open<W: Write + Send + 'static>(
         dirname: &Path,
-        catalog: Arc<Mutex<CatalogWriter<W>>>,
+        catalog: Option<Arc<Mutex<CatalogWriter<W>>>>,
         options: crate::pxar::PxarCreateOptions,
         boundaries: Option<mpsc::Sender<InjectChunks>>,
         separate_payload_stream: bool,
|
@@ -192,7 +192,7 @@ async fn backup_directory<P: AsRef<Path>>(
     archive_name: &str,
     payload_target: Option<&str>,
     chunk_size: Option<usize>,
-    catalog: Arc<Mutex<CatalogWriter<TokioWriterAdapter<StdChannelWriter<Error>>>>>,
+    catalog: Option<Arc<Mutex<CatalogWriter<TokioWriterAdapter<StdChannelWriter<Error>>>>>>,
     pxar_create_options: pbs_client::pxar::PxarCreateOptions,
     upload_options: UploadOptions,
 ) -> Result<(BackupStats, Option<BackupStats>), Error> {
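The signature change above just threads the optional catalog through: callers running in split archive mode pass `None`, and everything downstream skips the catalog. A rough sketch of that pattern under simplified, assumed types (`CatalogWriter` here is a hypothetical stand-in, not the pbs_datastore type, and `backup_directory_sketch` is not the real client function):

    use std::io::Write;
    use std::sync::{Arc, Mutex};

    // Hypothetical, simplified stand-in for pbs_datastore's CatalogWriter<W>.
    struct CatalogWriter<W: Write> {
        out: W,
    }

    impl<W: Write> CatalogWriter<W> {
        fn new(out: W) -> Self {
            Self { out }
        }

        fn note(&mut self, line: &str) -> std::io::Result<()> {
            writeln!(self.out, "{line}")
        }
    }

    // `None` means "this backup writes no catalog", mirroring the new
    // Option<Arc<Mutex<CatalogWriter<...>>>> parameter of backup_directory.
    fn backup_directory_sketch<W: Write>(
        target: &str,
        catalog: Option<Arc<Mutex<CatalogWriter<W>>>>,
    ) -> std::io::Result<()> {
        if let Some(catalog) = catalog.as_ref() {
            catalog.lock().unwrap().note(&format!("directory {target}"))?;
        }
        // ... create and upload the pxar archive(s) here ...
        Ok(())
    }

    fn main() -> std::io::Result<()> {
        // Regular mode: one shared catalog writer for the whole run.
        let catalog = Some(Arc::new(Mutex::new(CatalogWriter::new(Vec::<u8>::new()))));
        backup_directory_sketch("root.pxar", catalog)?;

        // Split archive mode: no catalog at all.
        backup_directory_sketch::<Vec<u8>>("root.mpxar", None)?;
        Ok(())
    }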
@@ -1059,19 +1059,20 @@ async fn create_backup(
                 };
 
                 // start catalog upload on first use
-                if catalog.is_none() {
+                if catalog.is_none() && !detection_mode.is_data() && !detection_mode.is_metadata() {
                     let catalog_upload_res =
                         spawn_catalog_upload(client.clone(), crypto.mode == CryptMode::Encrypt)?;
                     catalog = Some(catalog_upload_res.catalog_writer);
                     catalog_result_rx = Some(catalog_upload_res.result);
                 }
-                let catalog = catalog.as_ref().unwrap();
 
                 log_file("directory", &filename, &target);
-                catalog
-                    .lock()
-                    .unwrap()
-                    .start_directory(std::ffi::CString::new(target.as_str())?.as_c_str())?;
+                if let Some(catalog) = catalog.as_ref() {
+                    catalog
+                        .lock()
+                        .unwrap()
+                        .start_directory(std::ffi::CString::new(target.as_str())?.as_c_str())?;
+                }
 
                 let mut previous_ref = None;
                 let max_cache_size = if detection_mode.is_metadata() {
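With the guard above, the catalog upload is only spawned in the regular mode; when the change detection mode is data or metadata (which the commit treats as the split archive modes), `catalog` stays `None` and every catalog call is skipped. A minimal sketch of that control flow, with a hypothetical `DetectionMode` enum and `Catalog` struct standing in for the client's actual types:

    use std::sync::{Arc, Mutex};

    // Hypothetical stand-in for the client's change detection mode.
    #[derive(Clone, Copy, PartialEq)]
    enum DetectionMode {
        Default,
        Data,
        Metadata,
    }

    impl DetectionMode {
        fn is_data(&self) -> bool {
            *self == DetectionMode::Data
        }
        fn is_metadata(&self) -> bool {
            *self == DetectionMode::Metadata
        }
    }

    // Simplified catalog stand-in.
    #[derive(Default)]
    struct Catalog {
        directories: Vec<String>,
    }

    fn main() {
        // Flip this to DetectionMode::Default to see the catalog being written.
        let detection_mode = DetectionMode::Metadata;
        let mut catalog: Option<Arc<Mutex<Catalog>>> = None;

        // Start the catalog only on first use and only for the regular mode,
        // mirroring `if catalog.is_none() && !detection_mode.is_data() && ...`.
        if catalog.is_none() && !detection_mode.is_data() && !detection_mode.is_metadata() {
            catalog = Some(Arc::new(Mutex::new(Catalog::default())));
        }

        // Catalog calls become conditional: nothing happens in split archive mode.
        if let Some(catalog) = catalog.as_ref() {
            catalog.lock().unwrap().directories.push("backup.pxar".to_string());
        }

        let entries = catalog.map(|c| c.lock().unwrap().directories.len()).unwrap_or(0);
        println!("catalog entries: {entries}");
    }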
@@ -1139,7 +1140,7 @@ async fn create_backup(
                     &target,
                     payload_target.as_deref(),
                     chunk_size_opt,
-                    catalog.clone(),
+                    catalog.as_ref().cloned(),
                     pxar_options,
                     upload_options,
                 )
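One small consequence of dropping the earlier `let catalog = catalog.as_ref().unwrap();` binding: at this call site the catalog is still the outer `Option`, so it is cloned with `.as_ref().cloned()`, which duplicates the inner `Arc` (a cheap reference-count bump) without consuming the `Option`. A tiny sketch of that idiom:

    use std::sync::Arc;

    fn main() {
        let catalog: Option<Arc<String>> = Some(Arc::new("catalog".to_string()));

        // Clone the Arc inside the Option without moving `catalog` itself,
        // so it can be reused on the next loop iteration.
        let for_this_call: Option<Arc<String>> = catalog.as_ref().cloned();

        assert!(catalog.is_some());
        assert_eq!(for_this_call.as_ref().map(Arc::strong_count), Some(2));
    }

Since `Option<Arc<T>>` itself implements `Clone`, a plain `.clone()` would behave the same here; `.as_ref().cloned()` simply makes the by-reference read explicit.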
@@ -1155,7 +1156,9 @@ async fn create_backup(
                     )?;
                 }
                 manifest.add_file(target, stats.size, stats.csum, crypto.mode)?;
-                catalog.lock().unwrap().end_directory()?;
+                if let Some(catalog) = catalog.as_ref() {
+                    catalog.lock().unwrap().end_directory()?;
+                }
             }
             (BackupSpecificationType::IMAGE, false) => {
                 log_file("image", &filename, &target);
|