Mirror of https://git.proxmox.com/git/proxmox-backup
file-restore: add 'format' and 'zstd' parameters to 'extract' command
If the target is stdout, the exact output format can now be specified via the new 'format' parameter of the restore daemon's 'extract' API. Note that extracting a pxar archive from a pxar source (container/host backups) does not work yet, since that would require re-encoding the data as pxar first.

Signed-off-by: Dominik Csapak <d.csapak@proxmox.com>
[ T: fixed missing proxmox-compression dependency ]
Signed-off-by: Thomas Lamprecht <t.lamprecht@proxmox.com>
Commit cc900ae2f7 (parent 702ff41471)
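The new 'format' parameter is typed as FileRestoreFormat from pbs-api-types (see the import changes below). The diff only shows its four variants being matched (Plain, Pxar, Zip, Tar); as a rough sketch of what such an api-type could look like — the derives and serde attributes here are assumptions, not the actual pbs-api-types definition:

// Sketch only: variant names are taken from the matches in this commit,
// the derives/attributes are assumed rather than copied from pbs-api-types.
use serde::{Deserialize, Serialize};

#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum FileRestoreFormat {
    /// Plain file (only possible for regular files)
    Plain,
    /// PXAR archive
    Pxar,
    /// ZIP archive
    Zip,
    /// TAR archive
    Tar,
}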
@@ -14,6 +14,7 @@ log = "0.4"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 tokio = { version = "1.6", features = [ "io-std", "rt", "rt-multi-thread", "time" ] }
+tokio-util = { version = "0.7", features = ["io"] }
 
 pxar = { version = "0.10.1", features = [ "tokio-io" ] }
 
@@ -25,6 +26,7 @@ proxmox-schema = { version = "1.3.1", features = [ "api-macro" ] }
 proxmox-time = "1"
 proxmox-uuid = "1"
 proxmox-sys = { version = "0.4", features = [ "logrotate" ] }
+proxmox-compression = "0.1"
 
 pbs-api-types = { path = "../pbs-api-types" }
 pbs-buildcfg = { path = "../pbs-buildcfg" }
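Both new dependencies serve the compressed stdout path: tokio-util's ReaderStream turns the AsyncRead end of a duplex pipe into a byte stream, and proxmox-compression's ZstdEncoder compresses that stream. A minimal sketch of the pattern, mirroring the main.rs hunk further down (the helper name is made up):

// Hypothetical helper mirroring the zstd branch added in main.rs below.
use futures::StreamExt;
use proxmox_compression::zstd::ZstdEncoder;
use tokio::io::{AsyncRead, AsyncWriteExt};
use tokio_util::io::ReaderStream;

async fn zstd_to_stdout<R: AsyncRead + Unpin>(reader: R) -> Result<(), anyhow::Error> {
    // Wrap the reader into a byte stream and push it through the zstd encoder.
    let mut encoded = ZstdEncoder::new(ReaderStream::new(reader))?;
    let mut stdout = tokio::io::stdout();
    while let Some(chunk) = encoded.next().await {
        stdout.write_all(&chunk?).await?;
    }
    Ok(())
}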
@@ -11,7 +11,7 @@ use serde_json::{json, Value};
 use proxmox_router::cli::*;
 use proxmox_schema::api;
 
-use pbs_api_types::{BackupDir, BackupNamespace};
+use pbs_api_types::{file_restore::FileRestoreFormat, BackupDir, BackupNamespace};
 use pbs_client::BackupRepository;
 use pbs_datastore::catalog::ArchiveEntry;
 use pbs_datastore::manifest::BackupManifest;
@@ -55,7 +55,8 @@ pub trait BlockRestoreDriver {
         details: SnapRestoreDetails,
         img_file: String,
         path: Vec<u8>,
-        pxar: bool,
+        format: Option<FileRestoreFormat>,
+        zstd: bool,
     ) -> Async<Result<Box<dyn tokio::io::AsyncRead + Unpin + Send>, Error>>;
 
     /// Return status of all running/mapped images, result value is (id, extra data), where id must
@@ -101,10 +102,13 @@ pub async fn data_extract(
     details: SnapRestoreDetails,
     img_file: String,
     path: Vec<u8>,
-    pxar: bool,
+    format: Option<FileRestoreFormat>,
+    zstd: bool,
 ) -> Result<Box<dyn tokio::io::AsyncRead + Send + Unpin>, Error> {
     let driver = driver.unwrap_or(DEFAULT_DRIVER).resolve();
-    driver.data_extract(details, img_file, path, pxar).await
+    driver
+        .data_extract(details, img_file, path, format, zstd)
+        .await
 }
 
 #[api(
@@ -10,7 +10,7 @@ use serde_json::json;
 
 use proxmox_sys::fs::lock_file;
 
-use pbs_api_types::{BackupDir, BackupNamespace};
+use pbs_api_types::{file_restore::FileRestoreFormat, BackupDir, BackupNamespace};
 use pbs_client::{BackupRepository, VsockClient, DEFAULT_VSOCK_PORT};
 use pbs_datastore::catalog::ArchiveEntry;
 
@@ -217,7 +217,8 @@ impl BlockRestoreDriver for QemuBlockDriver {
         details: SnapRestoreDetails,
         img_file: String,
         mut path: Vec<u8>,
-        pxar: bool,
+        format: Option<FileRestoreFormat>,
+        zstd: bool,
     ) -> Async<Result<Box<dyn tokio::io::AsyncRead + Unpin + Send>, Error>> {
         async move {
             let client = ensure_running(&details).await?;
@@ -226,13 +227,13 @@ impl BlockRestoreDriver for QemuBlockDriver {
             }
             let path = base64::encode(img_file.bytes().chain(path).collect::<Vec<u8>>());
             let (mut tx, rx) = tokio::io::duplex(1024 * 4096);
+            let mut data = json!({ "path": path, "zstd": zstd });
+            if let Some(format) = format {
+                data["format"] = serde_json::to_value(format)?;
+            }
             tokio::spawn(async move {
                 if let Err(err) = client
-                    .download(
-                        "api2/json/extract",
-                        Some(json!({ "path": path, "pxar": pxar })),
-                        &mut tx,
-                    )
+                    .download("api2/json/extract", Some(data), &mut tx)
                     .await
                 {
                     log::error!("reading file extraction stream failed - {}", err);
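With this change the client no longer sends a boolean 'pxar' flag; the request body for the in-VM 'extract' call now carries the optional format plus the zstd flag. Illustratively (the string rendering of the FileRestoreFormat variant is an assumption about its serde representation):

// Illustrative request body built by the hunk above; "tar" stands in for the
// serialized FileRestoreFormat::Tar variant (exact string rendering assumed).
let data = serde_json::json!({
    "path": "<base64 of img_file + path>",
    "zstd": true,
    "format": "tar",
});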
@@ -4,8 +4,11 @@ use std::path::PathBuf;
 use std::sync::Arc;
 
 use anyhow::{bail, format_err, Error};
+use futures::StreamExt;
 use serde_json::{json, Value};
+use tokio::io::AsyncWriteExt;
 
+use proxmox_compression::zstd::ZstdEncoder;
 use proxmox_router::cli::{
     complete_file_name, default_table_format_options, format_and_print_result_full,
     get_output_format, init_cli_logger, run_cli_command, CliCommand, CliCommandMap, CliEnvironment,
@@ -17,8 +20,8 @@ use proxmox_sys::fs::{create_path, CreateOptions};
 use pxar::accessor::aio::Accessor;
 use pxar::decoder::aio::Decoder;
 
-use pbs_api_types::{BackupDir, BackupNamespace, CryptMode};
+use pbs_api_types::{file_restore::FileRestoreFormat, BackupDir, BackupNamespace, CryptMode};
-use pbs_client::pxar::{create_zip, extract_sub_dir, extract_sub_dir_seq};
+use pbs_client::pxar::{create_tar, create_zip, extract_sub_dir, extract_sub_dir_seq};
 use pbs_client::tools::{
     complete_group_or_snapshot, complete_repository, connect, extract_repository_from_value,
     key_source::{
@@ -346,9 +349,19 @@ async fn list(
                 description: "Group/Snapshot path.",
             },
             "path": {
-                description: "Path to restore. Directories will be restored as .zip files if extracted to stdout.",
+                description: "Path to restore. Directories will be restored as archive files if extracted to stdout.",
                 type: String,
             },
+            "format": {
+                type: FileRestoreFormat,
+                optional: true,
+            },
+            "zstd": {
+                type: bool,
+                description: "If true, output will be zstd compressed.",
+                optional: true,
+                default: false,
+            },
             "base64": {
                 type: Boolean,
                 description: "If set, 'path' will be interpreted as base64 encoded.",
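Assuming the usual mapping of API schema properties to CLI options (not verified against the generated help text), the new parameters would surface on the command line roughly as:

proxmox-file-restore extract <snapshot> <path> --format tar --zstd true > restored.tar.zst

When neither option is given, directories extracted to stdout still default to a ZIP stream, as before.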
@@ -392,6 +405,8 @@ async fn extract(
     path: String,
     base64: bool,
     target: Option<String>,
+    format: Option<FileRestoreFormat>,
+    zstd: bool,
     param: Value,
 ) -> Result<(), Error> {
     let repo = extract_repository_from_value(&param)?;
@@ -450,7 +465,7 @@ async fn extract(
             let archive_size = reader.archive_size();
             let reader = LocalDynamicReadAt::new(reader);
             let decoder = Accessor::new(reader, archive_size).await?;
-            extract_to_target(decoder, &path, target).await?;
+            extract_to_target(decoder, &path, target, format, zstd).await?;
         }
         ExtractPath::VM(file, path) => {
             let details = SnapRestoreDetails {
@@ -466,7 +481,15 @@ async fn extract(
             };
 
             if let Some(mut target) = target {
-                let reader = data_extract(driver, details, file, path.clone(), true).await?;
+                let reader = data_extract(
+                    driver,
+                    details,
+                    file,
+                    path.clone(),
+                    Some(FileRestoreFormat::Pxar),
+                    false,
+                )
+                .await?;
                 let decoder = Decoder::from_tokio(reader).await?;
                 extract_sub_dir_seq(&target, decoder).await?;
 
@@ -477,7 +500,8 @@ async fn extract(
                     format_err!("unable to remove temporary .pxarexclude-cli file - {}", e)
                 })?;
             } else {
-                let mut reader = data_extract(driver, details, file, path.clone(), false).await?;
+                let mut reader =
+                    data_extract(driver, details, file, path.clone(), format, zstd).await?;
                 tokio::io::copy(&mut reader, &mut tokio::io::stdout()).await?;
             }
         }
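Note the asymmetry in the two hunks above: when a target directory is given, the client always requests an uncompressed pxar stream (Some(FileRestoreFormat::Pxar), false) and decodes it locally via extract_sub_dir_seq; only the stdout branch forwards the user-supplied format and zstd flags to the restore daemon.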
@@ -493,31 +517,77 @@ async fn extract_to_target<T>(
     decoder: Accessor<T>,
     path: &[u8],
     target: Option<PathBuf>,
+    format: Option<FileRestoreFormat>,
+    zstd: bool,
 ) -> Result<(), Error>
 where
     T: pxar::accessor::ReadAt + Clone + Send + Sync + Unpin + 'static,
 {
     let path = if path.is_empty() { b"/" } else { path };
-
-    let root = decoder.open_root().await?;
-    let file = root
-        .lookup(OsStr::from_bytes(path))
-        .await?
-        .ok_or_else(|| format_err!("error opening '{:?}'", path))?;
+    let path = OsStr::from_bytes(path);
 
     if let Some(target) = target {
-        extract_sub_dir(target, decoder, OsStr::from_bytes(path)).await?;
+        extract_sub_dir(target, decoder, path).await?;
     } else {
-        match file.kind() {
-            pxar::EntryKind::File { .. } => {
-                tokio::io::copy(&mut file.contents().await?, &mut tokio::io::stdout()).await?;
-            }
-            _ => {
-                create_zip(tokio::io::stdout(), decoder, OsStr::from_bytes(path)).await?;
-            }
-        }
+        extract_archive(decoder, path, format, zstd).await?;
     }
+
+    Ok(())
+}
+
+async fn extract_archive<T>(
+    decoder: Accessor<T>,
+    path: &OsStr,
+    format: Option<FileRestoreFormat>,
+    zstd: bool,
+) -> Result<(), Error>
+where
+    T: pxar::accessor::ReadAt + Clone + Send + Sync + Unpin + 'static,
+{
+    let path = path.to_owned();
+    let root = decoder.open_root().await?;
+    let file = root
+        .lookup(&path)
+        .await?
+        .ok_or_else(|| format_err!("error opening '{:?}'", &path))?;
+
+    let (mut writer, mut reader) = tokio::io::duplex(1024 * 1024);
+    if file.is_regular_file() {
+        match format {
+            Some(FileRestoreFormat::Plain) | None => {}
+            _ => bail!("cannot extract single files as archive"),
+        }
+        tokio::spawn(
+            async move { tokio::io::copy(&mut file.contents().await?, &mut writer).await },
+        );
+    } else {
+        match format {
+            Some(FileRestoreFormat::Pxar) => {
+                bail!("pxar target not supported for pxar source");
+            }
+            Some(FileRestoreFormat::Plain) => {
+                bail!("plain file not supported for non-regular files");
+            }
+            Some(FileRestoreFormat::Zip) | None => {
+                tokio::spawn(create_zip(writer, decoder, path));
+            }
+            Some(FileRestoreFormat::Tar) => {
+                tokio::spawn(create_tar(writer, decoder, path));
+            }
+        }
+    }
+
+    if zstd {
+        let mut zstdstream = ZstdEncoder::new(tokio_util::io::ReaderStream::new(reader))?;
+        let mut stdout = tokio::io::stdout();
+        while let Some(buf) = zstdstream.next().await {
+            let buf = buf?;
+            stdout.write_all(&buf).await?;
+        }
+    } else {
+        tokio::io::copy(&mut reader, &mut tokio::io::stdout()).await?;
+    }
 
     Ok(())
 }
 
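The core plumbing of the new extract_archive is a tokio duplex pipe: create_zip/create_tar (or a plain file copy) run in a spawned task and write into one end, while the calling task reads from the other end and either forwards the bytes directly to stdout or pushes them through the ZstdEncoder. A self-contained toy version of that producer/consumer pattern (names and buffer size are arbitrary, not taken from the commit):

use tokio::io::{AsyncReadExt, AsyncWriteExt};

// Toy illustration of the duplex-pipe pattern used by extract_archive above:
// a spawned task produces the archive bytes, the current task consumes them.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let (mut writer, mut reader) = tokio::io::duplex(64 * 1024);

    // Stand-in for create_zip / create_tar writing into the pipe.
    tokio::spawn(async move {
        writer.write_all(b"pretend this is a zip or tar stream").await
    });

    // Stand-in for the stdout / zstd branch reading from the pipe.
    let mut out = Vec::new();
    reader.read_to_end(&mut out).await?;
    println!("streamed {} bytes through the pipe", out.len());
    Ok(())
}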