server: notifications: send tape notifications via notification system

If the `notification-mode` parameter is set to `legacy-sendmail`, we
still use the new infrastructure, but ignore the notification config
and send through a hard-coded sendmail endpoint directly.

Signed-off-by: Lukas Wagner <l.wagner@proxmox.com>
Tested-by: Gabriel Goller <g.goller@proxmox.com>
Reviewed-by: Gabriel Goller <g.goller@proxmox.com>
Tested-by: Maximiliano Sandoval <m.sandoval@proxmox.com>
Signed-off-by: Thomas Lamprecht <t.lamprecht@proxmox.com>
Lukas Wagner 2024-04-23 13:52:12 +02:00 committed by Thomas Lamprecht
parent 1d2069d158
commit a4f1b175d1
14 changed files with 245 additions and 229 deletions
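
For orientation, below is a minimal, self-contained sketch of the dispatch the commit message describes. The names (TapeNotificationMode, lookup_user_email, send_sendmail_legacy_notification, send_notification) mirror those added in the diff that follows; the bodies here are simplified stand-ins, not the actual Proxmox Backup Server implementation.

// Sketch only: stand-ins for the real types/functions shown in the diff below.
enum TapeNotificationMode {
    LegacySendmail { notify_user: String },
    NotificationSystem,
}

struct Notification; // stand-in for proxmox_notify::Notification

fn lookup_user_email(user: &str) -> Option<String> {
    // stand-in: PBS resolves this from the user configuration
    Some(format!("{user}@example.com"))
}

fn send_sendmail_legacy_notification(_n: Notification, email: &str) -> Result<(), String> {
    println!("legacy-sendmail: mail {email} via the hard-coded sendmail endpoint");
    Ok(())
}

fn send_notification(_n: Notification) -> Result<(), String> {
    println!("route through the notification system (matchers and targets from the config)");
    Ok(())
}

fn send_tape_notification(mode: &TapeNotificationMode, notification: Notification) -> Result<(), String> {
    match mode {
        // `legacy-sendmail`: the notification is still built from the new
        // template infrastructure, but the notification config is ignored
        // and the mail goes straight to the resolved notify-user.
        TapeNotificationMode::LegacySendmail { notify_user } => {
            if let Some(email) = lookup_user_email(notify_user) {
                send_sendmail_legacy_notification(notification, &email)?;
            }
            Ok(())
        }
        // Default: hand the notification to the notification system.
        TapeNotificationMode::NotificationSystem => send_notification(notification),
    }
}

fn main() {
    let mode = TapeNotificationMode::LegacySendmail { notify_user: "root@pam".into() };
    send_tape_notification(&mode, Notification).unwrap();
}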

View File

@@ -57,6 +57,12 @@ usr/share/proxmox-backup/templates/default/sync-err-body.txt.hbs
usr/share/proxmox-backup/templates/default/sync-ok-body.txt.hbs
usr/share/proxmox-backup/templates/default/sync-err-subject.txt.hbs
usr/share/proxmox-backup/templates/default/sync-ok-subject.txt.hbs
usr/share/proxmox-backup/templates/default/tape-backup-err-body.txt.hbs
usr/share/proxmox-backup/templates/default/tape-backup-err-subject.txt.hbs
usr/share/proxmox-backup/templates/default/tape-backup-ok-body.txt.hbs
usr/share/proxmox-backup/templates/default/tape-backup-ok-subject.txt.hbs
usr/share/proxmox-backup/templates/default/tape-load-body.txt.hbs
usr/share/proxmox-backup/templates/default/tape-load-subject.txt.hbs
usr/share/proxmox-backup/templates/default/test-body.txt.hbs
usr/share/proxmox-backup/templates/default/test-body.html.hbs
usr/share/proxmox-backup/templates/default/test-subject.txt.hbs

View File

@@ -10,7 +10,7 @@ use proxmox_sys::{task_log, task_warn, WorkerTaskContext};
use pbs_api_types::{
print_ns_and_snapshot, print_store_and_ns, Authid, MediaPoolConfig, Operation,
TapeBackupJobConfig, TapeBackupJobSetup, TapeBackupJobStatus, Userid, JOB_ID_SCHEMA,
TapeBackupJobConfig, TapeBackupJobSetup, TapeBackupJobStatus, JOB_ID_SCHEMA,
PRIV_DATASTORE_READ, PRIV_TAPE_AUDIT, PRIV_TAPE_WRITE, UPID_SCHEMA,
};
@@ -19,10 +19,11 @@ use pbs_datastore::backup_info::{BackupDir, BackupInfo};
use pbs_datastore::{DataStore, StoreProgress};
use proxmox_rest_server::WorkerTask;
use crate::tape::TapeNotificationMode;
use crate::{
server::{
jobstate::{compute_schedule_status, Job, JobState},
lookup_user_email, TapeBackupJobSummary,
TapeBackupJobSummary,
},
tape::{
changer::update_changer_online_status,
@@ -162,12 +163,6 @@ pub fn do_tape_backup_job(
Some(lock_tape_device(&drive_config, &setup.drive)?)
};
let notify_user = setup
.notify_user
.as_ref()
.unwrap_or_else(|| Userid::root_userid());
let email = lookup_user_email(notify_user);
let upid_str = WorkerTask::new_thread(
&worker_type,
Some(job_id.clone()),
@@ -206,7 +201,6 @@ pub fn do_tape_backup_job(
datastore,
&pool_config,
&setup,
email.clone(),
&mut summary,
false,
)
@@ -214,16 +208,13 @@
let status = worker.create_state(&job_result);
if let Some(email) = email {
if let Err(err) = crate::server::send_tape_backup_status(
&email,
Some(job.jobname()),
&setup,
&job_result,
summary,
) {
eprintln!("send tape backup notification failed: {}", err);
}
eprintln!("send tape backup notification failed: {err}");
}
if let Err(err) = job.finish(status) {
@@ -328,12 +319,6 @@ pub fn backup(
let job_id = format!("{}:{}:{}", setup.store, setup.pool, setup.drive);
let notify_user = setup
.notify_user
.as_ref()
.unwrap_or_else(|| Userid::root_userid());
let email = lookup_user_email(notify_user);
let upid_str = WorkerTask::new_thread(
"tape-backup",
Some(job_id),
@@ -349,21 +334,14 @@
datastore,
&pool_config,
&setup,
email.clone(),
&mut summary,
force_media_set,
);
if let Some(email) = email {
if let Err(err) = crate::server::send_tape_backup_status(
&email,
None,
&setup,
&job_result,
summary,
) {
eprintln!("send tape backup notification failed: {}", err);
}
if let Err(err) =
crate::server::send_tape_backup_status(None, &setup, &job_result, summary)
{
eprintln!("send tape backup notification failed: {err}");
}
// ignore errors
@@ -386,7 +364,6 @@ fn backup_worker(
datastore: Arc<DataStore>,
pool_config: &MediaPoolConfig,
setup: &TapeBackupJobSetup,
email: Option<String>,
summary: &mut TapeBackupJobSummary,
force_media_set: bool,
) -> Result<(), Error> {
@@ -399,9 +376,16 @@
let ns_magic = !root_namespace.is_root() || setup.max_depth != Some(0);
let pool = MediaPool::with_config(TAPE_STATUS_DIR, pool_config, changer_name, false)?;
let notification_mode = TapeNotificationMode::from(setup);
let mut pool_writer =
PoolWriter::new(pool, &setup.drive, worker, email, force_media_set, ns_magic)?;
let mut pool_writer = PoolWriter::new(
pool,
&setup.drive,
worker,
notification_mode,
force_media_set,
ns_magic,
)?;
let mut group_list = Vec::new();
let namespaces = datastore.recursive_iter_backup_ns_ok(root_namespace, setup.max_depth)?;

View File

@@ -18,9 +18,10 @@ use proxmox_uuid::Uuid;
use pbs_api_types::{
parse_ns_and_snapshot, print_ns_and_snapshot, Authid, BackupDir, BackupNamespace, CryptMode,
Operation, TapeRestoreNamespace, Userid, DATASTORE_MAP_ARRAY_SCHEMA, DATASTORE_MAP_LIST_SCHEMA,
DRIVE_NAME_SCHEMA, MAX_NAMESPACE_DEPTH, PRIV_DATASTORE_BACKUP, PRIV_DATASTORE_MODIFY,
PRIV_TAPE_READ, TAPE_RESTORE_NAMESPACE_SCHEMA, TAPE_RESTORE_SNAPSHOT_SCHEMA, UPID_SCHEMA,
NotificationMode, Operation, TapeRestoreNamespace, Userid, DATASTORE_MAP_ARRAY_SCHEMA,
DATASTORE_MAP_LIST_SCHEMA, DRIVE_NAME_SCHEMA, MAX_NAMESPACE_DEPTH, PRIV_DATASTORE_BACKUP,
PRIV_DATASTORE_MODIFY, PRIV_TAPE_READ, TAPE_RESTORE_NAMESPACE_SCHEMA,
TAPE_RESTORE_SNAPSHOT_SCHEMA, UPID_SCHEMA,
};
use pbs_config::CachedUserInfo;
use pbs_datastore::dynamic_index::DynamicIndexReader;
@@ -34,8 +35,8 @@ use pbs_tape::{
use proxmox_rest_server::WorkerTask;
use crate::backup::check_ns_modification_privs;
use crate::tape::TapeNotificationMode;
use crate::{
server::lookup_user_email,
tape::{
drive::{lock_tape_device, request_and_load_media, set_tape_device_state, TapeDriver},
file_formats::{
@@ -289,6 +290,10 @@ pub const ROUTER: Router = Router::new().post(&API_METHOD_RESTORE);
type: Userid,
optional: true,
},
"notification-mode": {
type: NotificationMode,
optional: true,
},
"snapshots": {
description: "List of snapshots.",
type: Array,
@@ -322,6 +327,7 @@ pub fn restore(
namespaces: Option<Vec<String>>,
media_set: String,
notify_user: Option<Userid>,
notification_mode: Option<NotificationMode>,
snapshots: Option<Vec<String>>,
owner: Option<Authid>,
rpcenv: &mut dyn RpcEnvironment,
@@ -329,6 +335,8 @@
let auth_id: Authid = rpcenv.get_auth_id().unwrap().parse()?;
let user_info = CachedUserInfo::new()?;
let notification_mode = TapeNotificationMode::from((notify_user, notification_mode));
let mut store_map = DataStoreMap::try_from(store)
.map_err(|err| format_err!("cannot parse store mapping: {err}"))?;
let namespaces = if let Some(maps) = namespaces {
@@ -394,11 +402,6 @@
let restore_owner = owner.as_ref().unwrap_or(&auth_id);
let email = notify_user
.as_ref()
.and_then(lookup_user_email)
.or_else(|| lookup_user_email(&auth_id.clone().into()));
task_log!(worker, "Mediaset '{media_set}'");
task_log!(worker, "Pool: {pool}");
@@ -412,7 +415,7 @@
&drive,
store_map,
restore_owner,
email,
&notification_mode,
user_info,
&auth_id,
)
@@ -425,7 +428,7 @@
&drive,
store_map,
restore_owner,
email,
&notification_mode,
&auth_id,
)
};
@@ -452,7 +455,7 @@ fn restore_full_worker(
drive_name: &str,
store_map: DataStoreMap,
restore_owner: &Authid,
email: Option<String>,
notification_mode: &TapeNotificationMode,
auth_id: &Authid,
) -> Result<(), Error> {
let members = inventory.compute_media_set_members(&media_set_uuid)?;
@@ -519,7 +522,7 @@
&store_map,
&mut checked_chunks_map,
restore_owner,
&email,
notification_mode,
auth_id,
)?;
}
@@ -635,7 +638,7 @@ fn restore_list_worker(
drive_name: &str,
store_map: DataStoreMap,
restore_owner: &Authid,
email: Option<String>,
notification_mode: &TapeNotificationMode,
user_info: Arc<CachedUserInfo>,
auth_id: &Authid,
) -> Result<(), Error> {
@@ -779,7 +782,7 @@
&drive_config,
drive_name,
&media_id.label,
&email,
notification_mode,
)?;
file_list.sort_unstable();
@@ -833,7 +836,7 @@
&drive_config,
drive_name,
&media_id.label,
&email,
notification_mode,
)?;
restore_file_chunk_map(worker.clone(), &mut drive, &store_map, file_chunk_map)?;
}
@@ -1241,7 +1244,7 @@ pub fn request_and_restore_media(
store_map: &DataStoreMap,
checked_chunks_map: &mut HashMap<String, HashSet<[u8; 32]>>,
restore_owner: &Authid,
email: &Option<String>,
notification_mode: &TapeNotificationMode,
auth_id: &Authid,
) -> Result<(), Error> {
let media_set_uuid = match media_id.media_set_label {
@@ -1249,8 +1252,13 @@
Some(ref set) => &set.uuid,
};
let (mut drive, info) =
request_and_load_media(&worker, drive_config, drive_name, &media_id.label, email)?;
let (mut drive, info) = request_and_load_media(
&worker,
drive_config,
drive_name,
&media_id.label,
notification_mode,
)?;
match info.media_set_label {
None => {

View File

@@ -1,19 +1,17 @@
use anyhow::Error;
use const_format::concatcp;
use serde_json::json;
use std::collections::HashMap;
use std::path::Path;
use std::time::{Duration, Instant};
use handlebars::{Handlebars, TemplateError};
use anyhow::Error;
use const_format::concatcp;
use nix::unistd::Uid;
use serde_json::json;
use proxmox_lang::try_block;
use proxmox_notify::context::pbs::PBS_CONTEXT;
use proxmox_schema::ApiType;
use proxmox_sys::email::sendmail;
use proxmox_sys::fs::{create_path, CreateOptions};
use crate::tape::TapeNotificationMode;
use pbs_api_types::{
APTUpdateInfo, DataStoreConfig, DatastoreNotify, GarbageCollectionStatus, NotificationMode,
Notify, SyncJobConfig, TapeBackupJobSetup, User, Userid, VerificationJobConfig,
@@ -23,92 +21,6 @@ use proxmox_notify::{Endpoint, Notification, Severity};
const SPOOL_DIR: &str = concatcp!(pbs_buildcfg::PROXMOX_BACKUP_STATE_DIR, "/notifications");
const TAPE_BACKUP_OK_TEMPLATE: &str = r###"
{{#if id ~}}
Job ID: {{id}}
{{/if~}}
Datastore: {{job.store}}
Tape Pool: {{job.pool}}
Tape Drive: {{job.drive}}
{{#if snapshot-list ~}}
Snapshots included:
{{#each snapshot-list~}}
{{this}}
{{/each~}}
{{/if}}
Duration: {{duration}}
{{#if used-tapes }}
Used Tapes:
{{#each used-tapes~}}
{{this}}
{{/each~}}
{{/if}}
Tape Backup successful.
Please visit the web interface for further details:
<https://{{fqdn}}:{{port}}/#DataStore-{{job.store}}>
"###;
const TAPE_BACKUP_ERR_TEMPLATE: &str = r###"
{{#if id ~}}
Job ID: {{id}}
{{/if~}}
Datastore: {{job.store}}
Tape Pool: {{job.pool}}
Tape Drive: {{job.drive}}
{{#if snapshot-list ~}}
Snapshots included:
{{#each snapshot-list~}}
{{this}}
{{/each~}}
{{/if}}
{{#if used-tapes }}
Used Tapes:
{{#each used-tapes~}}
{{this}}
{{/each~}}
{{/if}}
Tape Backup failed: {{error}}
Please visit the web interface for further details:
<https://{{fqdn}}:{{port}}/#pbsServerAdministration:tasks>
"###;
lazy_static::lazy_static! {
static ref HANDLEBARS: Handlebars<'static> = {
let mut hb = Handlebars::new();
let result: Result<(), TemplateError> = try_block!({
hb.set_strict_mode(true);
hb.register_escape_fn(handlebars::no_escape);
hb.register_template_string("tape_backup_ok_template", TAPE_BACKUP_OK_TEMPLATE)?;
hb.register_template_string("tape_backup_err_template", TAPE_BACKUP_ERR_TEMPLATE)?;
Ok(())
});
if let Err(err) = result {
eprintln!("error during template registration: {err}");
}
hb
};
}
/// Initialize the notification system by setting context in proxmox_notify
pub fn init() -> Result<(), Error> {
proxmox_notify::context::set_context(&PBS_CONTEXT);
@@ -229,30 +141,6 @@ pub struct TapeBackupJobSummary {
pub used_tapes: Option<Vec<String>>,
}
fn send_job_status_mail(email: &str, subject: &str, text: &str) -> Result<(), Error> {
let (config, _) = crate::config::node::config()?;
let from = config.email_from;
// NOTE: some (web)mailers have big problems displaying text mails, so include html as well
let escaped_text = handlebars::html_escape(text);
let html = format!("<html><body><pre>\n{escaped_text}\n<pre>");
let nodename = proxmox_sys::nodename();
let author = format!("Proxmox Backup Server - {nodename}");
sendmail(
&[email],
subject,
Some(text),
Some(&html),
from.as_deref(),
Some(&author),
)?;
Ok(())
}
pub fn send_gc_status(
datastore: &str,
status: &GarbageCollectionStatus,
@@ -463,7 +351,6 @@ pub fn send_sync_status(job: &SyncJobConfig, result: &Result<(), Error>) -> Resu
}
pub fn send_tape_backup_status(
email: &str,
id: Option<&str>,
job: &TapeBackupJobSetup,
result: &Result<(), Error>,
@@ -478,62 +365,86 @@ pub fn send_tape_backup_status(
"id": id,
"snapshot-list": summary.snapshot_list,
"used-tapes": summary.used_tapes,
"duration": duration.to_string(),
"job-duration": duration.to_string(),
});
let text = match result {
Ok(()) => HANDLEBARS.render("tape_backup_ok_template", &data)?,
let (template, severity) = match result {
Ok(()) => ("tape-backup-ok", Severity::Info),
Err(err) => {
data["error"] = err.to_string().into();
HANDLEBARS.render("tape_backup_err_template", &data)?
("tape-backup-err", Severity::Error)
}
};
let subject = match (result, id) {
(Ok(()), Some(id)) => format!("Tape Backup '{id}' datastore '{}' successful", job.store,),
(Ok(()), None) => format!("Tape Backup datastore '{}' successful", job.store,),
(Err(_), Some(id)) => format!("Tape Backup '{id}' datastore '{}' failed", job.store,),
(Err(_), None) => format!("Tape Backup datastore '{}' failed", job.store,),
};
let mut metadata = HashMap::from([
("datastore".into(), job.store.clone()),
("media-pool".into(), job.pool.clone()),
("hostname".into(), proxmox_sys::nodename().into()),
("type".into(), "tape-backup".into()),
]);
send_job_status_mail(email, &subject, &text)?;
if let Some(id) = id {
metadata.insert("job-id".into(), id.into());
}
let notification = Notification::from_template(severity, template, data, metadata);
let mode = TapeNotificationMode::from(job);
match &mode {
TapeNotificationMode::LegacySendmail { notify_user } => {
let email = lookup_user_email(notify_user);
if let Some(email) = email {
send_sendmail_legacy_notification(notification, &email)?;
}
}
TapeNotificationMode::NotificationSystem => {
send_notification(notification)?;
}
}
Ok(())
}
/// Send email to a person to request a manual media change
pub fn send_load_media_email(
pub fn send_load_media_notification(
mode: &TapeNotificationMode,
changer: bool,
device: &str,
label_text: &str,
to: &str,
reason: Option<String>,
) -> Result<(), Error> {
use std::fmt::Write as _;
let device_type = if changer { "changer" } else { "drive" };
let subject = format!("Load Media '{label_text}' request for {device_type} '{device}'");
let data = json!({
"device-type": device_type,
"device": device,
"label-text": label_text,
"reason": reason,
"is-changer": changer,
});
let mut text = String::new();
let metadata = HashMap::from([
("hostname".into(), proxmox_sys::nodename().into()),
("type".into(), "tape-load".into()),
]);
let notification = Notification::from_template(Severity::Notice, "tape-load", data, metadata);
if let Some(reason) = reason {
let _ = write!(
text,
"The {device_type} has the wrong or no tape(s) inserted. Error:\n{reason}\n\n"
);
match mode {
TapeNotificationMode::LegacySendmail { notify_user } => {
let email = lookup_user_email(notify_user);
if let Some(email) = email {
send_sendmail_legacy_notification(notification, &email)?;
}
}
TapeNotificationMode::NotificationSystem => {
send_notification(notification)?;
}
}
if changer {
text.push_str("Please insert the requested media into the changer.\n\n");
let _ = writeln!(text, "Changer: {device}");
} else {
text.push_str("Please insert the requested media into the backup drive.\n\n");
let _ = writeln!(text, "Drive: {device}");
}
let _ = writeln!(text, "Media: {label_text}");
send_job_status_mail(to, &subject, &text)
Ok(())
}
fn get_server_url() -> (String, usize) {
@@ -653,9 +564,3 @@ pub fn lookup_datastore_notify_settings(
(email, notify, notification_mode)
}
#[test]
fn test_template_register() {
assert!(HANDLEBARS.has_template("tape_backup_ok_template"));
assert!(HANDLEBARS.has_template("tape_backup_err_template"));
}

View File

@@ -27,8 +27,9 @@ use pbs_key_config::KeyConfig;
use pbs_tape::{sg_tape::TapeAlertFlags, BlockReadError, MediaContentHeader, TapeRead, TapeWrite};
use crate::tape::TapeNotificationMode;
use crate::{
server::send_load_media_email,
server::send_load_media_notification,
tape::{
changer::{MediaChange, MtxMediaChanger},
drive::virtual_tape::open_virtual_tape_drive,
@@ -368,7 +369,7 @@ pub fn request_and_load_media(
config: &SectionConfigData,
drive: &str,
label: &MediaLabel,
notify_email: &Option<String>,
notification_mode: &TapeNotificationMode,
) -> Result<(Box<dyn TapeDriver>, MediaId), Error> {
let check_label = |handle: &mut dyn TapeDriver, uuid: &proxmox_uuid::Uuid| {
if let Ok((Some(media_id), _)) = handle.read_label() {
@@ -428,15 +429,14 @@
device_type,
device
);
if let Some(to) = notify_email {
send_load_media_email(
send_load_media_notification(
notification_mode,
changer.is_some(),
device,
&label_text,
to,
Some(new.to_string()),
)?;
}
*old = new;
}
Ok(())

View File

@@ -1,6 +1,7 @@
//! Magnetic tape backup
use anyhow::{format_err, Error};
use proxmox_auth_api::types::Userid;
use proxmox_sys::fs::{create_path, CreateOptions};
@@ -29,6 +30,7 @@ pub use media_catalog::*;
mod media_catalog_cache;
pub use media_catalog_cache::*;
use pbs_api_types::{NotificationMode, TapeBackupJobSetup};
mod pool_writer;
pub use pool_writer::*;
@@ -128,3 +130,28 @@ pub fn create_changer_state_dir() -> Result<(), Error> {
Ok(())
}
#[derive(Clone)]
pub enum TapeNotificationMode {
LegacySendmail { notify_user: Userid },
NotificationSystem,
}
impl From<&TapeBackupJobSetup> for TapeNotificationMode {
fn from(value: &TapeBackupJobSetup) -> Self {
Self::from((value.notify_user.clone(), value.notification_mode.clone()))
}
}
impl From<(Option<Userid>, Option<NotificationMode>)> for TapeNotificationMode {
fn from(value: (Option<Userid>, Option<NotificationMode>)) -> Self {
match value.1.as_ref().unwrap_or(&Default::default()) {
NotificationMode::LegacySendmail => {
let notify_user = value.0.as_ref().unwrap_or(Userid::root_userid()).clone();
Self::LegacySendmail { notify_user }
}
NotificationMode::NotificationSystem => Self::NotificationSystem,
}
}
}

View File

@@ -25,7 +25,8 @@ use crate::tape::{
file_formats::{
tape_write_catalog, tape_write_snapshot_archive, ChunkArchiveWriter, MediaSetLabel,
},
MediaCatalog, MediaId, MediaPool, COMMIT_BLOCK_SIZE, MAX_CHUNK_ARCHIVE_SIZE, TAPE_STATUS_DIR,
MediaCatalog, MediaId, MediaPool, TapeNotificationMode, COMMIT_BLOCK_SIZE,
MAX_CHUNK_ARCHIVE_SIZE, TAPE_STATUS_DIR,
};
use super::file_formats::{
@@ -52,7 +53,7 @@ pub struct PoolWriter {
drive_name: String,
status: Option<PoolWriterState>,
catalog_set: Arc<Mutex<CatalogSet>>,
notify_email: Option<String>,
notification_mode: TapeNotificationMode,
ns_magic: bool,
used_tapes: HashSet<Uuid>,
}
@@ -62,7 +63,7 @@ impl PoolWriter {
mut pool: MediaPool,
drive_name: &str,
worker: &WorkerTask,
notify_email: Option<String>,
notification_mode: TapeNotificationMode,
force_media_set: bool,
ns_magic: bool,
) -> Result<Self, Error> {
@@ -90,7 +91,7 @@ impl PoolWriter {
drive_name: drive_name.to_string(),
status: None,
catalog_set: Arc::new(Mutex::new(catalog_set)),
notify_email,
notification_mode,
ns_magic,
used_tapes: HashSet::new(),
})
@@ -248,7 +249,7 @@ impl PoolWriter {
&drive_config,
&self.drive_name,
media.label(),
&self.notify_email,
&self.notification_mode,
)?;
// test for critical tape alert flags

View File

@@ -17,6 +17,12 @@ NOTIFICATION_TEMPLATES= \
default/sync-ok-body.txt.hbs \
default/sync-err-subject.txt.hbs \
default/sync-ok-subject.txt.hbs \
default/tape-backup-err-body.txt.hbs \
default/tape-backup-err-subject.txt.hbs \
default/tape-backup-ok-body.txt.hbs \
default/tape-backup-ok-subject.txt.hbs \
default/tape-load-body.txt.hbs \
default/tape-load-subject.txt.hbs \
default/test-body.txt.hbs \
default/test-body.html.hbs \
default/test-subject.txt.hbs \

View File

@@ -0,0 +1,26 @@
{{#if id ~}}
Job ID: {{id}}
{{/if~}}
Datastore: {{job.store}}
Tape Pool: {{job.pool}}
Tape Drive: {{job.drive}}
{{#if snapshot-list ~}}
Snapshots included:
{{#each snapshot-list~}}
{{this}}
{{/each~}}
{{/if}}
{{#if used-tapes }}
Used Tapes:
{{#each used-tapes~}}
{{this}}
{{/each~}}
{{/if}}
Tape Backup failed: {{error}}
Please visit the web interface for further details:
<https://{{fqdn}}:{{port}}/#pbsServerAdministration:tasks>

View File

@@ -0,0 +1,5 @@
{{#if id~}}
Tape Backup '{{ id }}' datastore '{{ job.store }}' failed
{{else~}}
Tape Backup datastore '{{ job.store }}' failed
{{/if}}

View File

@@ -0,0 +1,27 @@
{{#if id ~}}
Job ID: {{id}}
{{/if~}}
Datastore: {{job.store}}
Tape Pool: {{job.pool}}
Tape Drive: {{job.drive}}
{{#if snapshot-list ~}}
Snapshots included:
{{#each snapshot-list~}}
{{this}}
{{/each~}}
{{/if}}
Duration: {{job-duration}}
{{#if used-tapes }}
Used Tapes:
{{#each used-tapes~}}
{{this}}
{{/each~}}
{{/if}}
Tape Backup successful.
Please visit the web interface for further details:
<https://{{fqdn}}:{{port}}/#DataStore-{{job.store}}>

View File

@@ -0,0 +1,5 @@
{{#if id~}}
Tape Backup '{{ id }}' datastore '{{ job.store }}' successful
{{else~}}
Tape Backup datastore '{{ job.store }}' successful
{{/if}}

View File

@@ -0,0 +1,15 @@
{{#if reason~}}
The {{ device-type }} has the wrong or no tape(s) inserted. Error:
{{ reason }}
{{/if~}}
{{#if is-changer~}}
Please insert the requested media into the changer.
Changer: {{ device }}
{{else}}
Please insert the requested media into the backup drive.
Drive: {{ device }}
{{/if}}
Media: {{ label-text }}

View File

@@ -0,0 +1 @@
Load Media '{{ label-text }}' request for {{ device-type }} '{{ device }}'