diff --git a/pbs-datastore/examples/ls-snapshots.rs b/pbs-datastore/examples/ls-snapshots.rs
index 977f2cf8..2eeea489 100644
--- a/pbs-datastore/examples/ls-snapshots.rs
+++ b/pbs-datastore/examples/ls-snapshots.rs
@@ -18,7 +18,7 @@ fn run() -> Result<(), Error> {
         None => None,
     };
 
-    let store = unsafe { DataStore::open_path("", &base, None)? };
+    let store = unsafe { DataStore::open_path("", base, None)? };
 
     for ns in store.recursive_iter_backup_ns_ok(Default::default(), max_depth)? {
         println!("found namespace store:/{}", ns);
diff --git a/pbs-datastore/src/catalog.rs b/pbs-datastore/src/catalog.rs
index 2b75a804..11c14b64 100644
--- a/pbs-datastore/src/catalog.rs
+++ b/pbs-datastore/src/catalog.rs
@@ -644,7 +644,7 @@ impl CatalogReader {
             }
             CatalogEntryType::File => {
                 let mut mtime_string = mtime.to_string();
-                if let Ok(s) = proxmox_time::strftime_local("%FT%TZ", mtime as i64) {
+                if let Ok(s) = proxmox_time::strftime_local("%FT%TZ", mtime) {
                     mtime_string = s;
                 }
 
diff --git a/pbs-datastore/src/chunk_stat.rs b/pbs-datastore/src/chunk_stat.rs
index eb7d55d5..3d7e75e1 100644
--- a/pbs-datastore/src/chunk_stat.rs
+++ b/pbs-datastore/src/chunk_stat.rs
@@ -27,8 +27,8 @@ impl ChunkStat {
 impl std::fmt::Debug for ChunkStat {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
         let avg = ((self.size as f64) / (self.chunk_count as f64)) as usize;
-        let compression = (self.compressed_size * 100) / (self.size as u64);
-        let rate = (self.disk_size * 100) / (self.size as u64);
+        let compression = (self.compressed_size * 100) / self.size;
+        let rate = (self.disk_size * 100) / self.size;
 
         let elapsed = self.start_time.elapsed().unwrap();
         let elapsed = (elapsed.as_secs() as f64) + (elapsed.subsec_millis() as f64) / 1000.0;
diff --git a/pbs-datastore/src/chunk_store.rs b/pbs-datastore/src/chunk_store.rs
index 9f9fb26f..fb282749 100644
--- a/pbs-datastore/src/chunk_store.rs
+++ b/pbs-datastore/src/chunk_store.rs
@@ -183,7 +183,7 @@ impl ChunkStore {
 
         let lockfile_path = Self::lockfile_path(&base);
 
-        let locker = ProcessLocker::new(&lockfile_path)?;
+        let locker = ProcessLocker::new(lockfile_path)?;
 
         Ok(ChunkStore {
             name: name.to_owned(),
diff --git a/pbs-datastore/src/datastore.rs b/pbs-datastore/src/datastore.rs
index 7718133c..7372e002 100644
--- a/pbs-datastore/src/datastore.rs
+++ b/pbs-datastore/src/datastore.rs
@@ -855,7 +855,7 @@ impl DataStore {
 
         use walkdir::WalkDir;
 
-        let walker = WalkDir::new(&base).into_iter();
+        let walker = WalkDir::new(base).into_iter();
 
         // make sure we skip .chunks (and other hidden files to keep it simple)
         fn is_hidden(entry: &walkdir::DirEntry) -> bool {
diff --git a/pbs-datastore/src/dynamic_index.rs b/pbs-datastore/src/dynamic_index.rs
index 3743b89d..71a5082e 100644
--- a/pbs-datastore/src/dynamic_index.rs
+++ b/pbs-datastore/src/dynamic_index.rs
@@ -247,7 +247,7 @@ impl IndexFile for DynamicIndexReader {
     }
 
     fn index_size(&self) -> usize {
-        self.size as usize
+        self.size
     }
 
     fn chunk_from_offset(&self, offset: u64) -> Option<(usize, u64)> {
diff --git a/pbs-datastore/src/paperkey.rs b/pbs-datastore/src/paperkey.rs
index f95cec0b..14b62264 100644
--- a/pbs-datastore/src/paperkey.rs
+++ b/pbs-datastore/src/paperkey.rs
@@ -130,7 +130,7 @@ fn paperkey_html(
             writeln!(output, "</p>")?;
 
             let qr_code = generate_qr_code("svg", block)?;
-            let qr_code = base64::encode_config(&qr_code, base64::STANDARD_NO_PAD);
+            let qr_code = base64::encode_config(qr_code, base64::STANDARD_NO_PAD);
 
             writeln!(output, "<center>")?;
             writeln!(output, "<img")?;
@@ ... @@ fn paperkey_html(
     writeln!(output, "</p>")?;
 
     let qr_code = generate_qr_code("svg", lines)?;
-    let qr_code = base64::encode_config(&qr_code, base64::STANDARD_NO_PAD);
+    let qr_code = base64::encode_config(qr_code, base64::STANDARD_NO_PAD);
 
     writeln!(output, "<center>")?;
     writeln!(output, "<img")?;
diff --git a/pbs-datastore/src/task_tracking.rs b/pbs-datastore/src/task_tracking.rs
--- a/pbs-datastore/src/task_tracking.rs
+++ b/pbs-datastore/src/task_tracking.rs
@@ ... @@ fn open_lock_file(name: &str) -> Result<(std::fs::File, CreateOptions), Error> {
     let timeout = std::time::Duration::new(10, 0);
 
     Ok((
-        open_file_locked(&lock_path, timeout, true, options.clone())?,
+        open_file_locked(lock_path, timeout, true, options.clone())?,
         options,
     ))
 }
@@ -64,7 +64,7 @@ fn get_active_operations_do(
         None
     };
 
-    let data = match file_read_optional_string(&path)? {
+    let data = match file_read_optional_string(path)? {
         Some(data) => serde_json::from_str::<Vec<TaskOperations>>(&data)?
             .iter()
             .filter_map(