Main: Add spinner printing out hashing throughput

networkException 2025-01-03 17:48:16 +01:00
parent 9056adb574
commit 24398c0994
Signed by: networkException
GPG key ID: E3877443AE684391


@@ -6,9 +6,11 @@
 use chrono::Local;
 use clap::Parser;
-use indicatif::{ParallelProgressIterator, ProgressBar, ProgressStyle, ProgressFinish};
+use indicatif::{MultiProgress, ParallelProgressIterator, ProgressBar, ProgressFinish, ProgressStyle};
 use rayon::ThreadPoolBuilder;
 use rayon::iter::{ParallelIterator, IntoParallelRefIterator};
 use core::option::Option::None;
+use core::time::Duration;
 use std::fs::{File, OpenOptions};
 use std::io::Write;
 use std::path::Path;
@@ -65,10 +67,19 @@ fn run() -> Result<()> {
         }
     }

-    let progress_bar = ProgressBar::new(hashed_files.len() as u64);
-    progress_bar.set_style(ProgressStyle::default_bar()
-        .template("Hashing files... {bar} {pos:>7}/{len:7} [{elapsed_precise}] ")
-        .on_finish(ProgressFinish::AndLeave));
+    let multi_progress = MultiProgress::new();
+
+    let progress_bar = ProgressBar::new(hashed_files.len() as u64)
+        .with_finish(ProgressFinish::AndLeave)
+        .with_style(ProgressStyle::default_bar()
+            .template("Hashing files... {bar} {pos:>7}/{len:7} [{elapsed_precise}]").unwrap());
+
+    let throughput_spinner = ProgressBar::new_spinner()
+        .with_style(ProgressStyle::default_spinner()
+            .template("{bytes_per_sec}").unwrap());
+
+    multi_progress.add(progress_bar.clone());
+    multi_progress.add(throughput_spinner.clone());

     let pool = ThreadPoolBuilder::new().num_threads(args.jobs as usize).build().unwrap();

@@ -87,40 +98,40 @@ fn run() -> Result<()> {
         }
     }

-    pool.install(|| {
-        return hashed_files.par_iter().progress_with(progress_bar).try_for_each(|path| -> Result<()> {
-            // Just opening the file and hashing using io::copy is roughly ~2.5x faster compared to sha256::digest_file
-            let mut hasher = Sha256::new();
-            let mut file = File::open(path)?;
-
-            io::copy(&mut file, &mut hasher)?;
-
-            let hash = format!("{:x}", hasher.finalize());
-            let filename = path.file_name().unwrap().to_str().unwrap();
-
-            if filename != hash {
-                let path_string = path.to_str().unwrap();
-
-                eprintln!("Integrity check failed for {}: Expected {}, got {}", path_string, filename, hash);
-
-                let file = OpenOptions::new()
-                    .create(true)
-                    .append(true)
-                    .open("restic-integrity-log");
-
-                match file {
-                    Err(error) => eprintln!("Unable to write to restic-integrity-log: {}", error),
-                    Ok(mut file) => {
-                        if let Err(error) = writeln!(file, "{}: Integrity check failed for {}: Expected {}, got {}", Local::now().format("%Y-%m-%d %H:%M:%S"), path_string, filename, hash) {
-                            eprintln!("Unable to write to restic-integrity-log: {}", error)
-                        }
-                    }
-                }
-            }
-
-            Ok(())
-        });
-    })
+    throughput_spinner.enable_steady_tick(Duration::from_millis(100));
+
+    pool.install(|| hashed_files.par_iter().progress_with(progress_bar).try_for_each(|path| -> Result<()> {
+        // Just opening the file and hashing using io::copy is roughly ~2.5x faster compared to sha256::digest_file
+        let mut hasher = Sha256::new();
+        let mut file = File::open(path)?;
+
+        io::copy(&mut file, &mut throughput_spinner.wrap_write(&mut hasher))?;
+
+        let hash = format!("{:x}", hasher.finalize());
+        let filename = path.file_name().unwrap().to_str().unwrap();
+
+        if filename != hash {
+            let path_string = path.to_str().unwrap();
+
+            eprintln!("Integrity check failed for {}: Expected {}, got {}", path_string, filename, hash);
+
+            let file = OpenOptions::new()
+                .create(true)
+                .append(true)
+                .open("restic-integrity-log");
+
+            match file {
+                Err(error) => eprintln!("Unable to write to restic-integrity-log: {}", error),
+                Ok(mut file) => {
+                    if let Err(error) = writeln!(file, "{}: Integrity check failed for {}: Expected {}, got {}", Local::now().format("%Y-%m-%d %H:%M:%S"), path_string, filename, hash) {
+                        eprintln!("Unable to write to restic-integrity-log: {}", error)
+                    }
+                }
+            }
+        }
+
+        Ok(())
+    }))
 }

 fn main() {
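
For reference, below is a minimal, sequential sketch of the pattern this commit applies: a MultiProgress holding the per-file progress bar and a separate throughput spinner, where the spinner wraps the Sha256 writer via wrap_write so that {bytes_per_sec} reports hashing throughput. It assumes indicatif 0.17 and sha2 with default features; the input file names are placeholders, and the rayon parallelism of the real code is left out for brevity.

// Standalone sketch, not the repository's code. Assumes indicatif 0.17 and sha2 0.10.
use std::fs::File;
use std::io;
use std::time::Duration;

use indicatif::{MultiProgress, ProgressBar, ProgressFinish, ProgressStyle};
use sha2::{Digest, Sha256};

fn main() -> io::Result<()> {
    let paths = vec!["a.bin", "b.bin"]; // placeholder input files

    let multi_progress = MultiProgress::new();

    // Per-file progress bar, kept on screen after finishing.
    let progress_bar = ProgressBar::new(paths.len() as u64)
        .with_finish(ProgressFinish::AndLeave)
        .with_style(ProgressStyle::default_bar()
            .template("Hashing files... {bar} {pos:>7}/{len:7} [{elapsed_precise}]").unwrap());

    // Spinner whose only job is to display the running byte throughput.
    let throughput_spinner = ProgressBar::new_spinner()
        .with_style(ProgressStyle::default_spinner()
            .template("{bytes_per_sec}").unwrap());

    let progress_bar = multi_progress.add(progress_bar);
    let throughput_spinner = multi_progress.add(throughput_spinner);

    // Redraw the spinner on a timer so the rate stays current between files.
    throughput_spinner.enable_steady_tick(Duration::from_millis(100));

    for path in &paths {
        let mut hasher = Sha256::new();
        let mut file = File::open(path)?;

        // Wrapping the hasher (an io::Write sink) lets the spinner count every byte
        // that io::copy pushes through it, which is what {bytes_per_sec} reports.
        io::copy(&mut file, &mut throughput_spinner.wrap_write(&mut hasher))?;

        println!("{}: {:x}", path, hasher.finalize());
        progress_bar.inc(1);
    }

    progress_bar.finish();
    throughput_spinner.finish();
    Ok(())
}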