Main: Allow specifying which number of files to start with

This patch introduces a new flag, `-s`/`--start`, which lets users
start the run at a given index into the list of hashed files
generated by traversing either the repository or a single directory.
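
For illustration, here is a minimal, self-contained sketch of the new
behavior (the struct below is a pared-down stand-in, not the project's
actual Args): clap's derive turns the `start` field into `-s`/`--start`,
and `checked_sub` rejects a start index past the end of the list before
the leading entries are drained.

use clap::Parser;

#[derive(Parser)]
struct Args {
    /// Index into the list of hashed files at which to start
    #[clap(short, long, default_value_t = 0)]
    start: usize,
}

fn main() {
    let args = Args::parse();

    // Stand-in for the list built by traversing the repository or directory.
    let mut hashed_files: Vec<String> = (0..5).map(|i| format!("file-{i}")).collect();

    match hashed_files.len().checked_sub(args.start) {
        // A start index beyond the end of the list is rejected up front.
        None => eprintln!("start {} exceeds the file count {}", args.start, hashed_files.len()),
        Some(remaining) => {
            // Drop the first `start` entries; only the rest get processed.
            hashed_files.drain(0..args.start);
            println!("{} files remaining", remaining);
        }
    }
}

Invoked as, for example, `cargo run -- --start 3` (the binary name here
is just Cargo's default), the sketch prints "2 files remaining".
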
networkException 2022-04-22 02:33:50 +02:00
parent ae4ff20534
commit 7c815712ae
GPG key ID: C1F2658DC370C8FC


@@ -17,6 +17,9 @@ struct Args {
    #[clap(short, long, default_value_t = 0)]
    jobs: u8,
    #[clap(short, long, default_value_t = 0)]
    start: usize,
    /// If the path argument should be used to check all files in a directory it points to instead of a restic repository (use for testing)
    #[clap(long)]
    single_directory: bool
@@ -25,6 +28,7 @@ struct Args {
fn run() -> Result<()> {
    let args = Args::parse();
    let path = Path::new(&args.path);
    let start = args.start;
    let mut directories_with_hashed_files = Vec::new();
@@ -60,6 +64,19 @@ fn run() -> Result<()> {
    let pool = ThreadPoolBuilder::new().num_threads(args.jobs as usize).build().unwrap();
    let hashed_files_len = hashed_files.len();
    if start != 0 {
        match hashed_files_len.checked_sub(start) {
            None => return Err(Error::new(ErrorKind::Other, format!("Specified start at {} is larger than the total number of files {}", start, hashed_files_len))),
            Some(remaining) => {
                println!("Starting at {} files, {} remaining...", start, remaining);
                hashed_files.drain(0..start);
            }
        }
    }
    pool.install(|| {
        return hashed_files.par_iter().progress_with(progress_bar).try_for_each(|path| -> Result<()> {
            // Just opening the file and hashing using io::copy is roughly ~2.5x faster compared to sha256::digest_file
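
For context on the comment above, here is one common way to write that
streaming hash, sketched with the sha2 crate (whether this project uses
sha2 or another SHA-256 implementation is an assumption): io::copy
drives the whole file through the hasher's io::Write implementation
instead of reading it into memory first.

use sha2::{Digest, Sha256};
use std::{fs::File, io, path::Path};

// Streams the file through the hasher rather than buffering it whole,
// returning the raw SHA-256 digest bytes.
fn hash_file(path: &Path) -> io::Result<Vec<u8>> {
    let mut file = File::open(path)?;
    let mut hasher = Sha256::new();
    io::copy(&mut file, &mut hasher)?;
    Ok(hasher.finalize().to_vec())
}

fn main() -> io::Result<()> {
    // Hashing this crate's own manifest is only an example input.
    let digest = hash_file(Path::new("Cargo.toml"))?;
    println!("{} digest bytes", digest.len());
    Ok(())
}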