Main: Allow specifying a starting index into the list of files

This patch introduces a new flag, `-s` or `--start`, allowing users to
begin processing at a given index into the list of hashed files
generated by traversing either the repository or a single directory.
This commit is contained in:
networkException 2022-04-22 02:33:50 +02:00
parent 279dc41175
commit 91da8d374b
Signed by: networkException
GPG key ID: E3877443AE684391

View file

@ -17,6 +17,9 @@ struct Args {
#[clap(short, long, default_value_t = 0)] #[clap(short, long, default_value_t = 0)]
jobs: u8, jobs: u8,
#[clap(short, long, default_value_t = 0)]
start: usize,
/// If the path argument should be used to check all files in a directory it points to instead of a restic repository (use for testing) /// If the path argument should be used to check all files in a directory it points to instead of a restic repository (use for testing)
#[clap(long)] #[clap(long)]
single_directory: bool single_directory: bool
@ -25,6 +28,7 @@ struct Args {
fn run() -> Result<()> { fn run() -> Result<()> {
let args = Args::parse(); let args = Args::parse();
let path = Path::new(&args.path); let path = Path::new(&args.path);
let start = args.start;
let mut directories_with_hashed_files = Vec::new(); let mut directories_with_hashed_files = Vec::new();
@ -60,6 +64,19 @@ fn run() -> Result<()> {
let pool = ThreadPoolBuilder::new().num_threads(args.jobs as usize).build().unwrap(); let pool = ThreadPoolBuilder::new().num_threads(args.jobs as usize).build().unwrap();
let hashed_files_len = hashed_files.len();
if start != 0 {
match hashed_files_len.checked_sub(start) {
None => return Err(Error::new(ErrorKind::Other, format!("Specified start at {} is larger than the total number of files {}", start, hashed_files_len))),
Some(remaining) => {
println!("Starting at {} files, {} remaining...", start, remaining);
hashed_files.drain(0..start);
}
}
}
pool.install(|| { pool.install(|| {
return hashed_files.par_iter().progress_with(progress_bar).try_for_each(|path| -> Result<()> { return hashed_files.par_iter().progress_with(progress_bar).try_for_each(|path| -> Result<()> {
// Just opening the file and hashing using io::copy is roughly ~2.5x faster compared to sha256::digest_file // Just opening the file and hashing using io::copy is roughly ~2.5x faster compared to sha256::digest_file