use clap::Parser;
use std::collections::VecDeque;
use std::fs::{self, File};
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use std::thread::{self, available_parallelism};
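
// External modules (not shown here): `hashers` is expected to provide the
// hash_sha256/hash_sha384/hash_sha512/hash_md5 helpers and `common` the
// debug/error/warning logging helpers used below.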
mod hashers;
mod common;
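
// Algorithm names accepted by --algorithm; entries in UNSECURE_ALGORITHMS are
// accepted too, but trigger a warning at startup.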
const ALGORITHMS: [&str; 3] = ["sha256", "sha384", "sha512"];
const UNSECURE_ALGORITHMS: [&str; 1] = ["md5"];
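
/// Command-line arguments, parsed with clap's derive API.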
#[derive(Parser)]
#[command(name = "psha")]
#[command(version = option_env!("CARGO_PKG_VERSION"))]
#[command(about = "A parallel checksum tool for various algorithms")]
#[command(long_about = None)]
struct Args {
    #[arg(
        short,
        long,
        help = "Use at most this number of threads, 0 means as many as there are processor cores",
        default_value = "0"
    )]
    threads: usize,

    #[arg(
        short,
        long,
        help = "Enable debug output (thread info, algorithm used/detected)"
    )]
    debug: bool,

    #[arg(
        short,
        long,
        help = "Specify an algorithm for hashing",
        default_value = "sha256",
        value_parser = {
            let mut cleaned: Vec<&str> = vec![];
            for i in ALGORITHMS {
                cleaned.push(i);
            }
            for i in UNSECURE_ALGORITHMS {
                cleaned.push(i);
            }
            clap::builder::PossibleValuesParser::new(cleaned)
        }
    )]
    algorithm: String,

    #[arg(
        short = 'f',
        long,
        help = "Show canonicalized (relative paths converted to absolute) file paths"
    )]
    canonicalize: bool,

    #[arg(
        short = 'c',
        long,
        help = "Read checksums from the file(s) and verify them"
    )]
    check: Vec<PathBuf>,

    #[arg(
        short = 'q',
        long,
        help = "(only used with -c) Only print checksums that fail; do not print OK for files that are successful"
    )]
    failures_only: bool,

    #[arg(
        short = 'Q',
        long,
        help = "(only used with -c) Suppress all output to stdout, including failures"
    )]
    quiet: bool,

    #[arg(trailing_var_arg = true)]
    files: Vec<PathBuf>,
}
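
/// Per-worker state: shared output flags, this worker's id, the selected
/// algorithm, and the shared queue of filenames to hash.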
struct ThreadInfo {
    debug: Arc<bool>,
    failures_only: Arc<bool>,
    quiet: Arc<bool>,
    thread_id: usize,
    filenames: Arc<Mutex<VecDeque<PathBuf>>>,
    algorithm: Arc<String>,
}
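
/// Worker loop: pop filenames off the shared queue and hash them with the
/// selected algorithm until the queue is empty.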
fn hash(info: ThreadInfo) -> Result<(), String> {
    loop {
        let filename = match info.filenames.lock().unwrap().pop_front() {
            Some(f) => f,
            None => break,
        };

        if !*info.quiet && *info.debug {
            common::debug(format!(
                "thread {} is hashing file '{}'",
                info.thread_id,
                filename.as_path().display()
            ));
        }

        if filename.is_dir() {
            common::error(format!("{}: Is a directory", filename.as_path().display()));
            continue;
        }

        let file = match File::open(&filename) {
            Err(e) => {
                common::error(format!("{}: {}", filename.as_path().display(), e));
                continue;
            },
            Ok(f) => f,
        };

        let res = match info.algorithm.as_str() {
            "sha256" => hashers::hash_sha256(file),
            "sha384" => hashers::hash_sha384(file),
            "sha512" => hashers::hash_sha512(file),
            "md5" => hashers::hash_md5(file),
            _ => panic!("Somehow did not pass a supported algorithm"),
        };

        if !*info.quiet {
            println!("{} {}", res, filename.as_path().display());
        }
    }

    if !*info.quiet && *info.debug {
        common::debug(format!("thread {} has run out of work", info.thread_id));
    }

    Ok(())
}
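
/// Parse arguments, build the shared work queue, spawn one worker per thread,
/// and wait for them all to finish.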
fn main() {
    let args = Args::parse();
    let mut buffer = VecDeque::new();
    let mut handles = vec![];
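    // Build the work queue; with -f/--canonicalize, store absolute paths.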
    for file in args.files {
        if args.canonicalize {
            match fs::canonicalize(file.as_path()) {
                Ok(p) => buffer.push_back(p),
                Err(e) => panic!("unable to canonicalize {}: {}", file.as_path().display(), e),
            };
        } else {
            buffer.push_back(file);
        }
    }

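    // threads == 0 means "use as many threads as there are processor cores".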
    let cpus = match args.threads {
        0 => available_parallelism().unwrap().get(),
        _ => args.threads,
    };

    if args.debug {
        common::debug(format!(
            "Starting psha using algorithm {} with {} threads",
            args.algorithm, cpus
        ));
    }

    if UNSECURE_ALGORITHMS.contains(&args.algorithm.as_str()) {
        common::warning(format!("{} is an insecure hashing algorithm!", &args.algorithm));
    }

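    // Share the queue behind an Arc<Mutex<VecDeque>> and spawn one hashing
    // worker per thread.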
    let arc_buf = Arc::new(Mutex::new(buffer));
    for i in 0..cpus {
        let safe_buf = Arc::clone(&arc_buf);
        let safe_alg = Arc::new(args.algorithm.clone());
        handles.push(thread::spawn(move || {
            hash(ThreadInfo {
                debug: Arc::new(args.debug),
                failures_only: Arc::new(args.failures_only),
                quiet: Arc::new(args.quiet),
                thread_id: i,
                filenames: safe_buf,
                algorithm: safe_alg,
            })
        }));
    }

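    // Wait for every worker; any error a worker reported becomes a panic here.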
    for handle in handles {
        match handle.join().unwrap() {
            Err(e) => panic!("{}", e),
            Ok(_) => (),
        }
    }
}