clean up code, implement all secure sha algorithms
commit 35dd6d97b0
parent f06da146a7

3 changed files with 148 additions and 46 deletions
.rustfmt.toml (new file)

@@ -0,0 +1,4 @@
+edition = "2021"
+tab_spaces = 2
+match_block_trailing_comma = true
+#wrap_comments = true
src/hashers.rs (new file)

@@ -0,0 +1,24 @@
+use sha2::{Digest, Sha256, Sha384, Sha512};
+use std::fs::File;
+use std::io;
+
+pub fn hash_sha256(mut file: File) -> String {
+  let mut hasher = Sha256::new();
+  _ = io::copy(&mut file, &mut hasher);
+
+  return format!("{:x}", hasher.finalize());
+}
+
+pub fn hash_sha384(mut file: File) -> String {
+  let mut hasher = Sha384::new();
+  _ = io::copy(&mut file, &mut hasher);
+
+  return format!("{:x}", hasher.finalize());
+}
+
+pub fn hash_sha512(mut file: File) -> String {
+  let mut hasher = Sha512::new();
+  _ = io::copy(&mut file, &mut hasher);
+
+  return format!("{:x}", hasher.finalize());
+}
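Side note: the three helpers in src/hashers.rs differ only in the hasher type. If the duplication ever becomes a nuisance, a single generic function is possible, since every sha2 hasher implements Digest and (with the crate's default std feature) io::Write, which is what lets io::copy stream the file through it. A rough sketch, not part of this commit; hash_file and the example path are illustrative only:

use sha2::{Digest, Sha512};
use std::fs::File;
use std::io;

// Generic over the digest type: `Digest` provides `new()`/`finalize()`,
// `io::Write` lets `io::copy` feed the file into the hasher in chunks.
fn hash_file<D: Digest + io::Write>(mut file: File) -> io::Result<String> {
  let mut hasher = D::new();
  io::copy(&mut file, &mut hasher)?;
  Ok(
    hasher
      .finalize()
      .iter()
      .map(|b| format!("{:02x}", b))
      .collect(),
  )
}

fn main() -> io::Result<()> {
  // Hypothetical input file, purely for illustration.
  let digest = hash_file::<Sha512>(File::open("Cargo.toml")?)?;
  println!("{digest}");
  Ok(())
}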
src/main.rs (166 lines changed)

@@ -1,44 +1,114 @@
-use sha2::{Sha256, Digest};
-use std::ops::Deref;
-use std::path::PathBuf;
-use std::thread;
-use std::thread::available_parallelism;
-use std::fs::File;
-use std::io;
 use clap::Parser;
-use std::sync::Arc;
-use std::sync::Mutex;
 use std::collections::VecDeque;
-use std::fs;
+use std::fs::{self, File};
+use std::path::PathBuf;
+use std::sync::{Arc, Mutex};
+use std::thread::{self, available_parallelism};
+
+mod hashers;
 
 #[derive(Parser)]
 #[command(name = "psha")]
 #[command(version = option_env!("CARGO_PKG_VERSION"))]
+#[command(about = "A parallel checksum tool for various algorithms")]
 #[command(long_about = None)]
 struct Args {
   #[arg(
-    trailing_var_arg = true,
+    short,
+    long,
+    help = "Use at most this number of threads, 0 means as many as there are processor cores",
+    default_value = "0"
   )]
+  threads: usize,
+
+  #[arg(
+    short,
+    long,
+    help = "Enable debug output (thread info, algorithm used/detected)"
+  )]
+  debug: bool,
+
+  #[arg(
+    short,
+    long,
+    help = "Specify an algorithm for hashing",
+    default_value = "sha256",
+    value_parser = clap::builder::PossibleValuesParser::new(["sha256", "sha512"])
+  )]
+  algorithm: String,
+
+  #[arg(
+    short = 'f',
+    long,
+    help = "Show canonicalized (relative paths converted to absolute) file paths"
+  )]
+  canonicalize: bool,
+
+  #[arg(
+    short = 'c',
+    long,
+    help = "Read checksums from the file(s) and verify them"
+  )]
+  check: Vec<PathBuf>,
+
+  #[arg(
+    short = 'q',
+    long,
+    help = "(only used with -c) Only print checksums that fail; do not print OK for files that are successful"
+  )]
+  failures_only: bool,
+
+  #[arg(
+    short = 'Q',
+    long,
+    help = "(only used with -c) Suppress all output to stdout, including failures"
+  )]
+  quiet: bool,
+
+  #[arg(trailing_var_arg = true)]
   files: Vec<PathBuf>,
 }
 
-fn checksum(thread_id: usize, filenames: Arc<Mutex<VecDeque<PathBuf>>>) -> Result<(), String> {
-  let running = true;
-  while running {
-    let filename = match filenames.lock().unwrap().pop_front() {
-      Some(f) => f,
-      None => return Ok(())
-    };
-    let mut hasher = Sha256::new();
-    let mut file = File::open(&filename).unwrap();
-
-    io::copy(&mut file, &mut hasher);
-    let hash = hasher.finalize();
-    // println!("thread {} result: {:x}\t{}", thread_id, hash, filename.as_path().display());
-    println!("{:x}\t{}", hash, filename.as_path().display());
-  }
-
-  println!("thread {} has ran out of work", thread_id);
+struct ThreadInfo {
+  debug: Arc<bool>,
+  failures_only: Arc<bool>,
+  quiet: Arc<bool>,
+  thread_id: usize,
+  filenames: Arc<Mutex<VecDeque<PathBuf>>>,
+  algorithm: Arc<String>,
+}
+
+fn hash(info: ThreadInfo) -> Result<(), String> {
+  loop {
+    let filename = match info.filenames.lock().unwrap().pop_front() {
+      Some(f) => f,
+      None => break,
+    };
+
+    if !*info.quiet && *info.debug {
+      eprintln!(
+        "thread {} is hashing file '{}'",
+        info.thread_id,
+        filename.as_path().display()
+      );
+    }
+
+    let file = File::open(&filename).unwrap();
+    let res = match &*info.algorithm.as_str() {
+      "sha256" => hashers::hash_sha256(file),
+      "sha384" => hashers::hash_sha384(file),
+      "sha512" => hashers::hash_sha512(file),
+      _ => panic!("Somehow did not pass a supported algorithm"),
+    };
+
+    if !*info.quiet {
+      println!("{} {}", res, filename.as_path().display());
+    }
+  }
+
+  if !*info.quiet && *info.debug {
+    eprintln!("thread {} has ran out of work", info.thread_id);
+  }
+
   Ok(())
 }
 
@@ -49,33 +119,37 @@ fn main() {
   let mut buffer = VecDeque::new();
   let mut handles = vec![];
   for file in args.files {
-    // match fs::canonicalize(file.as_path()) {
-    //   Ok(p) => buffer.push_back(p),
-    //   Err(e) => panic!("unable to canonicalize {}: {}", file.as_path().display(), e)
-    // };
-    buffer.push_back(file);
-  }
-  let arc_buf = Arc::new(Mutex::new(buffer));
-  // let mut handles = vec![];
-  // let chunks = Arc::new(Mutex::new(args.files.chunks(args.files.len()/ cpus).clone()));
-  /* let threads = chunks.into_iter().map(|chunk| {
-    thread::spawn(move || checksum(0, chunk.to_vec()))
-  }).collect::<Vec<_>>(); */
-
-  /* threads.into_iter().for_each(|i| { i.join().unwrap(); });
-  // for file in args.files {
-  //   if let Err(e) = checksum(0, file) { */
-  //     println!("{}", e);
-  //     break;
-  //   };
-  // }
+    if args.canonicalize {
+      match fs::canonicalize(file.as_path()) {
+        Ok(p) => buffer.push_back(p),
+        Err(e) => panic!("unable to canonicalize {}: {}", file.as_path().display(), e),
+      };
+    } else {
+      buffer.push_back(file);
+    }
+  }
+
+  let arc_buf = Arc::new(Mutex::new(buffer));
+
   for i in 0..cpus {
-    let safe = Arc::clone(&arc_buf);
-    handles.push(thread::spawn(move || checksum(i, safe)))
+    let safe_buf = Arc::clone(&arc_buf);
+    let safe_alg = Arc::new(args.algorithm.clone());
+    handles.push(thread::spawn(move || {
+      hash(ThreadInfo {
+        debug: Arc::new(args.debug),
+        failures_only: Arc::new(args.failures_only),
+        quiet: Arc::new(args.quiet),
+        thread_id: i,
+        filenames: safe_buf,
+        algorithm: safe_alg,
+      })
+    }))
   }
 
   for handle in handles {
-    handle.join().unwrap();
+    match handle.join().unwrap() {
+      Err(e) => panic!("{}", e),
+      Ok(_) => (),
+    }
   }
 
   // println!("{}", available_parallelism().unwrap().get());
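The threading model kept by this commit is a plain shared work queue: main pushes every path into one Arc<Mutex<VecDeque<_>>> and each of the cpus threads pops from the front until the queue is empty. A minimal standalone sketch of that pattern, with strings standing in for the queued file paths (all names here are illustrative, not part of the commit):

use std::collections::VecDeque;
use std::sync::{Arc, Mutex};
use std::thread::{self, available_parallelism};

fn main() {
  // Stand-ins for the file paths psha queues up.
  let jobs: VecDeque<String> = (1..=8).map(|i| format!("file-{i}.bin")).collect();
  let queue = Arc::new(Mutex::new(jobs));

  let cpus = available_parallelism().map(|n| n.get()).unwrap_or(1);
  let mut handles = vec![];

  for id in 0..cpus {
    let queue = Arc::clone(&queue);
    handles.push(thread::spawn(move || {
      loop {
        let job = match queue.lock().unwrap().pop_front() {
          Some(j) => j,
          None => break,
        };
        // The guard from `lock()` is dropped at the end of the `let` above,
        // so the queue is not held while the (stand-in) work runs.
        println!("thread {id} is processing {job}");
      }
    }));
  }

  for handle in handles {
    handle.join().unwrap();
  }
}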