move common code to lib and macros

Bryson Steck 2025-08-12 23:25:19 -06:00
parent f247b9dd18
commit 56e8b69322
Signed by: bryson
SSH key fingerprint: SHA256:XpKABw/nP4z8UVaH+weLaBnEOD86+cVwif+QjuYLGT4
6 changed files with 502 additions and 450 deletions


@@ -4,6 +4,13 @@ version = "0.5.0"
edition = "2024"
autobins = true
[features]
algorithms = []
[[bin]]
name = "picca"
required-features = ["algorithms"]
[dependencies]
sha2 = "0.10.9"
clap = { version = "4.5.29", features = ["derive"] }
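
This hunk introduces an algorithms cargo feature and puts the multi-algorithm picca binary behind it via required-features, so a plain cargo build produces only the single-algorithm binaries, while cargo build --features algorithms also builds picca. In the Rust sources the same feature gates items through cfg attributes, as the Args struct does for --algorithm further down; a minimal sketch of the mechanism, using a hypothetical helper function:

#[cfg(feature = "algorithms")]
pub fn selectable_algorithms() -> bool {
    // Compiled only when built with `--features algorithms`.
    true
}

#[cfg(not(feature = "algorithms"))]
pub fn selectable_algorithms() -> bool {
    // Fallback for builds without the feature.
    false
}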


@@ -7,383 +7,13 @@ use std::sync::atomic::{AtomicI32, Ordering};
use std::sync::{Arc, Mutex};
use std::thread::{self, JoinHandle, available_parallelism};
use picca::message;
use picca::core::hashers;
const ALGORITHMS: [&'static str; 44] = [
"ascon",
"belt",
"blake3",
"blake2b512",
"blake2s256",
"fsb160",
"fsb224",
"fsb256",
"fsb384",
"fsb512",
"groestl224",
"groestl256",
"groestl384",
"groestl512",
"jh224",
"jh256",
"jh384",
"jh512",
"k12",
"ripemd128",
"ripemd160",
"ripemd256",
"ripemd320",
"sha224",
"sha256",
"sha384",
"sha512",
"sha3_224",
"sha3_256",
"sha3_384",
"sha3_512",
"shabal192",
"shabal224",
"shabal256",
"shabal384",
"shabal512",
"shake128",
"shake256",
"skein256",
"skein512",
"skein1024",
"sm3",
"tiger",
"whirlpool",
];
const UNSECURE_ALGORITHMS: [&'static str; 7] = [
"gost94",
"md2",
"md4",
"md5",
"sha1",
"streebog256",
"streebog512",
];
#[derive(Parser)]
#[command(name = "picca")]
#[command(version = option_env!("CARGO_PKG_VERSION"))]
#[command(about = "a Parallel Implementation of Common Checksum Algorithms")]
#[command(long_about = None)]
struct Args {
#[arg(
short,
long,
help = "Use at most this number of threads, 0 means as many as there are processor cores",
default_value = "0"
)]
threads: usize,
#[arg(
short,
long,
help = "Enable debug output (thread info, algorithm used/detected)"
)]
debug: bool,
#[arg(
short,
long,
help = "Specify an algorithm for hashing",
default_value = "sha256",
value_parser = clap::builder::PossibleValuesParser::new(get_algorithms())
)]
algorithm: String,
#[arg(
short = 'f',
long,
help = "Show canonicalized (relative paths converted to absolute) file paths"
)]
canonicalize: bool,
#[arg(
short = 'c',
long,
help = "Read checksums from the file(s) and verify them"
)]
check: Vec<PathBuf>,
#[arg(
short = 'q',
long,
help = "(only used with -c) Only print checksums that fail; do not print OK for files that are successful"
)]
quiet: bool,
#[arg(trailing_var_arg = true)]
files: Vec<PathBuf>,
}
struct ThreadInfo {
debug: bool,
quiet: bool,
thread_id: usize,
filenames: Arc<Mutex<VecDeque<PathBuf>>>,
algorithm: Arc<String>,
hash_map: Option<Arc<Mutex<HashMap<PathBuf, String>>>>,
file_errors: Arc<AtomicI32>,
hash_errors: Arc<AtomicI32>,
}
fn get_algorithms() -> Vec<&'static str> {
let mut combined: Vec<&str> = vec![];
for i in ALGORITHMS {
combined.push(i);
}
for i in UNSECURE_ALGORITHMS {
combined.push(i);
}
combined.sort();
return Vec::from(combined)
}
fn hash(info: ThreadInfo) -> Result<(), String> {
loop {
let mut stdin = None;
let mut file = None;
let filename = match info.filenames.lock().unwrap().pop_front() {
Some(f) => f,
None => break,
};
if !info.quiet && info.debug {
message::debug(format!(
"thread {} is hashing file '{}'",
info.thread_id,
filename.as_path().display()
));
}
if filename.display().to_string() == "-" {
let mut buffer = String::new();
match io::stdin().lock().read_to_string(&mut buffer) {
Ok(_) => (),
Err(e) => message::error(format!("stdin: {}", e)),
}
stdin = Some(buffer)
} else if filename.is_dir() {
message::error(format!("{}: Is a directory", filename.as_path().display()));
info.file_errors.fetch_add(1, Ordering::SeqCst);
continue;
} else {
file = Some(match File::open(&filename) {
Err(e) => {
println!("{}: FAILED ({})", filename.as_path().display(), e);
info.file_errors.fetch_add(1, Ordering::SeqCst);
continue;
},
Ok(f) => f,
});
}
let res = match &*info.algorithm.as_str() {
"ascon" => hashers::hash_ascon(file, stdin),
"belt" => hashers::hash_belt(file, stdin),
"blake2b512" => hashers::hash_blake2b512(file, stdin),
"blake2s256" => hashers::hash_blake2s256(file, stdin),
"blake3" => hashers::hash_blake3(file, stdin),
"fsb160" => hashers::hash_fsb160(file, stdin),
"fsb224" => hashers::hash_fsb224(file, stdin),
"fsb256" => hashers::hash_fsb256(file, stdin),
"fsb384" => hashers::hash_fsb384(file, stdin),
"fsb512" => hashers::hash_fsb512(file, stdin),
"gost94" => hashers::hash_gost94(file, stdin),
"groestl224" => hashers::hash_groestl224(file, stdin),
"groestl256" => hashers::hash_groestl256(file, stdin),
"groestl384" => hashers::hash_groestl384(file, stdin),
"groestl512" => hashers::hash_groestl512(file, stdin),
"jh224" => hashers::hash_jh224(file, stdin),
"jh256" => hashers::hash_jh256(file, stdin),
"jh384" => hashers::hash_jh384(file, stdin),
"jh512" => hashers::hash_jh512(file, stdin),
"k12" => hashers::hash_k12(file, stdin),
"md2" => hashers::hash_md2(file, stdin),
"md4" => hashers::hash_md4(file, stdin),
"md5" => hashers::hash_md5(file, stdin),
"ripemd128" => hashers::hash_ripemd128(file, stdin),
"ripemd160" => hashers::hash_ripemd160(file, stdin),
"ripemd256" => hashers::hash_ripemd256(file, stdin),
"ripemd320" => hashers::hash_ripemd320(file, stdin),
"sha1" => hashers::hash_sha1(file, stdin),
"sha224" => hashers::hash_sha224(file, stdin),
"sha256" => hashers::hash_sha256(file, stdin),
"sha384" => hashers::hash_sha384(file, stdin),
"sha512" => hashers::hash_sha512(file, stdin),
"sha3_224" => hashers::hash_sha3_224(file, stdin),
"sha3_256" => hashers::hash_sha3_256(file, stdin),
"sha3_384" => hashers::hash_sha3_384(file, stdin),
"sha3_512" => hashers::hash_sha3_512(file, stdin),
"shabal192" => hashers::hash_shabal192(file, stdin),
"shabal224" => hashers::hash_shabal224(file, stdin),
"shabal256" => hashers::hash_shabal256(file, stdin),
"shabal384" => hashers::hash_shabal384(file, stdin),
"shabal512" => hashers::hash_shabal512(file, stdin),
"shake128" => hashers::hash_shake128(file, stdin),
"shake256" => hashers::hash_shake256(file, stdin),
"skein256" => hashers::hash_skein256(file, stdin),
"skein512" => hashers::hash_skein512(file, stdin),
"skein1024" => hashers::hash_skein1024(file, stdin),
"sm3" => hashers::hash_sm3(file, stdin),
"streebog256" => hashers::hash_streebog256(file, stdin),
"streebog512" => hashers::hash_streebog512(file, stdin),
"tiger" => hashers::hash_tiger(file, stdin),
"whirlpool" => hashers::hash_whirlpool(file, stdin),
_ => panic!("Somehow did not pass a supported algorithm"),
};
match &info.hash_map {
Some(h) => {
if h.lock().unwrap()[&filename] == res {
if !info.quiet {
println!("{}: OK", filename.as_path().display());
}
} else {
println!("{}: FAILED", filename.as_path().display());
info.hash_errors.fetch_add(1, Ordering::SeqCst);
}
},
None => {
if !info.quiet {
println!("{} {}", res, filename.as_path().display());
}
},
}
}
if !info.quiet && info.debug {
message::debug(format!("thread {} has ran out of work", info.thread_id));
}
Ok(())
}
fn verify(
cpus: usize,
algorithm: String,
debug: bool,
quiet: bool,
checksum_files: Vec<PathBuf>,
) -> (
Vec<JoinHandle<Result<(), std::string::String>>>,
Arc<AtomicI32>,
Arc<AtomicI32>,
) {
let mut handles = vec![];
let mut hash_map: HashMap<PathBuf, String> = HashMap::new();
let mut buffer = VecDeque::new();
for file in checksum_files {
match read_to_string(&file) {
Err(e) => {
message::error(format!("{}: {}", file.as_path().display(), e));
continue;
},
Ok(f) => {
for line in f.lines() {
let split: Vec<String> = line.split_whitespace().map(|x| x.to_string()).collect();
// println!("{}, {}", split.size_hint().0, split.size_hint().1)
match split.len() {
2 => {
hash_map.insert(PathBuf::from(split[1].clone()), split[0].clone());
buffer.push_back(PathBuf::from(split[1].clone()));
},
_ => message::error(format!("malformed line: {}", line)),
}
}
},
};
}
let threads;
if buffer.len() >= cpus {
threads = cpus
} else {
threads = buffer.len()
}
let arc_fe = Arc::new(AtomicI32::new(0));
let arc_he = Arc::new(AtomicI32::new(0));
let arc_buf = Arc::new(Mutex::new(buffer));
let arc_hash = Arc::new(Mutex::new(hash_map));
for i in 0..threads {
let safe_fe = Arc::clone(&arc_fe);
let safe_he = Arc::clone(&arc_he);
let safe_buf = Arc::clone(&arc_buf);
let safe_alg = Arc::new(algorithm.clone());
let safe_hash = Arc::clone(&arc_hash);
handles.push(thread::spawn(move || {
hash(ThreadInfo {
debug,
quiet,
thread_id: i,
filenames: safe_buf,
algorithm: safe_alg,
hash_map: Some(safe_hash),
file_errors: safe_fe,
hash_errors: safe_he,
})
}))
}
return (handles, arc_fe, arc_he);
}
fn generate(
cpus: usize,
buffer: VecDeque<PathBuf>,
algorithm: String,
debug: bool,
quiet: bool,
) -> (
Vec<JoinHandle<Result<(), std::string::String>>>,
Arc<AtomicI32>,
Arc<AtomicI32>,
) {
let threads;
if buffer.len() >= cpus {
threads = cpus
} else {
threads = buffer.len()
}
let mut handles = vec![];
let arc_fe = Arc::new(AtomicI32::new(0));
let arc_he = Arc::new(AtomicI32::new(0));
let arc_buf = Arc::new(Mutex::new(buffer));
for i in 0..threads {
let safe_fe = Arc::clone(&arc_fe);
let safe_he = Arc::clone(&arc_he);
let safe_buf = Arc::clone(&arc_buf);
let safe_alg = Arc::new(algorithm.clone());
handles.push(thread::spawn(move || {
hash(ThreadInfo {
debug,
quiet,
thread_id: i,
filenames: safe_buf,
algorithm: safe_alg,
hash_map: None,
file_errors: safe_fe,
hash_errors: safe_he,
})
}))
}
return (handles, arc_fe, arc_he);
}
use picca::{get_algorithms, message};
#[quit::main]
fn main() {
let args = Args::parse();
let algorithm = args.algorithm;
let docker = match option_env!("PICCA_DOCKER") {
Some(v) => match v {
"true" => true,
@@ -392,81 +22,5 @@ fn main() {
None => false,
};
let cpus = match args.threads {
0 => available_parallelism().unwrap().get(),
_ => args.threads,
};
if args.debug {
if env!("CARGO_BIN_NAME") != "picca" {
message::debug(format!(
"Starting picca using algorithm {} with a max of {} threads",
args.algorithm, cpus
));
} else {
message::debug(format!(
"Starting {} with a max of {} threads",
env!("CARGO_BIN_NAME"), cpus
));
}
if docker {
message::debug(format!("Docker is detected"));
}
}
if UNSECURE_ALGORITHMS.contains(&args.algorithm.as_str()) {
message::warning(format!(
"{} is an unsecure hashing algorithm!",
&args.algorithm
));
}
let handles;
let arc_fe;
let arc_he;
let check_mode = !args.check.is_empty();
if &args.check.len() >= &1 {
(handles, arc_fe, arc_he) = verify(cpus, args.algorithm, args.debug, args.quiet, args.check);
} else {
let mut buffer = VecDeque::new();
if &args.files.len() >= &1 {
for file in args.files {
if args.canonicalize {
match fs::canonicalize(file.as_path()) {
Ok(p) => buffer.push_back(p),
Err(e) => panic!("unable to canonicalize {}: {}", file.as_path().display(), e),
};
} else {
buffer.push_back(file);
}
}
} else {
// read from stdin instead
buffer.push_back(PathBuf::from("-"));
}
(handles, arc_fe, arc_he) = generate(cpus, buffer, args.algorithm, args.debug, args.quiet);
}
for handle in handles {
match handle.join().unwrap() {
Err(e) => panic!("{}", e),
Ok(_) => (),
}
}
let fe = arc_fe.load(Ordering::SeqCst);
let he = arc_he.load(Ordering::SeqCst);
if fe != 0 {
message::warning(format!("{} listed files could not be read", fe));
}
if he != 0 {
message::warning(format!("{} computed checksums did NOT match", he));
}
if (he != 0 || fe != 0) && check_mode {
quit::with_code(1);
}
picca::main!(args, algorithm, docker);
}

src/bin/sha256sum.rs Normal file

@@ -0,0 +1,23 @@
use clap::Parser;
use std::collections::VecDeque;
use std::fs::{self};
use std::path::PathBuf;
use std::sync::atomic::Ordering;
use std::thread::available_parallelism;
use picca::message;
#[quit::main]
fn main() {
let args = picca::Args::parse();
let algorithm = env!("CARGO_BIN_NAME").replace("sum", "");
let docker = match option_env!("PICCA_DOCKER") {
Some(v) => match v {
"true" => true,
_ => false,
},
None => false,
};
picca::main!(args, algorithm, docker);
}
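
With the shared logic in the library and the main! macro, a fixed-algorithm binary reduces to the roughly twenty lines above. As a sketch of how another one could be added (a hypothetical src/bin/md5sum.rs, assuming a matching [[bin]] entry in Cargo.toml), only the file name changes, because the algorithm is derived from CARGO_BIN_NAME:

use clap::Parser;
use std::collections::VecDeque;
use std::fs;
use std::path::PathBuf;
use std::sync::atomic::Ordering;
use std::thread::available_parallelism;
use picca::message;

#[quit::main]
fn main() {
    let args = picca::Args::parse();
    // "md5sum" -> "md5": the binary name selects the algorithm.
    let algorithm = env!("CARGO_BIN_NAME").replace("sum", "");
    // Equivalent to the nested match in sha256sum.rs above.
    let docker = matches!(option_env!("PICCA_DOCKER"), Some("true"));
    picca::main!(args, algorithm, docker);
}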


@@ -1,2 +1,261 @@
use std::{
collections::{HashMap, VecDeque},
fs::{read_to_string, File},
io::{self, Read},
path::PathBuf,
sync::{
atomic::{AtomicI32, Ordering}, Arc, Mutex
},
thread::{self, JoinHandle},
};
use crate::message;
pub mod hashers;
pub mod macros;
struct ThreadInfo {
debug: bool,
quiet: bool,
thread_id: usize,
filenames: Arc<Mutex<VecDeque<PathBuf>>>,
algorithm: Arc<String>,
hash_map: Option<Arc<Mutex<HashMap<PathBuf, String>>>>,
file_errors: Arc<AtomicI32>,
hash_errors: Arc<AtomicI32>,
}
fn hash(info: ThreadInfo) -> Result<(), String> {
loop {
let mut stdin = None;
let mut file = None;
let filename = match info.filenames.lock().unwrap().pop_front() {
Some(f) => f,
None => break,
};
if !info.quiet && info.debug {
message::debug(format!(
"thread {} is hashing file '{}'",
info.thread_id,
filename.as_path().display()
));
}
if filename.display().to_string() == "-" {
let mut buffer = String::new();
match io::stdin().lock().read_to_string(&mut buffer) {
Ok(_) => (),
Err(e) => message::error(format!("stdin: {}", e)),
}
stdin = Some(buffer)
} else if filename.is_dir() {
message::error(format!("{}: Is a directory", filename.as_path().display()));
info.file_errors.fetch_add(1, Ordering::SeqCst);
continue;
} else {
file = Some(match File::open(&filename) {
Err(e) => {
println!("{}: FAILED ({})", filename.as_path().display(), e);
info.file_errors.fetch_add(1, Ordering::SeqCst);
continue;
},
Ok(f) => f,
});
}
let res = match &*info.algorithm.as_str() {
"ascon" => hashers::hash_ascon(file, stdin),
"belt" => hashers::hash_belt(file, stdin),
"blake2b512" => hashers::hash_blake2b512(file, stdin),
"blake2s256" => hashers::hash_blake2s256(file, stdin),
"blake3" => hashers::hash_blake3(file, stdin),
"fsb160" => hashers::hash_fsb160(file, stdin),
"fsb224" => hashers::hash_fsb224(file, stdin),
"fsb256" => hashers::hash_fsb256(file, stdin),
"fsb384" => hashers::hash_fsb384(file, stdin),
"fsb512" => hashers::hash_fsb512(file, stdin),
"gost94" => hashers::hash_gost94(file, stdin),
"groestl224" => hashers::hash_groestl224(file, stdin),
"groestl256" => hashers::hash_groestl256(file, stdin),
"groestl384" => hashers::hash_groestl384(file, stdin),
"groestl512" => hashers::hash_groestl512(file, stdin),
"jh224" => hashers::hash_jh224(file, stdin),
"jh256" => hashers::hash_jh256(file, stdin),
"jh384" => hashers::hash_jh384(file, stdin),
"jh512" => hashers::hash_jh512(file, stdin),
"k12" => hashers::hash_k12(file, stdin),
"md2" => hashers::hash_md2(file, stdin),
"md4" => hashers::hash_md4(file, stdin),
"md5" => hashers::hash_md5(file, stdin),
"ripemd128" => hashers::hash_ripemd128(file, stdin),
"ripemd160" => hashers::hash_ripemd160(file, stdin),
"ripemd256" => hashers::hash_ripemd256(file, stdin),
"ripemd320" => hashers::hash_ripemd320(file, stdin),
"sha1" => hashers::hash_sha1(file, stdin),
"sha224" => hashers::hash_sha224(file, stdin),
"sha256" => hashers::hash_sha256(file, stdin),
"sha384" => hashers::hash_sha384(file, stdin),
"sha512" => hashers::hash_sha512(file, stdin),
"sha3_224" => hashers::hash_sha3_224(file, stdin),
"sha3_256" => hashers::hash_sha3_256(file, stdin),
"sha3_384" => hashers::hash_sha3_384(file, stdin),
"sha3_512" => hashers::hash_sha3_512(file, stdin),
"shabal192" => hashers::hash_shabal192(file, stdin),
"shabal224" => hashers::hash_shabal224(file, stdin),
"shabal256" => hashers::hash_shabal256(file, stdin),
"shabal384" => hashers::hash_shabal384(file, stdin),
"shabal512" => hashers::hash_shabal512(file, stdin),
"shake128" => hashers::hash_shake128(file, stdin),
"shake256" => hashers::hash_shake256(file, stdin),
"skein256" => hashers::hash_skein256(file, stdin),
"skein512" => hashers::hash_skein512(file, stdin),
"skein1024" => hashers::hash_skein1024(file, stdin),
"sm3" => hashers::hash_sm3(file, stdin),
"streebog256" => hashers::hash_streebog256(file, stdin),
"streebog512" => hashers::hash_streebog512(file, stdin),
"tiger" => hashers::hash_tiger(file, stdin),
"whirlpool" => hashers::hash_whirlpool(file, stdin),
_ => panic!("Somehow did not pass a supported algorithm"),
};
match &info.hash_map {
Some(h) => {
if h.lock().unwrap()[&filename] == res {
if !info.quiet {
println!("{}: OK", filename.as_path().display());
}
} else {
println!("{}: FAILED", filename.as_path().display());
info.hash_errors.fetch_add(1, Ordering::SeqCst);
}
},
None => {
if !info.quiet {
println!("{} {}", res, filename.as_path().display());
}
},
}
}
if !info.quiet && info.debug {
message::debug(format!("thread {} has ran out of work", info.thread_id));
}
Ok(())
}
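
The loop in hash is a plain shared work queue: each worker locks the Arc<Mutex<VecDeque<PathBuf>>> just long enough to pop one path, releases the lock, then does the expensive hashing, with failure counts kept in shared atomics. A minimal standalone sketch of the same pattern (hypothetical names, not part of picca):

use std::collections::VecDeque;
use std::sync::atomic::{AtomicI32, Ordering};
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    let queue = Arc::new(Mutex::new((1..=10).collect::<VecDeque<i32>>()));
    let failures = Arc::new(AtomicI32::new(0));
    let mut handles = vec![];
    for id in 0..4 {
        let queue = Arc::clone(&queue);
        let failures = Arc::clone(&failures);
        handles.push(thread::spawn(move || loop {
            // The mutex guard is a temporary inside this let, so it is
            // dropped before the item is processed, matching hash above.
            let item = match queue.lock().unwrap().pop_front() {
                Some(i) => i,
                None => break,
            };
            if item % 3 == 0 {
                failures.fetch_add(1, Ordering::SeqCst); // stand-in for a failed file
            }
            println!("worker {id} handled item {item}");
        }));
    }
    for handle in handles {
        handle.join().unwrap();
    }
    println!("failures: {}", failures.load(Ordering::SeqCst));
}
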
pub fn verify(
cpus: usize,
algorithm: String,
debug: bool,
quiet: bool,
checksum_files: Vec<PathBuf>,
) -> (
Vec<JoinHandle<Result<(), std::string::String>>>,
Arc<AtomicI32>,
Arc<AtomicI32>,
) {
let mut handles = vec![];
let mut hash_map: HashMap<PathBuf, String> = HashMap::new();
let mut buffer = VecDeque::new();
for file in checksum_files {
match read_to_string(&file) {
Err(e) => {
message::error(format!("{}: {}", file.as_path().display(), e));
continue;
},
Ok(f) => {
for line in f.lines() {
let split: Vec<String> = line.split_whitespace().map(|x| x.to_string()).collect();
// println!("{}, {}", split.size_hint().0, split.size_hint().1)
match split.len() {
2 => {
hash_map.insert(PathBuf::from(split[1].clone()), split[0].clone());
buffer.push_back(PathBuf::from(split[1].clone()));
},
_ => message::error(format!("malformed line: {}", line)),
}
}
},
};
}
let threads;
if buffer.len() >= cpus {
threads = cpus
} else {
threads = buffer.len()
}
let arc_fe = Arc::new(AtomicI32::new(0));
let arc_he = Arc::new(AtomicI32::new(0));
let arc_buf = Arc::new(Mutex::new(buffer));
let arc_hash = Arc::new(Mutex::new(hash_map));
for i in 0..threads {
let safe_fe = Arc::clone(&arc_fe);
let safe_he = Arc::clone(&arc_he);
let safe_buf = Arc::clone(&arc_buf);
let safe_alg = Arc::new(algorithm.clone());
let safe_hash = Arc::clone(&arc_hash);
handles.push(thread::spawn(move || {
hash(ThreadInfo {
debug,
quiet,
thread_id: i,
filenames: safe_buf,
algorithm: safe_alg,
hash_map: Some(safe_hash),
file_errors: safe_fe,
hash_errors: safe_he,
})
}))
}
return (handles, arc_fe, arc_he);
}
pub fn generate(
cpus: usize,
buffer: VecDeque<PathBuf>,
algorithm: String,
debug: bool,
quiet: bool,
) -> (
Vec<JoinHandle<Result<(), std::string::String>>>,
Arc<AtomicI32>,
Arc<AtomicI32>,
) {
let threads;
if buffer.len() >= cpus {
threads = cpus
} else {
threads = buffer.len()
}
let mut handles = vec![];
let arc_fe = Arc::new(AtomicI32::new(0));
let arc_he = Arc::new(AtomicI32::new(0));
let arc_buf = Arc::new(Mutex::new(buffer));
for i in 0..threads {
let safe_fe = Arc::clone(&arc_fe);
let safe_he = Arc::clone(&arc_he);
let safe_buf = Arc::clone(&arc_buf);
let safe_alg = Arc::new(algorithm.clone());
handles.push(thread::spawn(move || {
hash(ThreadInfo {
debug,
quiet,
thread_id: i,
filenames: safe_buf,
algorithm: safe_alg,
hash_map: None,
file_errors: safe_fe,
hash_errors: safe_he,
})
}))
}
return (handles, arc_fe, arc_he);
}
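
Callers are not forced through the main! macro; verify and generate can be driven directly. A hedged usage sketch (assuming the hashers for the named algorithm are compiled in; the file names are illustrative):

use std::collections::VecDeque;
use std::path::PathBuf;
use std::sync::atomic::Ordering;

fn hash_some_files() {
    let mut buffer = VecDeque::new();
    buffer.push_back(PathBuf::from("Cargo.toml"));
    buffer.push_back(PathBuf::from("README.md"));
    // generate() clamps the worker count to the queue length, so asking
    // for 8 threads with 2 files spawns only 2 workers.
    let (handles, file_errors, hash_errors) =
        picca::core::generate(8, buffer, String::from("sha256"), false, false);
    for handle in handles {
        handle.join().unwrap().unwrap();
    }
    println!(
        "unreadable files: {}, mismatched checksums: {}",
        file_errors.load(Ordering::SeqCst),
        hash_errors.load(Ordering::SeqCst)
    );
}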

View file

@@ -1 +1,80 @@
#[macro_export]
macro_rules! main {
($a:expr,$b:expr,$c:expr) => {
let cpus = match $a.threads {
0 => available_parallelism().unwrap().get(),
_ => $a.threads,
};
if $a.debug {
if env!("CARGO_BIN_NAME") != "picca" {
message::debug(format!(
"Starting picca using algorithm {} with a max of {} threads",
$b, cpus
));
} else {
message::debug(format!(
"Starting {} with a max of {} threads",
env!("CARGO_BIN_NAME"),
cpus
));
}
if $c {
message::debug(format!("Docker is detected"));
}
}
if picca::UNSECURE_ALGORITHMS.contains(&$b.as_str()) {
message::warning(format!("{} is an unsecure hashing algorithm!", &$b));
}
let handles;
let arc_fe;
let arc_he;
let check_mode = !$a.check.is_empty();
if &$a.check.len() >= &1 {
(handles, arc_fe, arc_he) = picca::core::verify(cpus, $b, $a.debug, $a.quiet, $a.check);
} else {
let mut buffer = VecDeque::new();
if &$a.files.len() >= &1 {
for file in $a.files {
if $a.canonicalize {
match fs::canonicalize(file.as_path()) {
Ok(p) => buffer.push_back(p),
Err(e) => panic!("unable to canonicalize {}: {}", file.as_path().display(), e),
};
} else {
buffer.push_back(file);
}
}
} else {
// read from stdin instead
buffer.push_back(PathBuf::from("-"));
}
(handles, arc_fe, arc_he) = picca::core::generate(cpus, buffer, $b, $a.debug, $a.quiet);
}
for handle in handles {
match handle.join().unwrap() {
Err(e) => panic!("{}", e),
Ok(_) => (),
}
}
let fe = arc_fe.load(Ordering::SeqCst);
let he = arc_he.load(Ordering::SeqCst);
if fe != 0 {
message::warning(format!("{} listed files could not be read", fe));
}
if he != 0 {
message::warning(format!("{} computed checksums did NOT match", he));
}
if (he != 0 || fe != 0) && check_mode {
quit::with_code(1);
}
};
}
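
One consequence of main! being a macro_rules macro: its expansion refers to VecDeque, fs, PathBuf, Ordering, available_parallelism, and message unqualified, so every binary that invokes it must bring those names into scope itself. That is exactly the prelude src/bin/sha256sum.rs carries:

// Imports required in any binary that invokes picca::main!:
use std::collections::VecDeque;
use std::fs;
use std::path::PathBuf;
use std::sync::atomic::Ordering;
use std::thread::available_parallelism;
use picca::message;

The remaining paths in the macro body (picca::UNSECURE_ALGORITHMS, picca::core::verify, picca::core::generate, quit::with_code) are written fully qualified, so they need no imports, though quit must still be a dependency of the calling crate.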


@@ -1,2 +1,132 @@
pub mod message;
use clap::Parser;
use std::path::PathBuf;
pub mod core;
pub mod message;
pub const ALGORITHMS: [&'static str; 44] = [
"ascon",
"belt",
"blake3",
"blake2b512",
"blake2s256",
"fsb160",
"fsb224",
"fsb256",
"fsb384",
"fsb512",
"groestl224",
"groestl256",
"groestl384",
"groestl512",
"jh224",
"jh256",
"jh384",
"jh512",
"k12",
"ripemd128",
"ripemd160",
"ripemd256",
"ripemd320",
"sha224",
"sha256",
"sha384",
"sha512",
"sha3_224",
"sha3_256",
"sha3_384",
"sha3_512",
"shabal192",
"shabal224",
"shabal256",
"shabal384",
"shabal512",
"shake128",
"shake256",
"skein256",
"skein512",
"skein1024",
"sm3",
"tiger",
"whirlpool",
];
pub const UNSECURE_ALGORITHMS: [&'static str; 7] = [
"gost94",
"md2",
"md4",
"md5",
"sha1",
"streebog256",
"streebog512",
];
#[derive(Parser)]
#[command(name = "picca")]
#[command(version = option_env!("CARGO_PKG_VERSION"))]
#[command(about = "a Parallel Implementation of Common Checksum Algorithms")]
#[command(long_about = None)]
pub struct Args {
#[arg(
short,
long,
help = "Use at most this number of threads, 0 means as many as there are processor cores",
default_value = "0"
)]
pub threads: usize,
#[arg(
short,
long,
help = "Enable debug output (thread info, algorithm used/detected)"
)]
pub debug: bool,
#[cfg(feature = "algorithms")]
#[arg(
short,
long,
help = "Specify an algorithm for hashing",
default_value = "sha256",
value_parser = clap::builder::PossibleValuesParser::new(get_algorithms())
)]
pub algorithm: String,
#[arg(
short = 'f',
long,
help = "Show canonicalized (relative paths converted to absolute) file paths"
)]
pub canonicalize: bool,
#[arg(
short = 'c',
long,
help = "Read checksums from the file(s) and verify them"
)]
pub check: Vec<PathBuf>,
#[arg(
short = 'q',
long,
help = "(only used with -c) Only print checksums that fail; do not print OK for files that are successful"
)]
pub quiet: bool,
#[arg(trailing_var_arg = true)]
pub files: Vec<PathBuf>,
}
pub fn get_algorithms() -> Vec<&'static str> {
let mut combined: Vec<&str> = vec![];
for i in ALGORITHMS {
combined.push(i);
}
for i in UNSECURE_ALGORITHMS {
combined.push(i);
}
combined.sort();
return Vec::from(combined);
}
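
A small sanity test one might add next to get_algorithms (hypothetical; not part of this commit) to pin down its contract: every entry from both arrays is present and the combined list is sorted:

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn get_algorithms_is_complete_and_sorted() {
        let all = get_algorithms();
        // 44 entries in ALGORITHMS plus 7 in UNSECURE_ALGORITHMS, merged and sorted.
        assert_eq!(all.len(), ALGORITHMS.len() + UNSECURE_ALGORITHMS.len());
        assert!(all.windows(2).all(|pair| pair[0] <= pair[1]));
        assert!(all.contains(&"sha256"));
        assert!(all.contains(&"md5"));
    }
}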