use std::{
    collections::{HashMap, VecDeque},
    fs::{read_to_string, File},
    io::{self, Read},
    path::PathBuf,
    sync::{
        atomic::{AtomicI32, Ordering},
        Arc, Mutex,
    },
    thread::{self, JoinHandle},
};

use crate::message;

pub mod hashers;
pub mod macros;

/// State handed to each hashing worker thread.
struct ThreadInfo {
    debug: bool,
    quiet: bool,
    thread_id: usize,
    filenames: Arc<Mutex<VecDeque<PathBuf>>>,
    algorithm: Arc<String>,
    hash_map: Option<Arc<Mutex<HashMap<PathBuf, String>>>>,
    file_errors: Arc<AtomicI32>,
    hash_errors: Arc<AtomicI32>,
}

fn hash(info: ThreadInfo) -> Result<(), String> {
    loop {
        let mut stdin = None;
        let mut file = None;

        // Pull the next filename off the shared queue; stop when it is empty.
        let filename = match info.filenames.lock().unwrap().pop_front() {
            Some(f) => f,
            None => break,
        };

        if !info.quiet && info.debug {
            message::debug(format!(
                "thread {} is hashing file '{}'",
                info.thread_id,
                filename.as_path().display()
            ));
        }

        if filename.display().to_string() == "-" {
            // "-" means the data to hash comes from stdin.
            let mut buffer = String::new();
            match io::stdin().lock().read_to_string(&mut buffer) {
                Ok(_) => (),
                Err(e) => message::error(format!("stdin: {}", e)),
            }
            stdin = Some(buffer);
        } else if filename.is_dir() {
            message::error(format!("{}: Is a directory", filename.as_path().display()));
            info.file_errors.fetch_add(1, Ordering::SeqCst);
            continue;
        } else {
            file = Some(match File::open(&filename) {
                Err(e) => {
                    println!("{}: FAILED ({})", filename.as_path().display(), e);
                    info.file_errors.fetch_add(1, Ordering::SeqCst);
                    continue;
                },
                Ok(f) => f,
            });
        }

        // Dispatch to the hasher for the requested algorithm.
        let res = match info.algorithm.as_str() {
            "ascon" => hashers::hash_ascon(file, stdin),
            "belt" => hashers::hash_belt(file, stdin),
            "blake2b512" => hashers::hash_blake2b512(file, stdin),
            "blake2s256" => hashers::hash_blake2s256(file, stdin),
            "blake3" => hashers::hash_blake3(file, stdin),
            "fsb160" => hashers::hash_fsb160(file, stdin),
            "fsb224" => hashers::hash_fsb224(file, stdin),
            "fsb256" => hashers::hash_fsb256(file, stdin),
            "fsb384" => hashers::hash_fsb384(file, stdin),
            "fsb512" => hashers::hash_fsb512(file, stdin),
            "gost94" => hashers::hash_gost94(file, stdin),
            "groestl224" => hashers::hash_groestl224(file, stdin),
            "groestl256" => hashers::hash_groestl256(file, stdin),
            "groestl384" => hashers::hash_groestl384(file, stdin),
            "groestl512" => hashers::hash_groestl512(file, stdin),
            "jh224" => hashers::hash_jh224(file, stdin),
            "jh256" => hashers::hash_jh256(file, stdin),
            "jh384" => hashers::hash_jh384(file, stdin),
            "jh512" => hashers::hash_jh512(file, stdin),
            "k12" => hashers::hash_k12(file, stdin),
            "md2" => hashers::hash_md2(file, stdin),
            "md4" => hashers::hash_md4(file, stdin),
            "md5" => hashers::hash_md5(file, stdin),
            "ripemd128" => hashers::hash_ripemd128(file, stdin),
            "ripemd160" => hashers::hash_ripemd160(file, stdin),
            "ripemd256" => hashers::hash_ripemd256(file, stdin),
            "ripemd320" => hashers::hash_ripemd320(file, stdin),
            "sha1" => hashers::hash_sha1(file, stdin),
            "sha224" => hashers::hash_sha224(file, stdin),
            "sha256" => hashers::hash_sha256(file, stdin),
            "sha384" => hashers::hash_sha384(file, stdin),
            "sha512" => hashers::hash_sha512(file, stdin),
            "sha3_224" => hashers::hash_sha3_224(file, stdin),
            "sha3_256" => hashers::hash_sha3_256(file, stdin),
            "sha3_384" => hashers::hash_sha3_384(file, stdin),
            "sha3_512" => hashers::hash_sha3_512(file, stdin),
            "shabal192" => hashers::hash_shabal192(file, stdin),
            "shabal224" => hashers::hash_shabal224(file, stdin),
            "shabal256" => hashers::hash_shabal256(file, stdin),
            "shabal384" => hashers::hash_shabal384(file, stdin),
            "shabal512" => hashers::hash_shabal512(file, stdin),
            "shake128" => hashers::hash_shake128(file, stdin),
            "shake256" => hashers::hash_shake256(file, stdin),
            "skein256" => hashers::hash_skein256(file, stdin),
            "skein512" => hashers::hash_skein512(file, stdin),
            "skein1024" => hashers::hash_skein1024(file, stdin),
            "sm3" => hashers::hash_sm3(file, stdin),
            "streebog256" => hashers::hash_streebog256(file, stdin),
            "streebog512" => hashers::hash_streebog512(file, stdin),
            "tiger" => hashers::hash_tiger(file, stdin),
            "whirlpool" => hashers::hash_whirlpool(file, stdin),
            _ => panic!("Somehow did not pass a supported algorithm"),
        };

        match &info.hash_map {
            // Verify mode: compare the computed hash against the expected one.
            Some(h) => {
                if h.lock().unwrap()[&filename] == res {
                    if !info.quiet {
                        println!("{}: OK", filename.as_path().display());
                    }
                } else {
                    println!("{}: FAILED", filename.as_path().display());
                    info.hash_errors.fetch_add(1, Ordering::SeqCst);
                }
            },
            // Generate mode: print the hash and the path.
            None => {
                if !info.quiet {
                    println!("{} {}", res, filename.as_path().display());
                }
            },
        }
    }

    if !info.quiet && info.debug {
        message::debug(format!("thread {} has run out of work", info.thread_id));
    }

    Ok(())
}

pub fn verify(
    cpus: usize,
    algorithm: String,
    debug: bool,
    quiet: bool,
    checksum_files: Vec<PathBuf>,
) -> (
    Vec<JoinHandle<Result<(), String>>>,
    Arc<AtomicI32>,
    Arc<AtomicI32>,
) {
    let mut handles = vec![];
    let mut hash_map: HashMap<PathBuf, String> = HashMap::new();
    let mut buffer = VecDeque::new();

    // Parse each checksum file: every valid line is "<hash> <path>".
    for file in checksum_files {
        match read_to_string(&file) {
            Err(e) => {
                message::error(format!("{}: {}", file.as_path().display(), e));
                continue;
            },
            Ok(f) => {
                for line in f.lines() {
                    let split: Vec<String> =
                        line.split_whitespace().map(|x| x.to_string()).collect();
                    match split.len() {
                        2 => {
                            hash_map.insert(PathBuf::from(split[1].clone()), split[0].clone());
                            buffer.push_back(PathBuf::from(split[1].clone()));
                        },
                        _ => message::error(format!("malformed line: {}", line)),
                    }
                }
            },
        };
    }

    // Never spawn more threads than there are files to hash.
    let threads = if buffer.len() >= cpus { cpus } else { buffer.len() };

    let arc_fe = Arc::new(AtomicI32::new(0));
    let arc_he = Arc::new(AtomicI32::new(0));
    let arc_buf = Arc::new(Mutex::new(buffer));
    let arc_hash = Arc::new(Mutex::new(hash_map));

    for i in 0..threads {
        let safe_fe = Arc::clone(&arc_fe);
        let safe_he = Arc::clone(&arc_he);
        let safe_buf = Arc::clone(&arc_buf);
        let safe_alg = Arc::new(algorithm.clone());
        let safe_hash = Arc::clone(&arc_hash);

        handles.push(thread::spawn(move || {
            hash(ThreadInfo {
                debug,
                quiet,
                thread_id: i,
                filenames: safe_buf,
                algorithm: safe_alg,
                hash_map: Some(safe_hash),
                file_errors: safe_fe,
                hash_errors: safe_he,
            })
        }));
    }

    (handles, arc_fe, arc_he)
}

pub fn generate(
    cpus: usize,
    buffer: VecDeque<PathBuf>,
    algorithm: String,
    debug: bool,
    quiet: bool,
) -> (
    Vec<JoinHandle<Result<(), String>>>,
    Arc<AtomicI32>,
    Arc<AtomicI32>,
) {
    // Never spawn more threads than there are files to hash.
    let threads = if buffer.len() >= cpus { cpus } else { buffer.len() };

    let mut handles = vec![];
    let arc_fe = Arc::new(AtomicI32::new(0));
    let arc_he = Arc::new(AtomicI32::new(0));
    let arc_buf = Arc::new(Mutex::new(buffer));

    for i in 0..threads {
        let safe_fe = Arc::clone(&arc_fe);
        let safe_he = Arc::clone(&arc_he);
        let safe_buf = Arc::clone(&arc_buf);
        let safe_alg = Arc::new(algorithm.clone());

        handles.push(thread::spawn(move || {
            hash(ThreadInfo {
                debug,
                quiet,
                thread_id: i,
                filenames: safe_buf,
                algorithm: safe_alg,
                hash_map: None,
                file_errors: safe_fe,
                hash_errors: safe_he,
            })
        }));
    }

    (handles, arc_fe, arc_he)
}