[hashgood] / src / calculate.rs
use super::Algorithm;
use crossbeam_channel::bounded;
use crossbeam_channel::Receiver;
use crypto::digest::Digest;
use crypto::md5::Md5;
use crypto::sha1::Sha1;
use crypto::sha2::Sha256;
use std::error::Error;
use std::fs::File;
use std::io::prelude::*;
use std::path::PathBuf;
use std::sync::Arc;
use std::thread;
use std::thread::JoinHandle;

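/// The result of a digest calculation: one `(Algorithm, digest bytes)` pair per requested algorithm, or an error.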
pub type CalculateResult = Result<Vec<(Algorithm, Vec<u8>)>, Box<dyn Error>>;

/// For a given path to the input (may be "-" for STDIN), try to obtain a reader for the data within it.
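///
/// A minimal usage sketch (illustrative only, not compiled as a doc-test; visibility and module path may differ):
///
/// ```ignore
/// use std::io::Read;
/// use std::path::PathBuf;
///
/// // "-" selects standard input; any other value is opened as a file.
/// let mut reader = get_input_reader(&PathBuf::from("-")).unwrap();
/// let mut data = Vec::new();
/// reader.read_to_end(&mut data).unwrap();
/// ```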
pub fn get_input_reader(input: &PathBuf) -> Result<Box<dyn Read>, Box<dyn Error>> {
    if input.to_str() == Some("-") {
        // Special case: standard input
        return Ok(Box::new(std::io::stdin()));
    }
    Ok(Box::new(File::open(input)?))
}

/// For the given input stream, calculate all requested digest types
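///
/// A minimal usage sketch (illustrative only, not compiled as a doc-test; visibility and module path may differ):
///
/// ```ignore
/// use std::io::Cursor;
///
/// let input: Box<dyn Read> = Box::new(Cursor::new(b"hello".to_vec()));
/// let digests = create_digests(&[Algorithm::Sha256], input).unwrap();
/// assert_eq!(digests.len(), 1);
/// let (_algorithm, bytes) = &digests[0];
/// assert_eq!(bytes.len(), 32); // SHA-256 digests are 32 bytes long
/// ```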
pub fn create_digests(algorithms: &[Algorithm], mut input: Box<dyn Read>) -> CalculateResult {
    let mut senders = vec![];
    let mut handles = vec![];

    if algorithms.contains(&Algorithm::Md5) {
        let (s, r) = bounded::<Arc<Vec<u8>>>(1);
        senders.push(s);
        handles.push(md5_digest(r));
    }
    if algorithms.contains(&Algorithm::Sha1) {
        let (s, r) = bounded::<Arc<Vec<u8>>>(1);
        senders.push(s);
        handles.push(sha1_digest(r));
    }
    if algorithms.contains(&Algorithm::Sha256) {
        let (s, r) = bounded::<Arc<Vec<u8>>>(1);
        senders.push(s);
        handles.push(sha256_digest(r));
    }

    // The input is read in 64 KB chunks, and each chunk is supplied to all hashing threads at once.
    // Right now that could be up to three threads. If CPU-bound, the other threads will mostly
    // block while the slowest one finishes.
    const BUF_SIZE: usize = 1024 * 64;
    let mut buf = [0; BUF_SIZE];
    while let Ok(size) = input.read(&mut buf) {
        if size == 0 {
            break;
        } else {
            // Create a shared read-only copy for the hashers to take as input,
            // so that buf is freed up for more reading
            let chunk = Arc::new(buf[0..size].to_vec());
            for s in &senders {
                s.send(chunk.clone())?;
            }
        }
    }
    // Dropping the senders disconnects the channels, so each hashing thread's receive loop ends
    drop(senders);
    // Once all data has been sent we just have to wait for the digests to fall out
    Ok(handles.into_iter().map(|h| h.join().unwrap()).collect())
}

/// Calculate the md5 digest of some data on the given channel
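///
/// A sketch of how a worker pairs with its channel (illustrative only; the `tx`/`rx` names below are not from this module):
///
/// ```ignore
/// let (tx, rx) = crossbeam_channel::bounded(1);
/// let handle = md5_digest(rx);
/// tx.send(std::sync::Arc::new(b"abc".to_vec())).unwrap();
/// drop(tx); // disconnecting the channel lets the worker finish
/// let (_algorithm, digest) = handle.join().unwrap();
/// assert_eq!(digest.len(), 16); // MD5 digests are 16 bytes long
/// ```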
fn md5_digest(rx: Receiver<Arc<Vec<u8>>>) -> JoinHandle<(Algorithm, Vec<u8>)> {
    thread::spawn(move || {
        let mut md5 = Md5::new();
        while let Ok(chunk) = rx.recv() {
            md5.input(&chunk);
        }
        let mut result = [0; 16];
        md5.result(&mut result);
        (Algorithm::Md5, result.to_vec())
    })
}

/// Calculate the sha1 digest of some data on the given channel
fn sha1_digest(rx: Receiver<Arc<Vec<u8>>>) -> JoinHandle<(Algorithm, Vec<u8>)> {
    thread::spawn(move || {
        let mut sha1 = Sha1::new();
        while let Ok(chunk) = rx.recv() {
            sha1.input(&chunk);
        }
        let mut result = [0; 20];
        sha1.result(&mut result);
        (Algorithm::Sha1, result.to_vec())
    })
}

/// Calculate the sha256 digest of some data on the given channel
fn sha256_digest(rx: Receiver<Arc<Vec<u8>>>) -> JoinHandle<(Algorithm, Vec<u8>)> {
    thread::spawn(move || {
        let mut sha256 = Sha256::new();
        while let Ok(chunk) = rx.recv() {
            sha256.input(&chunk);
        }
        let mut result = [0; 32];
        sha256.result(&mut result);
        (Algorithm::Sha256, result.to_vec())
    })
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::io::Cursor;

    const SMALL_DATA: [u8; 10] = [b'A'; 10];
    // python3 -c 'print ("A"*10, end="", flush=True)' | md5sum
    const SMALL_DATA_MD5: &'static str = "16c52c6e8326c071da771e66dc6e9e57";
    // python3 -c 'print ("A"*10, end="", flush=True)' | sha1sum
    const SMALL_DATA_SHA1: &'static str = "c71613a7386fd67995708464bf0223c0d78225c4";
    // python3 -c 'print ("A"*10, end="", flush=True)' | sha256sum
    const SMALL_DATA_SHA256: &'static str =
        "1d65bf29403e4fb1767522a107c827b8884d16640cf0e3b18c4c1dd107e0d49d";

    const LARGE_DATA: [u8; 1_000_000] = [b'B'; 1_000_000];
    // python3 -c 'print ("B"*1000000, end="", flush=True)' | md5sum
    const LARGE_DATA_MD5: &'static str = "9171f6d67a87ca649a702434a03458a1";
    // python3 -c 'print ("B"*1000000, end="", flush=True)' | sha1sum
    const LARGE_DATA_SHA1: &'static str = "cfae4cebfd01884111bdede7cf983626bb249c94";
    // python3 -c 'print ("B"*1000000, end="", flush=True)' | sha256sum
    const LARGE_DATA_SHA256: &'static str =
        "b9193853f7798e92e2f6b82eda336fa7d6fc0fa90fdefe665f372b0bad8cdf8c";

    fn verify_digest(alg: Algorithm, data: &'static [u8], hash: &str) {
        let reader = Cursor::new(&*data);
        let digests = create_digests(&[alg], Box::new(reader)).unwrap();
        assert_eq!(digests.len(), 1);
        assert_eq!(digests[0], (alg, hex::decode(hash).unwrap()));
    }

    /// Assert that digests for all algorithms are calculated correctly for a small piece
    /// of test data (single block).
    #[test]
    fn small_digests() {
        verify_digest(Algorithm::Md5, &SMALL_DATA, &SMALL_DATA_MD5);
        verify_digest(Algorithm::Sha1, &SMALL_DATA, &SMALL_DATA_SHA1);
        verify_digest(Algorithm::Sha256, &SMALL_DATA, &SMALL_DATA_SHA256);
    }

    /// Assert that digests for all algorithms are calculated correctly for a large piece
    /// of test data. For our purposes, "large" means that it spans several of the 64 KB
    /// blocks used to break up the input processing. Using one million bytes instead of
    /// 1 MiB means that the final block will be slightly smaller than the others.
    #[test]
    fn large_digests() {
        verify_digest(Algorithm::Md5, &LARGE_DATA, &LARGE_DATA_MD5);
        verify_digest(Algorithm::Sha1, &LARGE_DATA, &LARGE_DATA_SHA1);
        verify_digest(Algorithm::Sha256, &LARGE_DATA, &LARGE_DATA_SHA256);
    }
}