Algorithm, CandidateHash, CandidateHashes, Hash, MatchLevel, MessageLevel, Opt, Verification,
VerificationSource,
};
-use clipboard::ClipboardContext;
-use clipboard::ClipboardProvider;
-use regex::Regex;
+#[cfg(feature = "paste")]
+use copypasta::{ClipboardContext, ClipboardProvider};
use std::fs::File;
use std::io;
use std::io::prelude::*;
/// Determine the candidate hash(es) to verify against, based on the user's
/// options, trying each input source in priority order: an explicit hash
/// parameter first, then the clipboard (when requested), then a hash file.
///
/// Returns `Ok(Some(..))` when a source yielded candidates, or `Err` with a
/// user-facing message when reading/parsing a source failed.
pub fn get_candidate_hashes(opt: &Opt) -> Result<Option<CandidateHashes>, String> {
    if let Some(hash_string) = &opt.hash {
        return Ok(Some(get_by_parameter(hash_string)?));
    } else if opt.get_paste() {
        // Clipboard support is feature-gated; get_paste() reflects that.
        return Ok(Some(get_from_clipboard()?));
    } else if let Some(hash_file) = &opt.hash_file {
        return Ok(Some(get_from_file(hash_file)?));
/// Generate a candidate hash from the system clipboard, or throw an error.
fn get_from_clipboard() -> Result<CandidateHashes, String> {
- let mut ctx: ClipboardContext = match ClipboardProvider::new() {
- Ok(ctx) => ctx,
- Err(e) => return Err(format!("Error getting system clipboard: {}", e)),
- };
+ #[cfg(feature = "paste")]
+ {
+ let mut ctx: ClipboardContext = match ClipboardContext::new() {
+ Ok(ctx) => ctx,
+ Err(e) => return Err(format!("Error getting system clipboard: {}", e)),
+ };
- let possible_hash = match ctx.get_contents() {
- Ok(value) => value,
- Err(e) => format!("Error reading from clipboard: {}", e),
- };
+ let possible_hash = match ctx.get_contents() {
+ Ok(value) => value,
+ Err(e) => format!("Error reading from clipboard: {}", e),
+ };
- let bytes = hex::decode(&possible_hash)
- .map_err(|_| "Clipboard contains invalid or truncated hex".to_owned())?;
- let alg = Algorithm::from_len(bytes.len())?;
- let candidate = CandidateHash {
- filename: None,
- bytes,
- };
- Ok(CandidateHashes {
- alg,
- hashes: vec![candidate],
- source: VerificationSource::Clipboard,
- })
+ let bytes = hex::decode(&possible_hash)
+ .map_err(|_| "Clipboard contains invalid or truncated hex".to_owned())?;
+ let alg = Algorithm::from_len(bytes.len())?;
+ let candidate = CandidateHash {
+ filename: None,
+ bytes,
+ };
+ return Ok(CandidateHashes {
+ alg,
+ hashes: vec![candidate],
+ source: VerificationSource::Clipboard,
+ });
+ }
+ #[cfg(not(feature = "paste"))]
+ {
+ return Err("Paste not implemented".to_owned());
+ }
}
/// Generate a candidate hash from the digests file specified (could be "-" for STDIN), or throw an error.
))
}
+fn try_parse_hash(s: &str) -> Option<(Algorithm, Vec<u8>)> {
+ let bytes = match hex::decode(s.trim()) {
+ Ok(bytes) => bytes,
+ _ => return None,
+ };
+ let alg = match Algorithm::from_len(bytes.len()) {
+ Ok(alg) => alg,
+ _ => return None,
+ };
+ Some((alg, bytes))
+}
+
fn read_raw_candidate_from_file(line: &str, path: &PathBuf) -> Option<CandidateHashes> {
- // It is a little sad to use a dynamic regex in an otherwise nice Rust program
- // These deserve to be replaced with a good old fashioned static parser
- // But let's be honest: the impact is negligible
- let re = Regex::new(r"^([[:xdigit:]]{32}|[[:xdigit:]]{40}|[[:xdigit:]]{64})$").unwrap();
- if re.is_match(line) {
- // These should both always succeed due to the matching
- let bytes = match hex::decode(line) {
- Ok(bytes) => bytes,
- _ => return None,
- };
- let alg = match Algorithm::from_len(bytes.len()) {
- Ok(alg) => alg,
- _ => return None,
- };
- return Some(CandidateHashes {
- alg,
- source: VerificationSource::RawFile(path.clone()),
- hashes: vec![CandidateHash {
- bytes,
- filename: None,
- }],
- });
- }
- None
+ let (alg, bytes) = try_parse_hash(line)?;
+ Some(CandidateHashes {
+ alg,
+ source: VerificationSource::RawFile(path.clone()),
+ hashes: vec![CandidateHash {
+ bytes,
+ filename: None,
+ }],
+ })
}
/// Parse coreutils-style digest lines (as produced by `md5sum`/`shasum`):
/// one `<hash><space><space-or-*><filename>` entry per line. Blank lines are
/// ignored; any other unparseable line, or a mix of digest algorithms,
/// aborts the whole parse with `None`.
fn read_coreutils_digests_from_file<I, S>(lines: I, path: &PathBuf) -> Option<CandidateHashes>
where
    I: Iterator<Item = io::Result<S>>,
    S: AsRef<str>,
{
    let mut hashes = vec![];
    // Set by the first parsed line; all later lines must agree with it.
    let mut alg: Option<Algorithm> = None;
    for l in lines {
        if let Ok(l) = l {
            let l = l.as_ref().trim();
            // Allow (ignore) blank lines
            if l.is_empty() {
                continue;
            }
            // Expected format
            // <valid-hash><space><space-or-*><filename>
            // Split on the first space; skip the one-character mode marker
            // (' ' for text mode, '*' for binary) that precedes the filename.
            let (line_alg, bytes, filename) = match l
                .find(' ')
                .and_then(|space_pos| (l.get(0..space_pos)).zip(l.get(space_pos + 2..)))
                .and_then(|(maybe_hash, filename)| {
                    try_parse_hash(maybe_hash).map(|(alg, bytes)| (alg, bytes, filename))
                }) {
                Some(t) => t,
                None => {
                    // if we have a line with content we cannot parse, this is an error
                    return None;
                }
            };
            if alg.is_some() && alg != Some(line_alg) {
                // Different algorithms in the same digest file are not supported
                return None;
            } else {
                // If we are the first line, we define the overall algorithm
                alg = Some(line_alg);
            }
            // So far so good - create an entry for this line
            hashes.push(CandidateHash {
                bytes,
                filename: Some(filename.to_owned()),
            });
        }
    }
messages,
}
}

// Unit tests for the raw-digest and coreutils-digest parsers.
#[cfg(test)]
mod tests {
    use std::path::Path;

    use super::*;

    // A bare hex digest is recognized purely by length: 32 hex chars = MD5,
    // 40 = SHA-1, 64 = SHA-256. Case is irrelevant; anything else is rejected.
    #[test]
    fn test_read_raw_inputs() {
        let example_path: PathBuf = "some_file".into();
        let valid_md5 = "d229da563da18fe5d58cd95a6467d584";
        let valid_sha1 = "b314c7ebb7d599944981908b7f3ed33a30e78f3a";
        // Uppercase hex must also be accepted.
        let valid_sha1_2 = valid_sha1.to_uppercase();
        let valid_sha256 = "1eb85fc97224598dad1852b5d6483bbcf0aa8608790dcc657a5a2a761ae9c8c6";

        let invalid1 = "x";
        let invalid2 = "a";
        // One hex digit short of an MD5.
        let invalid3 = "d229da563da18fe5d58cd95a6467d58";
        // One hex digit longer than a SHA-256.
        let invalid4 = "1eb85fc97224598dad1852b5d6483bbcf0aa8608790dcc657a5a2a761ae9c8c67";
        // Interior whitespace makes the hex invalid.
        let invalid5 = "1eb85fc97224598dad1852b5d 483bbcf0aa8608790dcc657a5a2a761ae9c8c6";

        assert!(matches!(
            read_raw_candidate_from_file(valid_md5, &example_path),
            Some(CandidateHashes {
                alg: Algorithm::Md5,
                ..
            })
        ));
        assert!(matches!(
            read_raw_candidate_from_file(valid_sha1, &example_path),
            Some(CandidateHashes {
                alg: Algorithm::Sha1,
                ..
            })
        ));
        assert!(matches!(
            read_raw_candidate_from_file(&valid_sha1_2, &example_path),
            Some(CandidateHashes {
                alg: Algorithm::Sha1,
                ..
            })
        ));
        assert!(matches!(
            read_raw_candidate_from_file(valid_sha256, &example_path),
            Some(CandidateHashes {
                alg: Algorithm::Sha256,
                ..
            })
        ));

        for i in &[invalid1, invalid2, invalid3, invalid4, invalid5] {
            assert!(read_raw_candidate_from_file(*i, &example_path).is_none());
        }
    }

    // A shasums-style file: binary-mode marker (" *cp"), text-mode entries
    // (two spaces), and a blank line that must be skipped. All entries share
    // one algorithm (SHA-1) and each keeps its filename.
    #[test]
    fn test_read_shasums() {
        let shasums = "4b91f7a387a6edd4a7c0afb2897f1ca968c9695b *cp
75eb7420a9f5a260b04a3e8ad51e50f2838a17fc  lel.txt

fe6c26d485a3573a1cb0ad0682f5105325a1905f  shasums";
        let lines = shasums.lines().map(|l| std::io::Result::Ok(l));
        let path = Path::new("SHASUMS").to_owned();
        let candidates = read_coreutils_digests_from_file(lines, &path);

        assert_eq!(
            candidates,
            Some(CandidateHashes {
                alg: Algorithm::Sha1,
                hashes: vec![
                    CandidateHash {
                        bytes: hex::decode("4b91f7a387a6edd4a7c0afb2897f1ca968c9695b").unwrap(),
                        filename: Some("cp".to_owned()),
                    },
                    CandidateHash {
                        bytes: hex::decode("75eb7420a9f5a260b04a3e8ad51e50f2838a17fc").unwrap(),
                        filename: Some("lel.txt".to_owned()),
                    },
                    CandidateHash {
                        bytes: hex::decode("fe6c26d485a3573a1cb0ad0682f5105325a1905f").unwrap(),
                        filename: Some("shasums".to_owned()),
                    }
                ],
                source: VerificationSource::DigestsFile(path),
            })
        );
    }
}