From 46ee256ba48859424219b8dc50bb0b3ce5cf3153 Mon Sep 17 00:00:00 2001
From: traviscross
Date: Fri, 12 Jul 2019 08:29:44 +0000
Subject: [PATCH] Fix clippy warnings (#744)

Clippy is returning some warnings.  Let's fix or explicitly ignore
them.  In particular:

- In `components/imageproc/src/lib.rs`, we implement `Hash` explicitly
  but derive `PartialEq`.  We need to maintain the property that two
  keys being equal implies the hashes of those two keys are equal.
  Our `Hash` implementations preserve this (see the first sketch
  below), so we'll explicitly ignore the warnings.

- In `components/site/src/lib.rs`, we were calling `.into()` on some
  values that are already of the correct type.

- In `components/site/src/lib.rs`, we were using `.map(|x| *x)` in
  iterator chains to remove a level of indirection; we can instead say
  `.copied()` (introduced in Rust v1.36) or `.cloned()`.  Using
  `.copied()` here is better from a type-checking point of view, but
  we'll use `.cloned()` for now as Rust v1.36 was only recently
  released.

- In `components/templates/src/filters.rs` and
  `components/utils/src/site.rs`, we were taking `HashMap`s as function
  arguments but not generically accepting alternate `Hasher`
  implementations (a sketch of the generic signature follows below).

- In `src/cmd/check.rs`, we use `env::current_dir()` as a default
  value, but our use of `unwrap_or` meant that we would always retrieve
  the current directory even when not needed (the eager-versus-lazy
  difference is sketched below, together with the `.cloned()` change).

- In `components/errors/src/lib.rs`, we can use `if let` rather than
  `match`.

- In `components/library/src/content/page.rs`, we can collapse a
  nested conditional into `else if let ...`.

- In `components/library/src/sorting.rs`, a function takes `&&Page`
  arguments.  Clippy warns about this for efficiency reasons, but we're
  doing it here to match a particular sorting API, so we'll explicitly
  ignore the warning.
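To make the `Hash`/`PartialEq` point concrete, here is a minimal,
self-contained sketch using a made-up `Scale` enum rather than the
crate's actual `ResizeOp`/`Format` types: `PartialEq` is derived,
`Hash` is hand-written so that equal values always feed the hasher the
same bytes, and the lint is silenced with the same `allow` used in the
diff below.

```rust
use std::hash::{Hash, Hasher};

// Hypothetical stand-in for the real enums: `PartialEq` is derived, `Hash` is manual.
#[derive(PartialEq, Eq)]
enum Scale {
    Fit(u32),
    Fill(u32),
}

// Safe to silence the lint because equal values hash identically: we always
// write the variant tag first, then the payload.
#[allow(clippy::derive_hash_xor_eq)]
impl Hash for Scale {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        match self {
            Scale::Fit(w) => {
                hasher.write_u8(0);
                hasher.write_u32(*w);
            }
            Scale::Fill(w) => {
                hasher.write_u8(1);
                hasher.write_u32(*w);
            }
        }
    }
}

fn main() {
    use std::collections::HashSet;
    let mut set = HashSet::new();
    set.insert(Scale::Fit(800));
    set.insert(Scale::Fit(800)); // equal value, equal hash: still a single entry
    set.insert(Scale::Fill(800)); // different variant, different hash: a new entry
    assert_eq!(set.len(), 2);
}
```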
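The `HashMap` changes follow the shape suggested by clippy's
`implicit_hasher` lint: functions that only read from a map can be
generic over `S: BuildHasher` instead of hard-coding the default
hasher.  This sketch uses a hypothetical `permalink_for` helper, not
the actual zola filter signatures; the real functions in the diff
below are made generic in the same way.

```rust
use std::collections::HashMap;
use std::hash::BuildHasher;

// Generic over the map's hasher, so callers that use a non-default
// `BuildHasher` don't have to rebuild their map just to call this.
fn permalink_for<S: BuildHasher>(
    slug: &str,
    permalinks: &HashMap<String, String, S>,
) -> Option<String> {
    permalinks.get(slug).cloned()
}

fn main() {
    // A map with the default `RandomState` hasher still works unchanged.
    let mut permalinks = HashMap::new();
    permalinks.insert("hello-world".to_string(), "https://example.com/hello-world/".to_string());
    assert_eq!(
        permalink_for("hello-world", &permalinks),
        Some("https://example.com/hello-world/".to_string())
    );
}
```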
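Likewise, a small self-contained sketch of the eager-versus-lazy
default (clippy's `or_fun_call` lint) and of `.cloned()` replacing
`.map(|x| *x)`, using made-up values rather than the real `Site` and
`check` code:

```rust
use std::env;
use std::path::PathBuf;

fn main() {
    let base_path: Option<&str> = None;

    // `unwrap_or` evaluates its argument eagerly, so `env::current_dir()`
    // runs even when the option is `Some(..)`.
    let _eager = base_path.map(PathBuf::from).unwrap_or(env::current_dir().unwrap());

    // `unwrap_or_else` takes a closure, so the fallback only runs when needed.
    let lazy = base_path.map(PathBuf::from).unwrap_or_else(|| env::current_dir().unwrap());
    println!("base path: {}", lazy.display());

    // `.cloned()` (or `.copied()` on Rust 1.36+) replaces `.map(|x| *x)`
    // when an iterator yields references and owned values are wanted.
    let refs = vec![&1, &2, &3];
    let values: Vec<i32> = refs.into_iter().cloned().collect();
    assert_eq!(values, vec![1, 2, 3]);
}
```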
---
 components/errors/src/lib.rs           |  7 +++----
 components/imageproc/src/lib.rs        |  2 ++
 components/library/src/content/page.rs |  8 +++-----
 components/library/src/sorting.rs      |  1 +
 components/site/src/lib.rs             | 10 +++++-----
 components/templates/src/filters.rs    | 16 +++++++++++++---
 components/utils/src/site.rs           |  5 +++--
 src/cmd/check.rs                       |  2 +-
 8 files changed, 31 insertions(+), 20 deletions(-)

diff --git a/components/errors/src/lib.rs b/components/errors/src/lib.rs
index e9a271f..3d806cb 100755
--- a/components/errors/src/lib.rs
+++ b/components/errors/src/lib.rs
@@ -31,10 +31,9 @@ impl StdError for Error {
     fn source(&self) -> Option<&(dyn StdError + 'static)> {
         let mut source = self.source.as_ref().map(|c| &**c);
         if source.is_none() {
-            match self.kind {
-                ErrorKind::Tera(ref err) => source = err.source(),
-                _ => (),
-            };
+            if let ErrorKind::Tera(ref err) = self.kind {
+                source = err.source();
+            }
         }
 
         source
diff --git a/components/imageproc/src/lib.rs b/components/imageproc/src/lib.rs
index 7385efc..4708e96 100644
--- a/components/imageproc/src/lib.rs
+++ b/components/imageproc/src/lib.rs
@@ -129,6 +129,7 @@ impl From<ResizeOp> for u8 {
     }
 }
 
+#[allow(clippy::derive_hash_xor_eq)]
 impl Hash for ResizeOp {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
         hasher.write_u8(u8::from(*self));
@@ -194,6 +195,7 @@ impl Format {
     }
 }
 
+#[allow(clippy::derive_hash_xor_eq)]
 impl Hash for Format {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
         use Format::*;
diff --git a/components/library/src/content/page.rs b/components/library/src/content/page.rs
index 8d419c2..934ba6f 100644
--- a/components/library/src/content/page.rs
+++ b/components/library/src/content/page.rs
@@ -171,12 +171,10 @@ impl Page {
                 } else {
                     slugify(&page.file.name)
                 }
+            } else if let Some(slug) = slug_from_dated_filename {
+                slugify(&slug)
             } else {
-                if let Some(slug) = slug_from_dated_filename {
-                    slugify(&slug)
-                } else {
-                    slugify(&page.file.name)
-                }
+                slugify(&page.file.name)
             }
         };
 
diff --git a/components/library/src/sorting.rs b/components/library/src/sorting.rs
index c6cfd67..bd8b98a 100644
--- a/components/library/src/sorting.rs
+++ b/components/library/src/sorting.rs
@@ -8,6 +8,7 @@ use content::Page;
 
 /// Used by the RSS feed
 /// There to not have to import sorting stuff in the site crate
+#[allow(clippy::trivially_copy_pass_by_ref)]
 pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
     let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap());
     if ord == Ordering::Equal {
diff --git a/components/site/src/lib.rs b/components/site/src/lib.rs
index 57eb215..ccfddfc 100644
--- a/components/site/src/lib.rs
+++ b/components/site/src/lib.rs
@@ -323,7 +323,7 @@ impl Site {
             })
             .collect::<Vec<_>>()
             .join("\n");
-        Err(Error { kind: ErrorKind::Msg(msg.into()), source: None })
+        Err(Error { kind: ErrorKind::Msg(msg), source: None })
     }
 
     pub fn check_external_links(&self) -> Result<()> {
@@ -352,7 +352,7 @@ impl Site {
         let pool = rayon::ThreadPoolBuilder::new()
             .num_threads(threads)
             .build()
-            .map_err(|e| Error { kind: ErrorKind::Msg(e.to_string().into()), source: None })?;
+            .map_err(|e| Error { kind: ErrorKind::Msg(e.to_string()), source: None })?;
 
         let errors: Vec<_> = pool.install(|| {
             all_links
@@ -383,7 +383,7 @@ impl Site {
             })
             .collect::<Vec<_>>()
             .join("\n");
-        Err(Error { kind: ErrorKind::Msg(msg.into()), source: None })
+        Err(Error { kind: ErrorKind::Msg(msg), source: None })
     }
 
     /// Insert a default index section for each language if necessary so we don't need to create
@@ -699,7 +699,7 @@ impl Site {
                 .pages_values()
                 .iter()
                 .filter(|p| p.lang == self.config.default_language)
-                .map(|p| *p)
+                .cloned()
                 .collect()
         } else {
             library.pages_values()
@@ -712,7 +712,7 @@ impl Site {
                 continue;
             }
             let pages =
-                library.pages_values().iter().filter(|p| p.lang == lang.code).map(|p| *p).collect();
+                library.pages_values().iter().filter(|p| p.lang == lang.code).cloned().collect();
             self.render_rss_feed(pages, Some(&PathBuf::from(lang.code.clone())))?;
         }
 
diff --git a/components/templates/src/filters.rs b/components/templates/src/filters.rs
index 133feb8..53b9789 100644
--- a/components/templates/src/filters.rs
+++ b/components/templates/src/filters.rs
@@ -1,10 +1,14 @@
 use std::collections::HashMap;
+use std::hash::BuildHasher;
 
 use base64::{decode, encode};
 use pulldown_cmark as cmark;
 use tera::{to_value, Result as TeraResult, Value};
 
-pub fn markdown(value: &Value, args: &HashMap<String, Value>) -> TeraResult<Value> {
+pub fn markdown<S: BuildHasher>(
+    value: &Value,
+    args: &HashMap<String, Value, S>,
+) -> TeraResult<Value> {
     let s = try_get_value!("markdown", "value", String, value);
     let inline = match args.get("inline") {
         Some(val) => try_get_value!("markdown", "inline", bool, val),
@@ -30,12 +34,18 @@ pub fn markdown(value: &Value, args: &HashMap<String, Value>) -> TeraResult<Value> {
     Ok(to_value(&html).unwrap())
 }
 
-pub fn base64_encode(value: &Value, _: &HashMap<String, Value>) -> TeraResult<Value> {
+pub fn base64_encode<S: BuildHasher>(
+    value: &Value,
+    _: &HashMap<String, Value, S>,
+) -> TeraResult<Value> {
     let s = try_get_value!("base64_encode", "value", String, value);
     Ok(to_value(&encode(s.as_bytes())).unwrap())
 }
 
-pub fn base64_decode(value: &Value, _: &HashMap<String, Value>) -> TeraResult<Value> {
+pub fn base64_decode<S: BuildHasher>(
+    value: &Value,
+    _: &HashMap<String, Value, S>,
+) -> TeraResult<Value> {
     let s = try_get_value!("base64_decode", "value", String, value);
     Ok(to_value(&String::from_utf8(decode(s.as_bytes()).unwrap()).unwrap()).unwrap())
 }
diff --git a/components/utils/src/site.rs b/components/utils/src/site.rs
index 54bac1c..c846f7a 100644
--- a/components/utils/src/site.rs
+++ 
b/components/utils/src/site.rs
@@ -1,4 +1,5 @@
 use std::collections::HashMap;
+use std::hash::BuildHasher;
 
 use unicode_segmentation::UnicodeSegmentation;
 use errors::Result;
@@ -23,9 +24,9 @@ pub struct ResolvedInternalLink {
 
 /// Resolves an internal link (of the `@/posts/something.md#hey` sort) to its absolute link and
 /// returns the path + anchor as well
-pub fn resolve_internal_link(
+pub fn resolve_internal_link<S: BuildHasher>(
     link: &str,
-    permalinks: &HashMap<String, String>,
+    permalinks: &HashMap<String, String, S>,
 ) -> Result<ResolvedInternalLink> {
     // First we remove the ./ since that's zola specific
     let clean_link = link.replacen("@/", "", 1);
diff --git a/src/cmd/check.rs b/src/cmd/check.rs
index 32b6cb4..2700d1b 100644
--- a/src/cmd/check.rs
+++ b/src/cmd/check.rs
@@ -7,7 +7,7 @@ use site::Site;
 use console;
 
 pub fn check(config_file: &str, base_path: Option<&str>, base_url: Option<&str>) -> Result<()> {
-    let bp = base_path.map(PathBuf::from).unwrap_or(env::current_dir().unwrap());
+    let bp = base_path.map(PathBuf::from).unwrap_or_else(|| env::current_dir().unwrap());
     let mut site = Site::new(bp, config_file)?;
     // Force the checking of external links
     site.config.check_external_links = true;