Fix clippy warnings (#744)

Clippy is returning some warnings.  Let's fix or explicitly ignore
them.  In particular:

- In `components/imageproc/src/lib.rs`, we implement `Hash` explicitly
  but derive `PartialEq`.  We need to maintain the property that two
  keys being equal implies the hashes of those two keys are equal.
  Our `Hash` implementations preserve this, so we'll explicitly ignore
  the warnings (see the `Hash` sketch after this list).

- In `components/site/src/lib.rs`, we were calling `.into()` on some
  values that are already of the correct type.

- In `components/site/src/lib.rs`, we were using `.map(|x| *x)` in
  iterator chains to remove a level of indirection; we can instead say
  `.copied()` (introduced in Rust v1.36) or `.cloned()`.  Using
  `.copied()` here is better from a type-checking point of view, but
  we'll use `.cloned()` for now, as Rust v1.36 was only recently
  released (see the `.copied()` sketch after this list).

- In `components/templates/src/filters.rs` and
  `components/utils/src/site.rs`, we were taking `HashMap`s as
  function arguments but not generically accepting alternate `Hasher`
  implementations (see the `BuildHasher` sketch after this list).

- In `src/cmd/check.rs`, we use `env::current_dir()` as a default
  value, but our use of `unwrap_or` meant that we would always
  retrieve the current directory even when not needed (see the
  `unwrap_or_else` sketch after this list).

- In `components/errors/src/lib.rs`, we can use `if let` rather than
  `match`.

- In `components/library/src/content/page.rs`, we can collapse a
  nested conditional into `else if let ...`.

- In `components/library/src/sorting.rs`, a function takes `&&Page`
  arguments.  Clippy warns about this for efficiency reasons, but
  we're doing it here to match a particular sorting API, so we'll
  explicitly ignore the warning (see the comparator sketch after this
  list).
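
To illustrate the property in the first item, here's a minimal sketch
using a made-up `Op` enum (not the real `ResizeOp`/`Format` types): the
hand-written `Hash` feeds the hasher exactly the data the derived
`PartialEq` compares, so equal values hash equally and the lint is safe
to allow.

    use std::hash::{Hash, Hasher};

    #[derive(PartialEq, Eq, Clone, Copy)]
    enum Op {
        Scale(u32),
        Fit(u32),
    }

    // Clippy's derive_hash_xor_eq lint fires on this impl.  Allowing it
    // is sound here because we hash exactly the fields that PartialEq
    // compares, so a == b still implies hash(a) == hash(b).
    #[allow(clippy::derive_hash_xor_eq)]
    impl Hash for Op {
        fn hash<H: Hasher>(&self, hasher: &mut H) {
            match *self {
                Op::Scale(n) => {
                    hasher.write_u8(0);
                    hasher.write_u32(n);
                }
                Op::Fit(n) => {
                    hasher.write_u8(1);
                    hasher.write_u32(n);
                }
            }
        }
    }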
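
The `.copied()` point in the third item, sketched: all three forms
below are equivalent for `i32`, but `.copied()` only compiles for
`Copy` element types, so it can never silently hide an expensive clone
the way `.cloned()` can.

    fn main() {
        let xs = vec![1, 2, 3];

        // What clippy flags: a map that only dereferences.
        let a: Vec<i32> = xs.iter().map(|x| *x).collect();

        // Works for any Clone type, and on compilers before Rust v1.36:
        let b: Vec<i32> = xs.iter().cloned().collect();

        // Only compiles for Copy types (Rust v1.36+), so the type
        // checker rejects it if an expensive clone would sneak in:
        let c: Vec<i32> = xs.iter().copied().collect();

        assert!(a == b && b == c);
    }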
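
The `BuildHasher` change in the fourth item has this general shape (the
`lookup` helper below is hypothetical, not code from this commit):
accepting `S: BuildHasher` lets callers pass a `HashMap` built with any
hasher, not just the default `RandomState`.

    use std::collections::HashMap;
    use std::hash::BuildHasher;

    // Hypothetical helper, generic over the map's hasher.
    fn lookup<'a, S: BuildHasher>(
        map: &'a HashMap<String, String, S>,
        key: &str,
    ) -> Option<&'a str> {
        map.get(key).map(|v| v.as_str())
    }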
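
The `unwrap_or_else` point in the fifth item is about eager versus lazy
evaluation: the argument to `unwrap_or` is computed before the call,
while the closure passed to `unwrap_or_else` runs only in the `None`
case.  A minimal sketch:

    use std::env;
    use std::path::PathBuf;

    fn base(base_path: Option<&str>) -> PathBuf {
        // With unwrap_or, env::current_dir() would run eagerly, even
        // when base_path is Some(..).  With unwrap_or_else, the
        // closure runs only when base_path is None.
        base_path.map(PathBuf::from).unwrap_or_else(|| env::current_dir().unwrap())
    }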
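
Finally, why the `&&Page` comparator in the last item: sorting a
`Vec<&Page>` with `sort_by` hands the comparator references to the
elements, i.e. `&&Page`.  A sketch with a stand-in `Page` type:

    use std::cmp::Ordering;

    struct Page {
        date: u32,
    }

    // Clippy's trivially_copy_pass_by_ref would suggest plain `&Page`,
    // but the double reference is what Vec<&Page>::sort_by passes.
    #[allow(clippy::trivially_copy_pass_by_ref)]
    fn sort_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
        b.date.cmp(&a.date)
    }

    fn main() {
        let (p1, p2) = (Page { date: 1 }, Page { date: 2 });
        let mut pages: Vec<&Page> = vec![&p1, &p2];
        pages.sort_by(sort_pages_by_date);
    }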

index-subcmd
traviscross · Vincent Prouillet · 5 years ago
parent commit 46ee256ba4

8 changed files with 31 additions and 20 deletions:
  1. components/errors/src/lib.rs            +3  -4
  2. components/imageproc/src/lib.rs         +2  -0
  3. components/library/src/content/page.rs  +3  -5
  4. components/library/src/sorting.rs       +1  -0
  5. components/site/src/lib.rs              +5  -5
  6. components/templates/src/filters.rs     +13 -3
  7. components/utils/src/site.rs            +3  -2
  8. src/cmd/check.rs                        +1  -1

components/errors/src/lib.rs (+3 -4)

@@ -31,10 +31,9 @@ impl StdError for Error {
     fn source(&self) -> Option<&(dyn StdError + 'static)> {
         let mut source = self.source.as_ref().map(|c| &**c);
         if source.is_none() {
-            match self.kind {
-                ErrorKind::Tera(ref err) => source = err.source(),
-                _ => (),
-            };
+            if let ErrorKind::Tera(ref err) = self.kind {
+                source = err.source();
+            }
         }

         source

components/imageproc/src/lib.rs (+2 -0)

@@ -129,6 +129,7 @@ impl From<ResizeOp> for u8 {
     }
 }

+#[allow(clippy::derive_hash_xor_eq)]
 impl Hash for ResizeOp {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
         hasher.write_u8(u8::from(*self));
@@ -194,6 +195,7 @@ impl Format {
     }
 }

+#[allow(clippy::derive_hash_xor_eq)]
 impl Hash for Format {
     fn hash<H: Hasher>(&self, hasher: &mut H) {
         use Format::*;

components/library/src/content/page.rs (+3 -5)

@@ -171,12 +171,10 @@ impl Page {
             } else {
                 slugify(&page.file.name)
             }
+        } else if let Some(slug) = slug_from_dated_filename {
+            slugify(&slug)
         } else {
-            if let Some(slug) = slug_from_dated_filename {
-                slugify(&slug)
-            } else {
-                slugify(&page.file.name)
-            }
+            slugify(&page.file.name)
         }
     };


components/library/src/sorting.rs (+1 -0)

@@ -8,6 +8,7 @@ use content::Page;

 /// Used by the RSS feed
 /// There to not have to import sorting stuff in the site crate
+#[allow(clippy::trivially_copy_pass_by_ref)]
 pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
     let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap());
     if ord == Ordering::Equal {

components/site/src/lib.rs (+5 -5)

@@ -323,7 +323,7 @@ impl Site {
             })
             .collect::<Vec<_>>()
             .join("\n");
-        Err(Error { kind: ErrorKind::Msg(msg.into()), source: None })
+        Err(Error { kind: ErrorKind::Msg(msg), source: None })
     }

     pub fn check_external_links(&self) -> Result<()> {
@@ -352,7 +352,7 @@ impl Site {
         let pool = rayon::ThreadPoolBuilder::new()
             .num_threads(threads)
             .build()
-            .map_err(|e| Error { kind: ErrorKind::Msg(e.to_string().into()), source: None })?;
+            .map_err(|e| Error { kind: ErrorKind::Msg(e.to_string()), source: None })?;

         let errors: Vec<_> = pool.install(|| {
             all_links
@@ -383,7 +383,7 @@ impl Site {
             })
             .collect::<Vec<_>>()
             .join("\n");
-        Err(Error { kind: ErrorKind::Msg(msg.into()), source: None })
+        Err(Error { kind: ErrorKind::Msg(msg), source: None })
     }

     /// Insert a default index section for each language if necessary so we don't need to create
@@ -699,7 +699,7 @@ impl Site {
                 .pages_values()
                 .iter()
                 .filter(|p| p.lang == self.config.default_language)
-                .map(|p| *p)
+                .cloned()
                 .collect()
         } else {
             library.pages_values()
@@ -712,7 +712,7 @@ impl Site {
                 continue;
             }
             let pages =
-                library.pages_values().iter().filter(|p| p.lang == lang.code).map(|p| *p).collect();
+                library.pages_values().iter().filter(|p| p.lang == lang.code).cloned().collect();
             self.render_rss_feed(pages, Some(&PathBuf::from(lang.code.clone())))?;
         }

components/templates/src/filters.rs (+13 -3)

@@ -1,10 +1,14 @@
 use std::collections::HashMap;
+use std::hash::BuildHasher;

 use base64::{decode, encode};
 use pulldown_cmark as cmark;
 use tera::{to_value, Result as TeraResult, Value};

-pub fn markdown(value: &Value, args: &HashMap<String, Value>) -> TeraResult<Value> {
+pub fn markdown<S: BuildHasher>(
+    value: &Value,
+    args: &HashMap<String, Value, S>,
+) -> TeraResult<Value> {
     let s = try_get_value!("markdown", "value", String, value);
     let inline = match args.get("inline") {
         Some(val) => try_get_value!("markdown", "inline", bool, val),
@@ -30,12 +34,18 @@ pub fn markdown(value: &Value, args: &HashMap<String, Value>) -> TeraResult<Valu
     Ok(to_value(&html).unwrap())
 }

-pub fn base64_encode(value: &Value, _: &HashMap<String, Value>) -> TeraResult<Value> {
+pub fn base64_encode<S: BuildHasher>(
+    value: &Value,
+    _: &HashMap<String, Value, S>,
+) -> TeraResult<Value> {
     let s = try_get_value!("base64_encode", "value", String, value);
     Ok(to_value(&encode(s.as_bytes())).unwrap())
 }

-pub fn base64_decode(value: &Value, _: &HashMap<String, Value>) -> TeraResult<Value> {
+pub fn base64_decode<S: BuildHasher>(
+    value: &Value,
+    _: &HashMap<String, Value, S>,
+) -> TeraResult<Value> {
     let s = try_get_value!("base64_decode", "value", String, value);
     Ok(to_value(&String::from_utf8(decode(s.as_bytes()).unwrap()).unwrap()).unwrap())
 }

components/utils/src/site.rs (+3 -2)

@@ -1,4 +1,5 @@
 use std::collections::HashMap;
+use std::hash::BuildHasher;
 use unicode_segmentation::UnicodeSegmentation;

 use errors::Result;
@@ -23,9 +24,9 @@ pub struct ResolvedInternalLink {

 /// Resolves an internal link (of the `@/posts/something.md#hey` sort) to its absolute link and
 /// returns the path + anchor as well
-pub fn resolve_internal_link(
+pub fn resolve_internal_link<S: BuildHasher>(
     link: &str,
-    permalinks: &HashMap<String, String>,
+    permalinks: &HashMap<String, String, S>,
 ) -> Result<ResolvedInternalLink> {
     // First we remove the ./ since that's zola specific
     let clean_link = link.replacen("@/", "", 1);

src/cmd/check.rs (+1 -1)

@@ -7,7 +7,7 @@ use site::Site;
 use console;

 pub fn check(config_file: &str, base_path: Option<&str>, base_url: Option<&str>) -> Result<()> {
-    let bp = base_path.map(PathBuf::from).unwrap_or(env::current_dir().unwrap());
+    let bp = base_path.map(PathBuf::from).unwrap_or_else(|| env::current_dir().unwrap());
     let mut site = Site::new(bp, config_file)?;
     // Force the checking of external links
     site.config.check_external_links = true;
