diff --git a/Cargo.lock b/Cargo.lock index 705e048..c2303b1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -229,9 +229,10 @@ dependencies = [ [[package]] name = "bincode" -version = "1.0.1" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ + "autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.87 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1372,7 +1373,7 @@ dependencies = [ [[package]] name = "notify" -version = "4.0.8" +version = "4.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2260,7 +2261,7 @@ name = "syntect" version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "bincode 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "flate2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3040,7 +3041,7 @@ dependencies = [ "errors 0.1.0", "front_matter 0.1.0", "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", - "notify 4.0.8 (registry+https://github.com/rust-lang/crates.io-index)", + "notify 4.0.9 (registry+https://github.com/rust-lang/crates.io-index)", "rebuild 0.1.0", "site 0.1.0", "termcolor 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", @@ -3069,7 +3070,7 @@ dependencies = [ "checksum backtrace-sys 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "797c830ac25ccc92a7f8a7b9862bde440715531514594a6154e3d4a54dd769b6" "checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e" "checksum base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "489d6c0ed21b11d038c31b6ceccca973e65d73ba3bd8ecb9a2babf5546164643" -"checksum bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9f2fb9e29e72fd6bc12071533d5dc7664cb01480c59406f656d7ac25c7bd8ff7" +"checksum bincode 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "58470ad6460f0b0e89b0df5f17b8bd77ebae26af69dca0bd9ddc8b9e38abb2ff" "checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d" "checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12" "checksum block-buffer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab" @@ -3191,7 +3192,7 @@ dependencies = [ "checksum nix 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d37e713a259ff641624b6cb20e3b12b2952313ba36b6823c0f16e6cfd9e5de17" "checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" "checksum nom 4.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b30adc557058ce00c9d0d7cb3c6e0b5bc6f36e2e2eabe74b0ba726d194abd588" -"checksum notify 4.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = 
"c9b605e417814e88bb051c88a84f83655d6ad4fa32fc36d9a96296d86087692d" +"checksum notify 4.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9cc7ed2bd4b7edad3ee93b659c38e53dabb619f7274e127a0fab054ad2bb998d" "checksum num-derive 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d9fe8fcafd1b86a37ce8a1cfa15ae504817e0c8c2e7ad42767371461ac1d316d" "checksum num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea" "checksum num-iter 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "af3fdbbc3291a5464dc57b03860ec37ca6bf915ed6ee385e7c6c052c422b2124" diff --git a/components/errors/src/lib.rs b/components/errors/src/lib.rs index c8ada64..e9a271f 100755 --- a/components/errors/src/lib.rs +++ b/components/errors/src/lib.rs @@ -32,10 +32,8 @@ impl StdError for Error { let mut source = self.source.as_ref().map(|c| &**c); if source.is_none() { match self.kind { - ErrorKind::Tera(ref err) => { - source = err.source() - }, - _ => () + ErrorKind::Tera(ref err) => source = err.source(), + _ => (), }; } @@ -68,7 +66,6 @@ impl Error { } } - impl From<&str> for Error { fn from(e: &str) -> Self { Self::msg(e) diff --git a/components/front_matter/src/lib.rs b/components/front_matter/src/lib.rs index c0ca8b7..204582c 100644 --- a/components/front_matter/src/lib.rs +++ b/components/front_matter/src/lib.rs @@ -12,7 +12,7 @@ extern crate toml; extern crate errors; extern crate utils; -use errors::{Result, Error}; +use errors::{Error, Result}; use regex::Regex; use std::path::Path; @@ -72,7 +72,10 @@ pub fn split_section_content( ) -> Result<(SectionFrontMatter, String)> { let (front_matter, content) = split_content(file_path, content)?; let meta = SectionFrontMatter::parse(&front_matter).map_err(|e| { - Error::chain(format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()), e) + Error::chain( + format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()), + e, + ) })?; Ok((meta, content)) } @@ -82,7 +85,10 @@ pub fn split_section_content( pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontMatter, String)> { let (front_matter, content) = split_content(file_path, content)?; let meta = PageFrontMatter::parse(&front_matter).map_err(|e| { - Error::chain(format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()), e) + Error::chain( + format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()), + e, + ) })?; Ok((meta, content)) } diff --git a/components/imageproc/src/lib.rs b/components/imageproc/src/lib.rs index 4ebdbea..91d1ec3 100644 --- a/components/imageproc/src/lib.rs +++ b/components/imageproc/src/lib.rs @@ -20,7 +20,7 @@ use image::{FilterType, GenericImageView}; use rayon::prelude::*; use regex::Regex; -use errors::{Result, Error}; +use errors::{Error, Result}; use utils::fs as ufs; static RESIZED_SUBDIR: &'static str = "processed_images"; diff --git a/components/library/src/content/page.rs b/components/library/src/content/page.rs index 420e414..c344d50 100644 --- a/components/library/src/content/page.rs +++ b/components/library/src/content/page.rs @@ -8,7 +8,7 @@ use slug::slugify; use tera::{Context as TeraContext, Tera}; use config::Config; -use errors::{Result, Error}; +use errors::{Error, Result}; use front_matter::{split_page_content, InsertAnchor, PageFrontMatter}; use library::Library; use rendering::{render_content, Header, RenderContext}; @@ 
-126,7 +126,16 @@ impl Page { page.reading_time = Some(reading_time); let mut slug_from_dated_filename = None; - if let Some(ref caps) = RFC3339_DATE.captures(&page.file.name.replace(".md", "")) { + let file_path = if page.file.name == "index" { + if let Some(parent) = page.file.path.parent() { + parent.file_name().unwrap().to_str().unwrap().to_string() + } else { + page.file.name.replace(".md", "") + } + } else { + page.file.name.replace(".md", "") + }; + if let Some(ref caps) = RFC3339_DATE.captures(&file_path) { slug_from_dated_filename = Some(caps.name("slug").unwrap().as_str().to_string()); if page.meta.date.is_none() { page.meta.date = Some(caps.name("datetime").unwrap().as_str().to_string()); @@ -139,7 +148,11 @@ impl Page { slug.trim().to_string() } else if page.file.name == "index" { if let Some(parent) = page.file.path.parent() { - slugify(parent.file_name().unwrap().to_str().unwrap()) + if let Some(slug) = slug_from_dated_filename { + slugify(&slug) + } else { + slugify(parent.file_name().unwrap().to_str().unwrap()) + } } else { slugify(&page.file.name) } @@ -233,8 +246,9 @@ impl Page { context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None)); - let res = render_content(&self.raw_content, &context) - .map_err(|e| Error::chain(format!("Failed to render content of {}", self.file.path.display()), e))?; + let res = render_content(&self.raw_content, &context).map_err(|e| { + Error::chain(format!("Failed to render content of {}", self.file.path.display()), e) + })?; self.summary = res.summary_len.map(|l| res.body[0..l].to_owned()); self.content = res.body; @@ -257,8 +271,9 @@ impl Page { context.insert("page", &self.to_serialized(library)); context.insert("lang", &self.lang); - render_template(&tpl_name, tera, context, &config.theme) - .map_err(|e| Error::chain(format!("Failed to render page '{}'", self.file.path.display()), e)) + render_template(&tpl_name, tera, context, &config.theme).map_err(|e| { + Error::chain(format!("Failed to render page '{}'", self.file.path.display()), e) + }) } /// Creates a vectors of asset URLs. 
@@ -499,6 +514,31 @@ Hello world assert_eq!(page.permalink, "http://a-website.com/posts/hey/"); } + // https://github.com/getzola/zola/issues/607 + #[test] + fn page_with_assets_and_date_in_folder_name() { + let tmp_dir = tempdir().expect("create temp dir"); + let path = tmp_dir.path(); + create_dir(&path.join("content")).expect("create content temp dir"); + create_dir(&path.join("content").join("posts")).expect("create posts temp dir"); + let nested_path = path.join("content").join("posts").join("2013-06-02_with-assets"); + create_dir(&nested_path).expect("create nested temp dir"); + let mut f = File::create(nested_path.join("index.md")).unwrap(); + f.write_all(b"+++\n\n+++\n").unwrap(); + File::create(nested_path.join("example.js")).unwrap(); + File::create(nested_path.join("graph.jpg")).unwrap(); + File::create(nested_path.join("fail.png")).unwrap(); + + let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default()); + assert!(res.is_ok()); + let page = res.unwrap(); + assert_eq!(page.file.parent, path.join("content").join("posts")); + assert_eq!(page.slug, "with-assets"); + assert_eq!(page.meta.date, Some("2013-06-02".to_string())); + assert_eq!(page.assets.len(), 3); + assert_eq!(page.permalink, "http://a-website.com/posts/with-assets/"); + } + #[test] fn page_with_ignored_assets_filters_out_correct_files() { let tmp_dir = tempdir().expect("create temp dir"); diff --git a/components/library/src/content/section.rs b/components/library/src/content/section.rs index 9994b57..3da005e 100644 --- a/components/library/src/content/section.rs +++ b/components/library/src/content/section.rs @@ -5,7 +5,7 @@ use slotmap::Key; use tera::{Context as TeraContext, Tera}; use config::Config; -use errors::{Result, Error}; +use errors::{Error, Result}; use front_matter::{split_section_content, SectionFrontMatter}; use rendering::{render_content, Header, RenderContext}; use utils::fs::{find_related_assets, read_file}; @@ -171,8 +171,9 @@ impl Section { context.tera_context.insert("section", &SerializingSection::from_section_basic(self, None)); - let res = render_content(&self.raw_content, &context) - .map_err(|e| Error::chain(format!("Failed to render content of {}", self.file.path.display()), e))?; + let res = render_content(&self.raw_content, &context).map_err(|e| { + Error::chain(format!("Failed to render content of {}", self.file.path.display()), e) + })?; self.content = res.body; self.toc = res.toc; Ok(()) @@ -189,8 +190,9 @@ impl Section { context.insert("section", &self.to_serialized(library)); context.insert("lang", &self.lang); - render_template(tpl_name, tera, context, &config.theme) - .map_err(|e| Error::chain(format!("Failed to render section '{}'", self.file.path.display()), e)) + render_template(tpl_name, tera, context, &config.theme).map_err(|e| { + Error::chain(format!("Failed to render section '{}'", self.file.path.display()), e) + }) } /// Is this the index section? 
diff --git a/components/library/src/library.rs b/components/library/src/library.rs index 793e70f..c724a2e 100644 --- a/components/library/src/library.rs +++ b/components/library/src/library.rs @@ -5,9 +5,9 @@ use slotmap::{DenseSlotMap, Key}; use front_matter::SortBy; +use config::Config; use content::{Page, Section}; use sorting::{find_siblings, sort_pages_by_date, sort_pages_by_weight}; -use config::Config; /// Houses everything about pages and sections /// Think of it as a database where each page and section has an id (Key here) diff --git a/components/library/src/pagination/mod.rs b/components/library/src/pagination/mod.rs index 6f47cbe..fd7f57f 100644 --- a/components/library/src/pagination/mod.rs +++ b/components/library/src/pagination/mod.rs @@ -4,7 +4,7 @@ use slotmap::Key; use tera::{to_value, Context, Tera, Value}; use config::Config; -use errors::{Result, Error}; +use errors::{Error, Result}; use utils::templates::render_template; use content::{Section, SerializingPage, SerializingSection}; diff --git a/components/library/src/taxonomies/mod.rs b/components/library/src/taxonomies/mod.rs index 0756b53..a82c3e5 100644 --- a/components/library/src/taxonomies/mod.rs +++ b/components/library/src/taxonomies/mod.rs @@ -5,7 +5,7 @@ use slug::slugify; use tera::{Context, Tera}; use config::{Config, Taxonomy as TaxonomyConfig}; -use errors::{Result, Error}; +use errors::{Error, Result}; use utils::templates::render_template; use content::SerializingPage; @@ -48,7 +48,13 @@ pub struct TaxonomyItem { } impl TaxonomyItem { - pub fn new(name: &str, taxonomy: &TaxonomyConfig, config: &Config, keys: Vec, library: &Library) -> Self { + pub fn new( + name: &str, + taxonomy: &TaxonomyConfig, + config: &Config, + keys: Vec, + library: &Library, + ) -> Self { // Taxonomy are almost always used for blogs so we filter by dates // and it's not like we can sort things across sections by anything other // than dates @@ -145,7 +151,9 @@ impl Taxonomy { context.insert("current_path", &format!("/{}/{}", self.kind.name, item.slug)); render_template(&format!("{}/single.html", self.kind.name), tera, context, &config.theme) - .map_err(|e| Error::chain(format!("Failed to render single term {} page.", self.kind.name), e)) + .map_err(|e| { + Error::chain(format!("Failed to render single term {} page.", self.kind.name), e) + }) } pub fn render_all_terms( @@ -164,7 +172,9 @@ impl Taxonomy { context.insert("current_path", &self.kind.name); render_template(&format!("{}/list.html", self.kind.name), tera, context, &config.theme) - .map_err(|e| Error::chain(format!("Failed to render a list of {} page.", self.kind.name), e)) + .map_err(|e| { + Error::chain(format!("Failed to render a list of {} page.", self.kind.name), e) + }) } pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> { @@ -232,7 +242,7 @@ mod tests { use super::*; use std::collections::HashMap; - use config::{Config, Taxonomy as TaxonomyConfig, Language}; + use config::{Config, Language, Taxonomy as TaxonomyConfig}; use content::Page; use library::Library; @@ -242,9 +252,21 @@ mod tests { let mut library = Library::new(2, 0, false); config.taxonomies = vec![ - TaxonomyConfig { name: "categories".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "authors".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() }, + TaxonomyConfig { + 
name: "categories".to_string(), + lang: config.default_language.clone(), + ..TaxonomyConfig::default() + }, + TaxonomyConfig { + name: "tags".to_string(), + lang: config.default_language.clone(), + ..TaxonomyConfig::default() + }, + TaxonomyConfig { + name: "authors".to_string(), + lang: config.default_language.clone(), + ..TaxonomyConfig::default() + }, ]; let mut page1 = Page::default(); @@ -324,8 +346,11 @@ mod tests { let mut config = Config::default(); let mut library = Library::new(2, 0, false); - config.taxonomies = - vec![TaxonomyConfig { name: "authors".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() }]; + config.taxonomies = vec![TaxonomyConfig { + name: "authors".to_string(), + lang: config.default_language.clone(), + ..TaxonomyConfig::default() + }]; let mut page1 = Page::default(); let mut taxo_page1 = HashMap::new(); taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); @@ -346,13 +371,25 @@ mod tests { #[test] fn can_make_taxonomies_in_multiple_languages() { let mut config = Config::default(); - config.languages.push(Language {rss: false, code: "fr".to_string()}); + config.languages.push(Language { rss: false, code: "fr".to_string() }); let mut library = Library::new(2, 0, true); config.taxonomies = vec![ - TaxonomyConfig { name: "categories".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "tags".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() }, - TaxonomyConfig { name: "auteurs".to_string(), lang: "fr".to_string(), ..TaxonomyConfig::default() }, + TaxonomyConfig { + name: "categories".to_string(), + lang: config.default_language.clone(), + ..TaxonomyConfig::default() + }, + TaxonomyConfig { + name: "tags".to_string(), + lang: config.default_language.clone(), + ..TaxonomyConfig::default() + }, + TaxonomyConfig { + name: "auteurs".to_string(), + lang: "fr".to_string(), + ..TaxonomyConfig::default() + }, ]; let mut page1 = Page::default(); @@ -410,7 +447,10 @@ mod tests { assert_eq!(authors.items[0].name, "Vincent Prouillet"); assert_eq!(authors.items[0].slug, "vincent-prouillet"); - assert_eq!(authors.items[0].permalink, "http://a-website.com/fr/auteurs/vincent-prouillet/"); + assert_eq!( + authors.items[0].permalink, + "http://a-website.com/fr/auteurs/vincent-prouillet/" + ); assert_eq!(authors.items[0].pages.len(), 1); assert_eq!(categories.items[0].name, "Other"); @@ -430,7 +470,7 @@ mod tests { #[test] fn errors_on_taxonomy_of_different_language() { let mut config = Config::default(); - config.languages.push(Language {rss: false, code: "fr".to_string()}); + config.languages.push(Language { rss: false, code: "fr".to_string() }); let mut library = Library::new(2, 0, false); config.taxonomies = diff --git a/components/rebuild/src/lib.rs b/components/rebuild/src/lib.rs index 9ba83cb..ca4250b 100644 --- a/components/rebuild/src/lib.rs +++ b/components/rebuild/src/lib.rs @@ -155,12 +155,14 @@ fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> { SectionChangesNeeded::Sort => { site.register_tera_global_fns(); } - SectionChangesNeeded::Render => { - site.render_section(&site.library.read().unwrap().get_section(&pathbuf).unwrap(), false)? - } - SectionChangesNeeded::RenderWithPages => { - site.render_section(&site.library.read().unwrap().get_section(&pathbuf).unwrap(), true)? 
- } + SectionChangesNeeded::Render => site.render_section( + &site.library.read().unwrap().get_section(&pathbuf).unwrap(), + false, + )?, + SectionChangesNeeded::RenderWithPages => site.render_section( + &site.library.read().unwrap().get_section(&pathbuf).unwrap(), + true, + )?, // not a common enough operation to make it worth optimizing SectionChangesNeeded::Delete | SectionChangesNeeded::Transparent => { site.build()?; @@ -182,7 +184,7 @@ macro_rules! render_parent_sections { ($site: expr, $path: expr) => { for s in $site.library.read().unwrap().find_parent_sections($path) { $site.render_section(s, false)?; - }; + } }; } @@ -230,7 +232,9 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> { } PageChangesNeeded::Render => { render_parent_sections!(site, path); - site.render_page(&site.library.read().unwrap().get_page(&path.to_path_buf()).unwrap())?; + site.render_page( + &site.library.read().unwrap().get_page(&path.to_path_buf()).unwrap(), + )?; } }; } diff --git a/components/rendering/src/markdown.rs b/components/rendering/src/markdown.rs index 3c6f20b..9bbb26a 100644 --- a/components/rendering/src/markdown.rs +++ b/components/rendering/src/markdown.rs @@ -4,7 +4,7 @@ use pulldown_cmark as cmark; use slug::slugify; use syntect::easy::HighlightLines; use syntect::html::{ - IncludeBackground, start_highlighted_html_snippet, styled_line_to_highlighted_html, + start_highlighted_html_snippet, styled_line_to_highlighted_html, IncludeBackground, }; use config::highlighting::{get_highlighter, SYNTAX_SET, THEME_SET}; @@ -12,13 +12,14 @@ use context::RenderContext; use errors::{Error, Result}; use front_matter::InsertAnchor; use link_checker::check_url; -use table_of_contents::{Header, make_table_of_contents}; +use table_of_contents::{make_table_of_contents, Header}; use utils::site::resolve_internal_link; use utils::vec::InsertMany; use self::cmark::{Event, Options, Parser, Tag}; -const CONTINUE_READING: &str = "

\n"; +const CONTINUE_READING: &str = + "

\n"; const ANCHOR_LINK_TEMPLATE: &str = "anchor-link.html"; #[derive(Debug)] @@ -88,9 +89,7 @@ fn fix_link(link: &str, context: &RenderContext) -> Result { if res.is_valid() { link.to_string() } else { - return Err( - format!("Link {} is not valid: {}", link, res.message()).into(), - ); + return Err(format!("Link {} is not valid: {}", link, res.message()).into()); } } else { link.to_string() @@ -148,78 +147,84 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result { - // if we are in the middle of a code block - if let Some((ref mut highlighter, in_extra)) = highlighter { - let highlighted = if in_extra { - if let Some(ref extra) = context.config.extra_syntax_set { - highlighter.highlight(&text, &extra) + let mut events = Parser::new_ext(content, opts) + .map(|event| { + match event { + Event::Text(text) => { + // if we are in the middle of a code block + if let Some((ref mut highlighter, in_extra)) = highlighter { + let highlighted = if in_extra { + if let Some(ref extra) = context.config.extra_syntax_set { + highlighter.highlight(&text, &extra) + } else { + unreachable!( + "Got a highlighter from extra syntaxes but no extra?" + ); + } } else { - unreachable!("Got a highlighter from extra syntaxes but no extra?"); - } - } else { - highlighter.highlight(&text, &SYNTAX_SET) - }; - //let highlighted = &highlighter.highlight(&text, ss); - let html = styled_line_to_highlighted_html(&highlighted, background); - return Event::Html(Owned(html)); - } + highlighter.highlight(&text, &SYNTAX_SET) + }; + //let highlighted = &highlighter.highlight(&text, ss); + let html = styled_line_to_highlighted_html(&highlighted, background); + return Event::Html(Owned(html)); + } - // Business as usual - Event::Text(text) - } - Event::Start(Tag::CodeBlock(ref info)) => { - if !context.config.highlight_code { - return Event::Html(Borrowed("
"));
+                        // Business as usual
+                        Event::Text(text)
                     }
+                    Event::Start(Tag::CodeBlock(ref info)) => {
+                        if !context.config.highlight_code {
+                            return Event::Html(Borrowed("
"));
+                        }
 
-                    let theme = &THEME_SET.themes[&context.config.highlight_theme];
-                    highlighter = Some(get_highlighter(info, &context.config));
-                    // This selects the background color the same way that start_coloured_html_snippet does
-                    let color =
-                        theme.settings.background.unwrap_or(::syntect::highlighting::Color::WHITE);
-                    background = IncludeBackground::IfDifferent(color);
-                    let snippet = start_highlighted_html_snippet(theme);
-                    Event::Html(Owned(snippet.0))
-                }
-                Event::End(Tag::CodeBlock(_)) => {
-                    if !context.config.highlight_code {
-                        return Event::Html(Borrowed("</code></pre>\n"));
+                        let theme = &THEME_SET.themes[&context.config.highlight_theme];
+                        highlighter = Some(get_highlighter(info, &context.config));
+                        // This selects the background color the same way that start_coloured_html_snippet does
+                        let color = theme
+                            .settings
+                            .background
+                            .unwrap_or(::syntect::highlighting::Color::WHITE);
+                        background = IncludeBackground::IfDifferent(color);
+                        let snippet = start_highlighted_html_snippet(theme);
+                        Event::Html(Owned(snippet.0))
                     }
-                    // reset highlight and close the code block
-                    highlighter = None;
-                    Event::Html(Borrowed("</pre>
")) - } - Event::Start(Tag::Image(src, title)) => { - if is_colocated_asset_link(&src) { - return Event::Start(Tag::Image( - Owned(format!("{}{}", context.current_page_permalink, src)), - title, - )); + Event::End(Tag::CodeBlock(_)) => { + if !context.config.highlight_code { + return Event::Html(Borrowed("\n")); + } + // reset highlight and close the code block + highlighter = None; + Event::Html(Borrowed("")) } - - Event::Start(Tag::Image(src, title)) - } - Event::Start(Tag::Link(link, title)) => { - let fixed_link = match fix_link(&link, context) { - Ok(fixed_link) => fixed_link, - Err(err) => { - error = Some(err); - return Event::Html(Borrowed("")) + Event::Start(Tag::Image(src, title)) => { + if is_colocated_asset_link(&src) { + return Event::Start(Tag::Image( + Owned(format!("{}{}", context.current_page_permalink, src)), + title, + )); } - }; - Event::Start(Tag::Link(Owned(fixed_link), title)) - } - Event::Html(ref markup) if markup.contains("") => { - has_summary = true; - Event::Html(Borrowed(CONTINUE_READING)) + Event::Start(Tag::Image(src, title)) + } + Event::Start(Tag::Link(link, title)) => { + let fixed_link = match fix_link(&link, context) { + Ok(fixed_link) => fixed_link, + Err(err) => { + error = Some(err); + return Event::Html(Borrowed("")); + } + }; + + Event::Start(Tag::Link(Owned(fixed_link), title)) + } + Event::Html(ref markup) if markup.contains("") => { + has_summary = true; + Event::Html(Borrowed(CONTINUE_READING)) + } + _ => event, } - _ => event, - } - }).collect::>(); // We need to collect the events to make a second pass + }) + .collect::>(); // We need to collect the events to make a second pass let header_refs = get_header_refs(&events); @@ -228,7 +233,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result Result Result<()> { if self.output_path.exists() { // Delete current `public` directory so we can start fresh - remove_dir_all(&self.output_path).map_err(|e| Error::chain("Couldn't delete output directory", e))?; + remove_dir_all(&self.output_path) + .map_err(|e| Error::chain("Couldn't delete output directory", e))?; } Ok(()) @@ -544,12 +555,8 @@ impl Site { if !lang.rss { continue; } - let pages = library - .pages_values() - .iter() - .filter(|p| p.lang == lang.code) - .map(|p| *p) - .collect(); + let pages = + library.pages_values().iter().filter(|p| p.lang == lang.code).map(|p| *p).collect(); self.render_rss_feed(pages, Some(&PathBuf::from(lang.code.clone())))?; } @@ -735,7 +742,8 @@ impl Site { } else { self.output_path.join(&taxonomy.kind.name) }; - let list_output = taxonomy.render_all_terms(&self.tera, &self.config, &self.library.read().unwrap())?; + let list_output = + taxonomy.render_all_terms(&self.tera, &self.config, &self.library.read().unwrap())?; create_directory(&output_path)?; create_file(&output_path.join("index.html"), &self.inject_livereload(list_output))?; let library = self.library.read().unwrap(); @@ -794,14 +802,20 @@ impl Site { let mut sections = self .library - .read().unwrap() + .read() + .unwrap() .sections_values() .iter() .filter(|s| s.meta.render) .map(|s| SitemapEntry::new(s.permalink.clone(), None)) .collect::>(); - for section in - self.library.read().unwrap().sections_values().iter().filter(|s| s.meta.paginate_by.is_some()) + for section in self + .library + .read() + .unwrap() + .sections_values() + .iter() + .filter(|s| s.meta.paginate_by.is_some()) { let number_pagers = (section.pages.len() as f64 / section.meta.paginate_by.unwrap() as f64) @@ -971,9 +985,13 @@ impl Site { } if 
section.meta.is_paginated() { - self.render_paginated(&output_path, &Paginator::from_section(§ion, &self.library.read().unwrap()))?; + self.render_paginated( + &output_path, + &Paginator::from_section(§ion, &self.library.read().unwrap()), + )?; } else { - let output = section.render_html(&self.tera, &self.config, &self.library.read().unwrap())?; + let output = + section.render_html(&self.tera, &self.config, &self.library.read().unwrap())?; create_file(&output_path.join("index.html"), &self.inject_livereload(output))?; } @@ -985,7 +1003,8 @@ impl Site { self.render_section( &self .library - .read().unwrap() + .read() + .unwrap() .get_section(&self.content_path.join("_index.md")) .expect("Failed to get index section"), false, @@ -995,7 +1014,8 @@ impl Site { /// Renders all sections pub fn render_sections(&self) -> Result<()> { self.library - .read().unwrap() + .read() + .unwrap() .sections_values() .into_par_iter() .map(|s| self.render_section(s, true)) @@ -1026,8 +1046,12 @@ impl Site { .map(|pager| { let page_path = folder_path.join(&format!("{}", pager.index)); create_directory(&page_path)?; - let output = - paginator.render_pager(pager, &self.config, &self.tera, &self.library.read().unwrap())?; + let output = paginator.render_pager( + pager, + &self.config, + &self.tera, + &self.library.read().unwrap(), + )?; if pager.index > 1 { create_file(&page_path.join("index.html"), &self.inject_livereload(output))?; } else { diff --git a/components/site/tests/site.rs b/components/site/tests/site.rs index c85033a..9286b43 100644 --- a/components/site/tests/site.rs +++ b/components/site/tests/site.rs @@ -631,9 +631,8 @@ fn can_apply_page_templates() { assert_eq!(changed_recursively.meta.title, Some("Changed recursively".into())); // But it should not have override a children page_template - let yet_another_section = library - .get_section(&template_path.join("yet_another_section").join("_index.md")) - .unwrap(); + let yet_another_section = + library.get_section(&template_path.join("yet_another_section").join("_index.md")).unwrap(); assert_eq!(yet_another_section.subsections.len(), 0); assert_eq!(yet_another_section.pages.len(), 1); diff --git a/components/site/tests/site_i18n.rs b/components/site/tests/site_i18n.rs index f9b2a98..2f81c7c 100644 --- a/components/site/tests/site_i18n.rs +++ b/components/site/tests/site_i18n.rs @@ -23,8 +23,7 @@ fn can_parse_multilingual_site() { assert_eq!(default_index_section.pages.len(), 1); assert!(default_index_section.ancestors.is_empty()); - let fr_index_section = - library.get_section(&path.join("content").join("_index.fr.md")).unwrap(); + let fr_index_section = library.get_section(&path.join("content").join("_index.fr.md")).unwrap(); assert_eq!(fr_index_section.pages.len(), 1); assert!(fr_index_section.ancestors.is_empty()); @@ -139,5 +138,4 @@ fn can_build_multilingual_site() { assert!(!file_contains!(public, "fr/auteurs/index.html", "Queen")); assert!(file_contains!(public, "fr/auteurs/index.html", "Vincent")); assert!(!file_exists!(public, "fr/auteurs/vincent-prouillet/rss.xml")); - } diff --git a/components/templates/src/global_fns/load_data.rs b/components/templates/src/global_fns/load_data.rs index 4fec8a9..bc89c1b 100644 --- a/components/templates/src/global_fns/load_data.rs +++ b/components/templates/src/global_fns/load_data.rs @@ -183,7 +183,7 @@ impl LoadData { pub fn new(content_path: PathBuf, base_path: PathBuf) -> Self { let client = Arc::new(Mutex::new(Client::builder().build().expect("reqwest client build"))); let result_cache = 
Arc::new(Mutex::new(HashMap::new())); - Self {content_path, base_path, client, result_cache} + Self { content_path, base_path, client, result_cache } } } @@ -310,7 +310,7 @@ fn load_csv(csv_data: String) -> Result { #[cfg(test)] mod tests { - use super::{LoadData, DataSource, OutputFormat}; + use super::{DataSource, LoadData, OutputFormat}; use std::collections::HashMap; use std::path::PathBuf; diff --git a/components/templates/src/global_fns/mod.rs b/components/templates/src/global_fns/mod.rs index 2398386..cc34740 100644 --- a/components/templates/src/global_fns/mod.rs +++ b/components/templates/src/global_fns/mod.rs @@ -15,7 +15,7 @@ mod macros; mod load_data; - pub use self::load_data::LoadData; +pub use self::load_data::LoadData; #[derive(Debug)] pub struct Trans { @@ -23,7 +23,7 @@ pub struct Trans { } impl Trans { pub fn new(config: Config) -> Self { - Self {config} + Self { config } } } impl TeraFn for Trans { @@ -43,7 +43,7 @@ pub struct GetUrl { } impl GetUrl { pub fn new(config: Config, permalinks: HashMap) -> Self { - Self {config, permalinks} + Self { config, permalinks } } } impl TeraFn for GetUrl { @@ -88,7 +88,7 @@ pub struct ResizeImage { } impl ResizeImage { pub fn new(imageproc: Arc>) -> Self { - Self {imageproc} + Self { imageproc } } } @@ -154,7 +154,7 @@ impl GetTaxonomyUrl { } taxonomies.insert(taxonomy.kind.name.clone(), items); } - Self {taxonomies} + Self { taxonomies } } } impl TeraFn for GetTaxonomyUrl { @@ -188,7 +188,6 @@ impl TeraFn for GetTaxonomyUrl { } } - #[derive(Debug)] pub struct GetPage { base_path: PathBuf, @@ -196,7 +195,7 @@ pub struct GetPage { } impl GetPage { pub fn new(base_path: PathBuf, library: Arc>) -> Self { - Self {base_path: base_path.join("content"), library} + Self { base_path: base_path.join("content"), library } } } impl TeraFn for GetPage { @@ -209,9 +208,7 @@ impl TeraFn for GetPage { let full_path = self.base_path.join(&path); let library = self.library.read().unwrap(); match library.get_page(&full_path) { - Some(p) => { - Ok(to_value(p.to_serialized(&library)).unwrap()) - }, + Some(p) => Ok(to_value(p.to_serialized(&library)).unwrap()), None => Err(format!("Page `{}` not found.", path).into()), } } @@ -224,7 +221,7 @@ pub struct GetSection { } impl GetSection { pub fn new(base_path: PathBuf, library: Arc>) -> Self { - Self {base_path: base_path.join("content"), library} + Self { base_path: base_path.join("content"), library } } } impl TeraFn for GetSection { @@ -249,13 +246,12 @@ impl TeraFn for GetSection { } else { Ok(to_value(s.to_serialized(&library)).unwrap()) } - }, + } None => Err(format!("Section `{}` not found.", path).into()), } } } - #[derive(Debug)] pub struct GetTaxonomy { library: Arc>, @@ -267,7 +263,7 @@ impl GetTaxonomy { for taxo in all_taxonomies { taxonomies.insert(taxo.kind.name.clone(), taxo); } - Self {taxonomies, library} + Self { taxonomies, library } } } impl TeraFn for GetTaxonomy { @@ -278,16 +274,10 @@ impl TeraFn for GetTaxonomy { "`get_taxonomy` requires a `kind` argument with a string value" ); - match self.taxonomies.get(&kind) { - Some(t) => { - Ok(to_value(t.to_serialized(&self.library.read().unwrap())).unwrap()) - }, + match self.taxonomies.get(&kind) { + Some(t) => Ok(to_value(t.to_serialized(&self.library.read().unwrap())).unwrap()), None => { - Err(format!( - "`get_taxonomy` received an unknown taxonomy as kind: {}", - kind - ) - .into()) + Err(format!("`get_taxonomy` received an unknown taxonomy as kind: {}", kind).into()) } } } @@ -298,9 +288,9 @@ mod tests { use super::{GetTaxonomy, 
GetTaxonomyUrl, GetUrl, Trans}; use std::collections::HashMap; - use std::sync::{RwLock, Arc}; + use std::sync::{Arc, RwLock}; - use tera::{to_value, Value, Function}; + use tera::{to_value, Function, Value}; use config::{Config, Taxonomy as TaxonomyConfig}; use library::{Library, Taxonomy, TaxonomyItem}; @@ -348,9 +338,19 @@ mod tests { #[test] fn can_get_taxonomy() { let config = Config::default(); - let taxo_config = TaxonomyConfig { name: "tags".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() }; + let taxo_config = TaxonomyConfig { + name: "tags".to_string(), + lang: config.default_language.clone(), + ..TaxonomyConfig::default() + }; let library = Arc::new(RwLock::new(Library::new(0, 0, false))); - let tag = TaxonomyItem::new("Programming", &taxo_config, &config, vec![], &library.read().unwrap()); + let tag = TaxonomyItem::new( + "Programming", + &taxo_config, + &config, + vec![], + &library.read().unwrap(), + ); let tags = Taxonomy { kind: taxo_config, items: vec![tag] }; let taxonomies = vec![tags.clone()]; @@ -388,7 +388,11 @@ mod tests { #[test] fn can_get_taxonomy_url() { let config = Config::default(); - let taxo_config = TaxonomyConfig { name: "tags".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() }; + let taxo_config = TaxonomyConfig { + name: "tags".to_string(), + lang: config.default_language.clone(), + ..TaxonomyConfig::default() + }; let library = Library::new(0, 0, false); let tag = TaxonomyItem::new("Programming", &taxo_config, &config, vec![], &library); let tags = Taxonomy { kind: taxo_config, items: vec![tag] }; diff --git a/components/templates/src/lib.rs b/components/templates/src/lib.rs index 9f54ca8..05f782b 100644 --- a/components/templates/src/lib.rs +++ b/components/templates/src/lib.rs @@ -25,7 +25,7 @@ pub mod global_fns; use tera::{Context, Tera}; -use errors::{Result, Error}; +use errors::{Error, Result}; lazy_static! 
{ pub static ref ZOLA_TERA: Tera = { diff --git a/components/utils/src/fs.rs b/components/utils/src/fs.rs index fdbccbd..f9eb4ea 100644 --- a/components/utils/src/fs.rs +++ b/components/utils/src/fs.rs @@ -4,7 +4,7 @@ use std::path::{Path, PathBuf}; use std::time::SystemTime; use walkdir::WalkDir; -use errors::{Result, Error}; +use errors::{Error, Result}; pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result { let canonical_path = path @@ -19,8 +19,8 @@ pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result { /// Create a file with the content given pub fn create_file(path: &Path, content: &str) -> Result<()> { - let mut file = File::create(&path) - .map_err(|e| Error::chain(format!("Failed to create {:?}", path), e))?; + let mut file = + File::create(&path).map_err(|e| Error::chain(format!("Failed to create {:?}", path), e))?; file.write_all(content.as_bytes())?; Ok(()) } @@ -37,8 +37,9 @@ pub fn ensure_directory_exists(path: &Path) -> Result<()> { /// exists before creating it pub fn create_directory(path: &Path) -> Result<()> { if !path.exists() { - create_dir_all(path) - .map_err(|e| Error::chain(format!("Was not able to create folder {}", path.display()), e))?; + create_dir_all(path).map_err(|e| { + Error::chain(format!("Was not able to create folder {}", path.display()), e) + })?; } Ok(()) } diff --git a/components/utils/src/vec.rs b/components/utils/src/vec.rs index 778de4a..346769c 100644 --- a/components/utils/src/vec.rs +++ b/components/utils/src/vec.rs @@ -16,7 +16,7 @@ impl InsertMany for Vec { for (idx, elem) in elem_to_insert.into_iter() { let head_len = idx - last_idx; - inserted.extend(self.splice(0 .. head_len, std::iter::empty())); + inserted.extend(self.splice(0..head_len, std::iter::empty())); inserted.push(elem); last_idx = idx; } @@ -41,4 +41,4 @@ mod test { v2.insert_many(vec![(0, 0), (2, -1)]); assert_eq!(v2, &[0, 1, 2, -1, 3, 4, 5]); } -} \ No newline at end of file +} diff --git a/src/cmd/serve.rs b/src/cmd/serve.rs index 2deba22..07544a0 100644 --- a/src/cmd/serve.rs +++ b/src/cmd/serve.rs @@ -36,7 +36,7 @@ use ctrlc; use notify::{watcher, RecursiveMode, Watcher}; use ws::{Message, Sender, WebSocket}; -use errors::{Result, Error as ZolaError}; +use errors::{Error as ZolaError, Result}; use site::Site; use utils::fs::copy_file; @@ -296,11 +296,7 @@ pub fn serve( }; console::info(&msg); // Force refresh - rebuild_done_handling( - &broadcaster, - rebuild::after_template_change(site, &path), - "/x.js", - ); + rebuild_done_handling(&broadcaster, rebuild::after_template_change(site, &path), "/x.js"); }; let reload_sass = |site: &Site, path: &Path, partial_path: &Path| { diff --git a/src/console.rs b/src/console.rs index 719f3ad..0241cf3 100644 --- a/src/console.rs +++ b/src/console.rs @@ -7,8 +7,8 @@ use atty; use chrono::Duration; use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; -use site::Site; use errors::Error; +use site::Site; lazy_static! { /// Termcolor color choice. @@ -64,9 +64,7 @@ pub fn warn_about_ignored_pages(site: &Site) { let ignored_pages: Vec<_> = library .sections_values() .iter() - .flat_map(|s| { - s.ignored_pages.iter().map(|k| library.get_page_by_key(*k).file.path.clone()) - }) + .flat_map(|s| s.ignored_pages.iter().map(|k| library.get_page_by_key(*k).file.path.clone())) .collect(); if !ignored_pages.is_empty() {