rustfmt

index-subcmd
Vincent Prouillet, 4 years ago
commit b7ce4e59fb
52 changed files with 1420 additions and 1093 deletions
  1. +5 -5 build.rs
  2. +4 -4 components/config/examples/generate_sublime.rs
  3. +9 -40 components/config/src/config.rs
  4. +8 -7 components/config/src/highlighting.rs
  5. +1 -2 components/config/src/lib.rs
  6. +4 -6 components/config/src/theme.rs
  7. +2 -2 components/errors/src/lib.rs
  8. +22 -14 components/front_matter/src/lib.rs
  9. +14 -13 components/front_matter/src/page.rs
  10. +2 -3 components/front_matter/src/section.rs
  11. +55 -40 components/imageproc/src/lib.rs
  12. +2 -1 components/library/src/content/file_info.rs
  13. +40 -55 components/library/src/content/page.rs
  14. +22 -26 components/library/src/content/section.rs
  15. +34 -13 components/library/src/content/ser.rs
  16. +13 -13 components/library/src/lib.rs
  17. +31 -24 components/library/src/library.rs
  18. +39 -28 components/library/src/pagination/mod.rs
  19. +28 -36 components/library/src/sorting.rs
  20. +51 -37 components/library/src/taxonomies/mod.rs
  21. +3 -9 components/link_checker/src/lib.rs
  22. +41 -21 components/rebuild/src/lib.rs
  23. +64 -53 components/rebuild/tests/rebuild.rs
  24. +13 -10 components/rendering/benches/all.rs
  25. +2 -3 components/rendering/src/context.rs
  26. +8 -8 components/rendering/src/lib.rs
  27. +17 -15 components/rendering/src/markdown.rs
  28. +88 -80 components/rendering/src/shortcode.rs
  29. +27 -13 components/rendering/src/table_of_contents.rs
  30. +131 -82 components/rendering/tests/markdown.rs
  31. +12 -6 components/search/src/lib.rs
  32. +1 -2 components/site/benches/load.rs
  33. +4 -5 components/site/benches/site.rs
  34. +91 -47 components/site/src/lib.rs
  35. +128 -49 components/site/tests/site.rs
  36. +19 -22 components/templates/src/filters.rs
  37. +131 -70 components/templates/src/global_fns/load_data.rs
  38. +4 -4 components/templates/src/global_fns/macros.rs
  39. +50 -82 components/templates/src/global_fns/mod.rs
  40. +7 -10 components/templates/src/lib.rs
  41. +12 -7 components/utils/src/fs.rs
  42. +2 -2 components/utils/src/lib.rs
  43. +1 -3 components/utils/src/net.rs
  44. +1 -2 components/utils/src/site.rs
  45. +26 -27 components/utils/src/templates.rs
  46. +1 -0 rustfmt.toml
  47. +5 -5 src/cmd/init.rs
  48. +2 -2 src/cmd/mod.rs
  49. +120 -58 src/cmd/serve.rs
  50. +6 -7 src/console.rs
  51. +10 -11 src/main.rs
  52. +7 -9 src/prompt.rs
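Every hunk below is a mechanical rustfmt pass; no behavior changes. The most common rewrite is collapsing a short multi-line expression onto one line when it fits the width limit, e.g. the `Taxonomy::default` literal in `components/config/src/config.rs`. A compilable sketch of the after-state (field types are guesses for illustration; only the names come from the diff):

```rust
// Reduced stand-in for config::Taxonomy; the real definition lives in
// components/config/src/config.rs and the field types here are assumptions.
pub struct Taxonomy {
    pub name: String,
    pub paginate_by: Option<usize>,
    pub paginate_path: Option<String>,
    pub rss: bool,
}

impl Default for Taxonomy {
    fn default() -> Taxonomy {
        // Before this commit the literal spanned six lines; rustfmt collapses
        // it because the whole expression fits within the configured width.
        Taxonomy { name: String::new(), paginate_by: None, paginate_path: None, rss: false }
    }
}
```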

build.rs (+5, -5)

@@ -7,9 +7,9 @@ include!("src/cli.rs");

fn main() {
// disabled below as it fails in CI
// let mut app = build_cli();
// app.gen_completions("zola", Shell::Bash, "completions/");
// app.gen_completions("zola", Shell::Fish, "completions/");
// app.gen_completions("zola", Shell::Zsh, "completions/");
// app.gen_completions("zola", Shell::PowerShell, "completions/");
// let mut app = build_cli();
// app.gen_completions("zola", Shell::Bash, "completions/");
// app.gen_completions("zola", Shell::Fish, "completions/");
// app.gen_completions("zola", Shell::Zsh, "completions/");
// app.gen_completions("zola", Shell::PowerShell, "completions/");
}

components/config/examples/generate_sublime.rs (+4, -4)

@@ -3,10 +3,10 @@
//! Although it is a valid example for serializing syntaxes, you probably won't need
//! to do this yourself unless you want to cache your own compiled grammars.
extern crate syntect;
use syntect::parsing::SyntaxSetBuilder;
use syntect::highlighting::ThemeSet;
use syntect::dumps::*;
use std::env;
use syntect::dumps::*;
use syntect::highlighting::ThemeSet;
use syntect::parsing::SyntaxSetBuilder;

fn usage_and_exit() -> ! {
println!("USAGE: cargo run --example generate_sublime synpack source-dir newlines.packdump nonewlines.packdump\n
@@ -32,7 +32,7 @@ fn main() {
println!("- {} -> {:?}", s.name, s.file_extensions);
}
}
},
}
(Some(ref cmd), Some(ref theme_dir), Some(ref packpath)) if cmd == "themepack" => {
let ts = ThemeSet::load_from_folder(theme_dir).unwrap();
for path in ts.themes.keys() {


components/config/src/config.rs (+9, -40)

@@ -41,12 +41,7 @@ impl Taxonomy {

impl Default for Taxonomy {
fn default() -> Taxonomy {
Taxonomy {
name: String::new(),
paginate_by: None,
paginate_path: None,
rss: false,
}
Taxonomy { name: String::new(), paginate_by: None, paginate_path: None, rss: false }
}
}

@@ -137,19 +132,12 @@ impl Config {
for pat in &config.ignored_content {
let glob = match Glob::new(pat) {
Ok(g) => g,
Err(e) => bail!(
"Invalid ignored_content glob pattern: {}, error = {}",
pat,
e
),
Err(e) => bail!("Invalid ignored_content glob pattern: {}, error = {}", pat, e),
};
glob_set_builder.add(glob);
}
config.ignored_content_globset = Some(
glob_set_builder
.build()
.expect("Bad ignored_content in config file."),
);
config.ignored_content_globset =
Some(glob_set_builder.build().expect("Bad ignored_content in config file."));
}

Ok(config)
@@ -162,10 +150,7 @@ impl Config {
let file_name = path.file_name().unwrap();
File::open(path)
.chain_err(|| {
format!(
"No `{:?}` file found. Are you in the right directory?",
file_name
)
format!("No `{:?}` file found. Are you in the right directory?", file_name)
})?
.read_to_string(&mut content)?;

@@ -217,16 +202,12 @@ impl Config {
let original = self.extra.clone();
// 2. inject theme extra values
for (key, val) in &theme.extra {
self.extra
.entry(key.to_string())
.or_insert_with(|| val.clone());
self.extra.entry(key.to_string()).or_insert_with(|| val.clone());
}

// 3. overwrite with original config
for (key, val) in &original {
self.extra
.entry(key.to_string())
.or_insert_with(|| val.clone());
self.extra.entry(key.to_string()).or_insert_with(|| val.clone());
}

Ok(())
@@ -316,16 +297,7 @@ hello = "world"

let config = Config::parse(config);
assert!(config.is_ok());
assert_eq!(
config
.unwrap()
.extra
.get("hello")
.unwrap()
.as_str()
.unwrap(),
"world"
);
assert_eq!(config.unwrap().extra.get("hello").unwrap().as_str().unwrap(), "world");
}

#[test]
@@ -360,10 +332,7 @@ hello = "world"
fn can_make_url_with_localhost() {
let mut config = Config::default();
config.base_url = "http://127.0.0.1:1111".to_string();
assert_eq!(
config.make_permalink("/tags/rust"),
"http://127.0.0.1:1111/tags/rust/"
);
assert_eq!(config.make_permalink("/tags/rust"), "http://127.0.0.1:1111/tags/rust/");
}

// https://github.com/Keats/gutenberg/issues/486
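The `ignored_content` hunk above only reflows the globset construction; the API calls are unchanged. A self-contained sketch of that pattern (the sample patterns are invented):

```rust
use globset::{Glob, GlobSetBuilder};

fn main() {
    // Build a GlobSet the same way Config::parse does for ignored_content.
    let ignored_content = ["*.tmp", "*.swp"];
    let mut glob_set_builder = GlobSetBuilder::new();
    for pat in &ignored_content {
        // Config::parse bails with "Invalid ignored_content glob pattern" here.
        let glob = Glob::new(pat).expect("invalid glob pattern");
        glob_set_builder.add(glob);
    }
    let globset = glob_set_builder.build().expect("Bad ignored_content in config file.");
    assert!(globset.is_match("draft.tmp"));
    assert!(!globset.is_match("draft.md"));
}
```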


components/config/src/highlighting.rs (+8, -7)

@@ -1,18 +1,18 @@
use syntect::dumps::from_binary;
use syntect::parsing::SyntaxSet;
use syntect::highlighting::ThemeSet;
use syntect::easy::HighlightLines;
use syntect::highlighting::ThemeSet;
use syntect::parsing::SyntaxSet;

use Config;


lazy_static! {
pub static ref SYNTAX_SET: SyntaxSet = {
let ss: SyntaxSet = from_binary(include_bytes!("../../../sublime_syntaxes/newlines.packdump"));
let ss: SyntaxSet =
from_binary(include_bytes!("../../../sublime_syntaxes/newlines.packdump"));
ss
};
pub static ref THEME_SET: ThemeSet = from_binary(include_bytes!("../../../sublime_themes/all.themedump"));
pub static ref THEME_SET: ThemeSet =
from_binary(include_bytes!("../../../sublime_themes/all.themedump"));
}

/// Returns the highlighter and whether it was found in the extra or not
@@ -21,7 +21,8 @@ pub fn get_highlighter<'a>(info: &str, config: &Config) -> (HighlightLines<'a>,
let mut in_extra = false;

if let Some(ref lang) = info.split(' ').next() {
let syntax = SYNTAX_SET.find_syntax_by_token(lang)
let syntax = SYNTAX_SET
.find_syntax_by_token(lang)
.or_else(|| {
if let Some(ref extra) = config.extra_syntax_set {
let s = extra.find_syntax_by_token(lang);
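The reflowed chain in `get_highlighter` keeps the same lookup order: the bundled `SYNTAX_SET` first, then the config's `extra_syntax_set`. A toy model of that `or_else` fallback, with plain string slices standing in for syntect's types:

```rust
// Stand-in for SYNTAX_SET.find_syntax_by_token with an extra-set fallback.
fn find_syntax<'a>(
    builtin: &'a [&'a str],
    extra: Option<&'a [&'a str]>,
    lang: &str,
) -> Option<&'a str> {
    builtin
        .iter()
        .find(|&&s| s == lang)
        .or_else(|| extra.and_then(|set| set.iter().find(|&&s| s == lang)))
        .copied()
}

fn main() {
    let builtin = ["rust", "toml"];
    let extra = ["gleam"];
    assert_eq!(find_syntax(&builtin, Some(&extra), "rust"), Some("rust"));
    assert_eq!(find_syntax(&builtin, Some(&extra), "gleam"), Some("gleam"));
    assert_eq!(find_syntax(&builtin, None, "gleam"), None);
}
```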


components/config/src/lib.rs (+1, -2)

@@ -9,10 +9,9 @@ extern crate globset;
extern crate lazy_static;
extern crate syntect;


mod config;
mod theme;
pub mod highlighting;
mod theme;
pub use config::{Config, Taxonomy};

use std::path::Path;


components/config/src/theme.rs (+4, -6)

@@ -7,7 +7,6 @@ use toml::Value as Toml;

use errors::{Result, ResultExt};


/// Holds the data from a `theme.toml` file.
/// There are other fields than `extra` in it but Zola
/// itself doesn't care about them.
@@ -36,7 +35,6 @@ impl Theme {
bail!("Expected the `theme.toml` to be a TOML table")
}


Ok(Theme { extra })
}

@@ -44,11 +42,11 @@ impl Theme {
pub fn from_file(path: &PathBuf) -> Result<Theme> {
let mut content = String::new();
File::open(path)
.chain_err(||
.chain_err(|| {
"No `theme.toml` file found. \
Is the `theme` defined in your `config.toml present in the `themes` directory \
and does it have a `theme.toml` inside?"
)?
Is the `theme` defined in your `config.toml present in the `themes` directory \
and does it have a `theme.toml` inside?"
})?
.read_to_string(&mut content)?;

Theme::parse(&content)


components/errors/src/lib.rs (+2, -2)

@@ -2,10 +2,10 @@

#[macro_use]
extern crate error_chain;
extern crate tera;
extern crate toml;
extern crate image;
extern crate syntect;
extern crate tera;
extern crate toml;

error_chain! {
errors {}


components/front_matter/src/lib.rs (+22, -14)

@@ -2,18 +2,18 @@
extern crate lazy_static;
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate toml;
extern crate chrono;
extern crate regex;
extern crate serde;
extern crate tera;
extern crate chrono;
extern crate toml;

#[macro_use]
extern crate errors;

use std::path::Path;
use regex::Regex;
use errors::{Result, ResultExt};
use regex::Regex;
use std::path::Path;

mod page;
mod section;
@@ -22,7 +22,8 @@ pub use page::PageFrontMatter;
pub use section::SectionFrontMatter;

lazy_static! {
static ref PAGE_RE: Regex = Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
static ref PAGE_RE: Regex =
Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
}

#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
@@ -44,12 +45,14 @@ pub enum InsertAnchor {
None,
}


/// Split a file between the front matter and its content
/// Will return an error if the front matter wasn't found
fn split_content(file_path: &Path, content: &str) -> Result<(String, String)> {
if !PAGE_RE.is_match(content) {
bail!("Couldn't find front matter in `{}`. Did you forget to add `+++`?", file_path.to_string_lossy());
bail!(
"Couldn't find front matter in `{}`. Did you forget to add `+++`?",
file_path.to_string_lossy()
);
}

// 2. extract the front matter and the content
@@ -62,10 +65,14 @@ fn split_content(file_path: &Path, content: &str) -> Result<(String, String)> {

/// Split a file between the front matter and its content.
/// Returns a parsed `SectionFrontMatter` and the rest of the content
pub fn split_section_content(file_path: &Path, content: &str) -> Result<(SectionFrontMatter, String)> {
pub fn split_section_content(
file_path: &Path,
content: &str,
) -> Result<(SectionFrontMatter, String)> {
let (front_matter, content) = split_content(file_path, content)?;
let meta = SectionFrontMatter::parse(&front_matter)
.chain_err(|| format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()))?;
let meta = SectionFrontMatter::parse(&front_matter).chain_err(|| {
format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy())
})?;
Ok((meta, content))
}

@@ -73,8 +80,9 @@ pub fn split_section_content(file_path: &Path, content: &str) -> Result<(Section
/// Returns a parsed `PageFrontMatter` and the rest of the content
pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontMatter, String)> {
let (front_matter, content) = split_content(file_path, content)?;
let meta = PageFrontMatter::parse(&front_matter)
.chain_err(|| format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()))?;
let meta = PageFrontMatter::parse(&front_matter).chain_err(|| {
format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy())
})?;
Ok((meta, content))
}

@@ -82,7 +90,7 @@ pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontM
mod tests {
use std::path::Path;

use super::{split_section_content, split_page_content};
use super::{split_page_content, split_section_content};

#[test]
fn can_split_page_content_valid() {
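The `PAGE_RE` pattern reformatted above is what both `split_section_content` and `split_page_content` rely on: group 1 captures the TOML front matter between the `+++` fences, group 2 the remaining body. A usage sketch with a tiny page:

```rust
use regex::Regex;

fn main() {
    // Verbatim PAGE_RE from front_matter/src/lib.rs.
    let page_re =
        Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();

    let raw = "+++\ntitle = \"Hello\"\n+++\nHello world";
    let caps = page_re.captures(raw).expect("front matter not found");
    assert_eq!(&caps[1], "title = \"Hello\"\n"); // fed to the TOML parser
    assert_eq!(&caps[2], "Hello world"); // becomes the page body
}
```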


components/front_matter/src/page.rs (+14, -13)

@@ -2,19 +2,17 @@ use std::collections::HashMap;
use std::result::Result as StdResult;

use chrono::prelude::*;
use tera::{Map, Value};
use serde::{Deserialize, Deserializer};
use tera::{Map, Value};
use toml;

use errors::Result;


fn from_toml_datetime<'de, D>(deserializer: D) -> StdResult<Option<String>, D::Error>
where
D: Deserializer<'de>,
where
D: Deserializer<'de>,
{
toml::value::Datetime::deserialize(deserializer)
.map(|s| Some(s.to_string()))
toml::value::Datetime::deserialize(deserializer).map(|s| Some(s.to_string()))
}

/// Returns key/value for a converted date from TOML.
@@ -36,7 +34,9 @@ fn convert_toml_date(table: Map<String, Value>) -> Value {
}
new.insert(k, convert_toml_date(o));
}
_ => { new.insert(k, v); }
_ => {
new.insert(k, v);
}
}
}

@@ -53,14 +53,15 @@ fn fix_toml_dates(table: Map<String, Value>) -> Value {
Value::Object(mut o) => {
new.insert(key, convert_toml_date(o));
}
_ => { new.insert(key, value); }
_ => {
new.insert(key, value);
}
}
}

Value::Object(new)
}


/// The front matter of every page
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(default)]
@@ -143,7 +144,9 @@ impl PageFrontMatter {
if d.contains('T') {
DateTime::parse_from_rfc3339(&d).ok().and_then(|s| Some(s.naive_local()))
} else {
NaiveDate::parse_from_str(&d, "%Y-%m-%d").ok().and_then(|s| Some(s.and_hms(0, 0, 0)))
NaiveDate::parse_from_str(&d, "%Y-%m-%d")
.ok()
.and_then(|s| Some(s.and_hms(0, 0, 0)))
}
} else {
None
@@ -187,11 +190,10 @@ impl Default for PageFrontMatter {
}
}


#[cfg(test)]
mod tests {
use tera::to_value;
use super::PageFrontMatter;
use tera::to_value;

#[test]
fn can_have_empty_front_matter() {
@@ -213,7 +215,6 @@ mod tests {
assert_eq!(res.description.unwrap(), "hey there".to_string())
}


#[test]
fn errors_with_invalid_front_matter() {
let content = r#"title = 1\n"#;


components/front_matter/src/section.rs (+2, -3)

@@ -5,11 +5,10 @@ use toml;

use errors::Result;

use super::{SortBy, InsertAnchor};
use super::{InsertAnchor, SortBy};

static DEFAULT_PAGINATE_PATH: &'static str = "page";


/// The front matter of every section
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default)]
@@ -69,7 +68,7 @@ impl SectionFrontMatter {
pub fn is_paginated(&self) -> bool {
match self.paginate_by {
Some(v) => v > 0,
None => false
None => false,
}
}
}


components/imageproc/src/lib.rs (+55, -40)

@@ -1,32 +1,32 @@
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate image;
extern crate rayon;
extern crate regex;

extern crate utils;
extern crate errors;
extern crate utils;

use std::path::{Path, PathBuf};
use std::hash::{Hash, Hasher};
use std::collections::HashMap;
use std::collections::hash_map::Entry as HEntry;
use std::collections::hash_map::DefaultHasher;
use std::collections::hash_map::Entry as HEntry;
use std::collections::HashMap;
use std::fs::{self, File};
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};

use regex::Regex;
use image::{FilterType, GenericImageView};
use image::jpeg::JPEGEncoder;
use image::{FilterType, GenericImageView};
use rayon::prelude::*;
use regex::Regex;

use utils::fs as ufs;
use errors::{Result, ResultExt};
use utils::fs as ufs;

static RESIZED_SUBDIR: &'static str = "processed_images";

lazy_static! {
pub static ref RESIZED_FILENAME: Regex = Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.]jpg"#).unwrap();
pub static ref RESIZED_FILENAME: Regex =
Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.]jpg"#).unwrap();
}

/// Describes the precise kind of a resize operation
@@ -57,16 +57,22 @@ impl ResizeOp {

// Validate args:
match op {
"fit_width" => if width.is_none() {
return Err("op=\"fit_width\" requires a `width` argument".to_string().into());
},
"fit_height" => if height.is_none() {
return Err("op=\"fit_height\" requires a `height` argument".to_string().into());
},
"scale" | "fit" | "fill" => if width.is_none() || height.is_none() {
return Err(format!("op={} requires a `width` and `height` argument", op).into());
},
_ => return Err(format!("Invalid image resize operation: {}", op).into())
"fit_width" => {
if width.is_none() {
return Err("op=\"fit_width\" requires a `width` argument".to_string().into());
}
}
"fit_height" => {
if height.is_none() {
return Err("op=\"fit_height\" requires a `height` argument".to_string().into());
}
}
"scale" | "fit" | "fill" => {
if width.is_none() || height.is_none() {
return Err(format!("op={} requires a `width` and `height` argument", op).into());
}
}
_ => return Err(format!("Invalid image resize operation: {}", op).into()),
};

Ok(match op {
@@ -121,8 +127,12 @@ impl From<ResizeOp> for u8 {
impl Hash for ResizeOp {
fn hash<H: Hasher>(&self, hasher: &mut H) {
hasher.write_u8(u8::from(*self));
if let Some(w) = self.width() { hasher.write_u32(w); }
if let Some(h) = self.height() { hasher.write_u32(h); }
if let Some(w) = self.width() {
hasher.write_u32(w);
}
if let Some(h) = self.height() {
hasher.write_u32(h);
}
}
}

@@ -207,8 +217,7 @@ impl ImageOp {
((img_w - crop_w) / 2, 0)
};

img.crop(offset_w, offset_h, crop_w, crop_h)
.resize_exact(w, h, RESIZE_FILTER)
img.crop(offset_w, offset_h, crop_w, crop_h).resize_exact(w, h, RESIZE_FILTER)
}
}
};
@@ -221,7 +230,6 @@ impl ImageOp {
}
}


/// A strcture into which image operations can be enqueued and then performed.
/// All output is written in a subdirectory in `static_path`,
/// taking care of file stale status based on timestamps and possible hash collisions.
@@ -271,7 +279,11 @@ impl Processor {

fn insert_with_collisions(&mut self, mut img_op: ImageOp) -> u32 {
match self.img_ops.entry(img_op.hash) {
HEntry::Occupied(entry) => if *entry.get() == img_op { return 0; },
HEntry::Occupied(entry) => {
if *entry.get() == img_op {
return 0;
}
}
HEntry::Vacant(entry) => {
entry.insert(img_op);
return 0;
@@ -341,9 +353,8 @@ impl Processor {
let filename = entry_path.file_name().unwrap().to_string_lossy();
if let Some(capts) = RESIZED_FILENAME.captures(filename.as_ref()) {
let hash = u64::from_str_radix(capts.get(1).unwrap().as_str(), 16).unwrap();
let collision_id = u32::from_str_radix(
capts.get(2).unwrap().as_str(), 16,
).unwrap();
let collision_id =
u32::from_str_radix(capts.get(2).unwrap().as_str(), 16).unwrap();

if collision_id > 0 || !self.img_ops.contains_key(&hash) {
fs::remove_file(&entry_path)?;
@@ -359,24 +370,28 @@ impl Processor {
ufs::ensure_directory_exists(&self.resized_path)?;
}

self.img_ops.par_iter().map(|(hash, op)| {
let target = self.resized_path.join(Self::op_filename(*hash, op.collision_id));
op.perform(&self.content_path, &target)
.chain_err(|| format!("Failed to process image: {}", op.source))
}).collect::<Result<()>>()
self.img_ops
.par_iter()
.map(|(hash, op)| {
let target = self.resized_path.join(Self::op_filename(*hash, op.collision_id));
op.perform(&self.content_path, &target)
.chain_err(|| format!("Failed to process image: {}", op.source))
})
.collect::<Result<()>>()
}
}


/// Looks at file's extension and returns whether it's a supported image format
pub fn file_is_img<P: AsRef<Path>>(p: P) -> bool {
p.as_ref().extension().and_then(|s| s.to_str()).map(|ext| {
match ext.to_lowercase().as_str() {
p.as_ref()
.extension()
.and_then(|s| s.to_str())
.map(|ext| match ext.to_lowercase().as_str() {
"jpg" | "jpeg" => true,
"png" => true,
"gif" => true,
"bmp" => true,
_ => false,
}
}).unwrap_or(false)
})
.unwrap_or(false)
}
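`file_is_img` is restructured but equivalent; the `Option` chain still lowercases the extension and whitelists the same five formats. A quick check of the edge cases:

```rust
use std::path::Path;

// Same combinator chain as file_is_img above, as a free function.
fn file_is_img<P: AsRef<Path>>(p: P) -> bool {
    p.as_ref()
        .extension()
        .and_then(|s| s.to_str())
        .map(|ext| match ext.to_lowercase().as_str() {
            "jpg" | "jpeg" | "png" | "gif" | "bmp" => true,
            _ => false,
        })
        .unwrap_or(false)
}

fn main() {
    assert!(file_is_img("photo.JPEG")); // to_lowercase makes this case-insensitive
    assert!(!file_is_img("archive.tar.md"));
    assert!(!file_is_img("Makefile")); // no extension: extension() is None
}
```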

components/library/src/content/file_info.rs (+2, -1)

@@ -114,7 +114,8 @@ mod tests {

#[test]
fn can_find_content_components() {
let res = find_content_components("/home/vincent/code/site/content/posts/tutorials/python.md");
let res =
find_content_components("/home/vincent/code/site/content/posts/tutorials/python.md");
assert_eq!(res, ["posts".to_string(), "tutorials".to_string()]);
}
}

components/library/src/content/page.rs (+40, -55)

@@ -2,19 +2,19 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};

use tera::{Tera, Context as TeraContext};
use slug::slugify;
use slotmap::{Key};
use regex::Regex;
use slotmap::Key;
use slug::slugify;
use tera::{Context as TeraContext, Tera};

use errors::{Result, ResultExt};
use config::Config;
use utils::fs::{read_file, find_related_assets};
use errors::{Result, ResultExt};
use front_matter::{split_page_content, InsertAnchor, PageFrontMatter};
use library::Library;
use rendering::{render_content, Header, RenderContext};
use utils::fs::{find_related_assets, read_file};
use utils::site::get_reading_analytics;
use utils::templates::render_template;
use front_matter::{PageFrontMatter, InsertAnchor, split_page_content};
use rendering::{RenderContext, Header, render_content};
use library::Library;

use content::file_info::FileInfo;
use content::ser::SerializingPage;
@@ -24,7 +24,6 @@ lazy_static! {
static ref DATE_IN_FILENAME: Regex = Regex::new(r"^^([12]\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01]))(_|-)").unwrap();
}


#[derive(Clone, Debug, PartialEq)]
pub struct Page {
/// All info about the actual file
@@ -71,7 +70,6 @@ pub struct Page {
pub reading_time: Option<usize>,
}


impl Page {
pub fn new<P: AsRef<Path>>(file_path: P, meta: PageFrontMatter) -> Page {
let file_path = file_path.as_ref();
@@ -155,7 +153,9 @@ impl Page {
page.path = format!("{}/", page.path);
}

page.components = page.path.split('/')
page.components = page
.path
.split('/')
.map(|p| p.to_string())
.filter(|p| !p.is_empty())
.collect::<Vec<_>>();
@@ -182,13 +182,13 @@ impl Page {
// against the remaining path. Note that the current behaviour effectively means that
// the `ignored_content` setting in the config file is limited to single-file glob
// patterns (no "**" patterns).
page.assets = assets.into_iter()
.filter(|path|
match path.file_name() {
None => true,
Some(file) => !globset.is_match(file)
}
).collect();
page.assets = assets
.into_iter()
.filter(|path| match path.file_name() {
None => true,
Some(file) => !globset.is_match(file),
})
.collect();
} else {
page.assets = assets;
}
@@ -210,13 +210,8 @@ impl Page {
config: &Config,
anchor_insert: InsertAnchor,
) -> Result<()> {
let mut context = RenderContext::new(
tera,
config,
&self.permalink,
permalinks,
anchor_insert,
);
let mut context =
RenderContext::new(tera, config, &self.permalink, permalinks, anchor_insert);

context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None));

@@ -234,7 +229,7 @@ impl Page {
pub fn render_html(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> {
let tpl_name = match self.meta.template {
Some(ref l) => l.to_string(),
None => "page.html".to_string()
None => "page.html".to_string(),
};

let mut context = TeraContext::new();
@@ -249,7 +244,8 @@ impl Page {

/// Creates a vectors of asset URLs.
fn serialize_assets(&self) -> Vec<String> {
self.assets.iter()
self.assets
.iter()
.filter_map(|asset| asset.file_name())
.filter_map(|filename| filename.to_str())
.map(|filename| self.path.clone() + filename)
@@ -294,19 +290,18 @@ impl Default for Page {
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use std::fs::{create_dir, File};
use std::io::Write;
use std::fs::{File, create_dir};
use std::path::Path;

use tera::Tera;
use tempfile::tempdir;
use globset::{Glob, GlobSetBuilder};
use tempfile::tempdir;
use tera::Tera;

use config::Config;
use super::Page;
use config::Config;
use front_matter::InsertAnchor;


#[test]
fn test_can_parse_a_valid_page() {
let content = r#"
@@ -324,7 +319,8 @@ Hello world"#;
&Tera::default(),
&Config::default(),
InsertAnchor::None,
).unwrap();
)
.unwrap();

assert_eq!(page.meta.title.unwrap(), "Hello".to_string());
assert_eq!(page.meta.slug.unwrap(), "hello-world".to_string());
@@ -426,16 +422,13 @@ Hello world"#;
+++
+++
Hello world
<!-- more -->"#.to_string();
<!-- more -->"#
.to_string();
let res = Page::parse(Path::new("hello.md"), &content, &config);
assert!(res.is_ok());
let mut page = res.unwrap();
page.render_markdown(
&HashMap::default(),
&Tera::default(),
&config,
InsertAnchor::None,
).unwrap();
page.render_markdown(&HashMap::default(), &Tera::default(), &config, InsertAnchor::None)
.unwrap();
assert_eq!(page.summary, Some("<p>Hello world</p>\n".to_string()));
}

@@ -453,10 +446,7 @@ Hello world
File::create(nested_path.join("graph.jpg")).unwrap();
File::create(nested_path.join("fail.png")).unwrap();

let res = Page::from_file(
nested_path.join("index.md").as_path(),
&Config::default(),
);
let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default());
assert!(res.is_ok());
let page = res.unwrap();
assert_eq!(page.file.parent, path.join("content").join("posts"));
@@ -479,10 +469,7 @@ Hello world
File::create(nested_path.join("graph.jpg")).unwrap();
File::create(nested_path.join("fail.png")).unwrap();

let res = Page::from_file(
nested_path.join("index.md").as_path(),
&Config::default(),
);
let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default());
assert!(res.is_ok());
let page = res.unwrap();
assert_eq!(page.file.parent, path.join("content").join("posts"));
@@ -510,10 +497,7 @@ Hello world
let mut config = Config::default();
config.ignored_content_globset = Some(gsb.build().unwrap());

let res = Page::from_file(
nested_path.join("index.md").as_path(),
&config,
);
let res = Page::from_file(nested_path.join("index.md").as_path(), &config);

assert!(res.is_ok());
let page = res.unwrap();
@@ -528,7 +512,8 @@ Hello world
+++
+++
Hello world
<!-- more -->"#.to_string();
<!-- more -->"#
.to_string();
let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config);
assert!(res.is_ok());
let page = res.unwrap();
@@ -539,14 +524,14 @@ Hello world

#[test]
fn frontmatter_date_override_filename_date() {

let config = Config::default();
let content = r#"
+++
date = 2018-09-09
+++
Hello world
<!-- more -->"#.to_string();
<!-- more -->"#
.to_string();
let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config);
assert!(res.is_ok());
let page = res.unwrap();


components/library/src/content/section.rs (+22, -26)

@@ -1,22 +1,21 @@
use std::collections::HashMap;
use std::path::{Path, PathBuf};

use tera::{Tera, Context as TeraContext};
use slotmap::Key;
use tera::{Context as TeraContext, Tera};

use config::Config;
use front_matter::{SectionFrontMatter, split_section_content};
use errors::{Result, ResultExt};
use utils::fs::{read_file, find_related_assets};
use utils::templates::render_template;
use front_matter::{split_section_content, SectionFrontMatter};
use rendering::{render_content, Header, RenderContext};
use utils::fs::{find_related_assets, read_file};
use utils::site::get_reading_analytics;
use rendering::{RenderContext, Header, render_content};
use utils::templates::render_template;

use content::file_info::FileInfo;
use content::ser::SerializingSection;
use library::Library;


#[derive(Clone, Debug, PartialEq)]
pub struct Section {
/// All info about the actual file
@@ -86,7 +85,9 @@ impl Section {
section.word_count = Some(word_count);
section.reading_time = Some(reading_time);
section.path = format!("{}/", section.file.components.join("/"));
section.components = section.path.split('/')
section.components = section
.path
.split('/')
.map(|p| p.to_string())
.filter(|p| !p.is_empty())
.collect::<Vec<_>>();
@@ -111,13 +112,13 @@ impl Section {
// against the remaining path. Note that the current behaviour effectively means that
// the `ignored_content` setting in the config file is limited to single-file glob
// patterns (no "**" patterns).
section.assets = assets.into_iter()
.filter(|path|
match path.file_name() {
None => true,
Some(file) => !globset.is_match(file)
}
).collect();
section.assets = assets
.into_iter()
.filter(|path| match path.file_name() {
None => true,
Some(file) => !globset.is_match(file),
})
.collect();
} else {
section.assets = assets;
}
@@ -185,7 +186,8 @@ impl Section {

/// Creates a vectors of asset URLs.
fn serialize_assets(&self) -> Vec<String> {
self.assets.iter()
self.assets
.iter()
.filter_map(|asset| asset.file_name())
.filter_map(|filename| filename.to_str())
.map(|filename| self.path.clone() + filename)
@@ -227,14 +229,14 @@ impl Default for Section {

#[cfg(test)]
mod tests {
use std::fs::{create_dir, File};
use std::io::Write;
use std::fs::{File, create_dir};

use tempfile::tempdir;
use globset::{Glob, GlobSetBuilder};
use tempfile::tempdir;

use config::Config;
use super::Section;
use config::Config;

#[test]
fn section_with_assets_gets_right_info() {
@@ -250,10 +252,7 @@ mod tests {
File::create(nested_path.join("graph.jpg")).unwrap();
File::create(nested_path.join("fail.png")).unwrap();

let res = Section::from_file(
nested_path.join("_index.md").as_path(),
&Config::default(),
);
let res = Section::from_file(nested_path.join("_index.md").as_path(), &Config::default());
assert!(res.is_ok());
let section = res.unwrap();
assert_eq!(section.assets.len(), 3);
@@ -279,10 +278,7 @@ mod tests {
let mut config = Config::default();
config.ignored_content_globset = Some(gsb.build().unwrap());

let res = Section::from_file(
nested_path.join("_index.md").as_path(),
&config,
);
let res = Section::from_file(nested_path.join("_index.md").as_path(), &config);

assert!(res.is_ok());
let page = res.unwrap();


components/library/src/content/ser.rs (+34, -13)

@@ -1,13 +1,12 @@
//! What we are sending to the templates when rendering them
use std::collections::HashMap;

use tera::{Value, Map};
use tera::{Map, Value};

use library::Library;
use content::{Page, Section};
use library::Library;
use rendering::Header;


#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct SerializingPage<'a> {
relative_path: &'a str,
@@ -49,11 +48,23 @@ impl<'a> SerializingPage<'a> {
day = Some(d.2);
}
let pages = library.pages();
let lighter = page.lighter.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let heavier = page.heavier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let earlier = page.earlier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let later = page.later.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let ancestors = page.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
let lighter = page
.lighter
.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let heavier = page
.heavier
.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let earlier = page
.earlier
.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let later = page
.later
.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let ancestors = page
.ancestors
.iter()
.map(|k| library.get_section_by_key(*k).file.relative.clone())
.collect();

SerializingPage {
relative_path: &page.file.relative,
@@ -95,7 +106,10 @@ impl<'a> SerializingPage<'a> {
day = Some(d.2);
}
let ancestors = if let Some(ref lib) = library {
page.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
page.ancestors
.iter()
.map(|k| lib.get_section_by_key(*k).file.relative.clone())
.collect()
} else {
vec![]
};
@@ -130,7 +144,6 @@ impl<'a> SerializingPage<'a> {
}
}


#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct SerializingSection<'a> {
relative_path: &'a str,
@@ -145,7 +158,7 @@ pub struct SerializingSection<'a> {
word_count: Option<usize>,
reading_time: Option<usize>,
toc: &'a [Header],
assets: &'a [String],
assets: &'a [String],
pages: Vec<SerializingPage<'a>>,
subsections: Vec<&'a str>,
}
@@ -163,7 +176,11 @@ impl<'a> SerializingSection<'a> {
subsections.push(library.get_section_path_by_key(*k));
}

let ancestors = section.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
let ancestors = section
.ancestors
.iter()
.map(|k| library.get_section_by_key(*k).file.relative.clone())
.collect();

SerializingSection {
relative_path: &section.file.relative,
@@ -187,7 +204,11 @@ impl<'a> SerializingSection<'a> {
/// Same as from_section but doesn't fetch pages and sections
pub fn from_section_basic(section: &'a Section, library: Option<&'a Library>) -> Self {
let ancestors = if let Some(ref lib) = library {
section.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
section
.ancestors
.iter()
.map(|k| lib.get_section_by_key(*k).file.relative.clone())
.collect()
} else {
vec![]
};


components/library/src/lib.rs (+13, -13)

@@ -1,39 +1,39 @@
extern crate tera;
extern crate slug;
extern crate serde;
extern crate slug;
extern crate tera;
#[macro_use]
extern crate serde_derive;
extern crate chrono;
extern crate slotmap;
extern crate rayon;
extern crate slotmap;
#[macro_use]
extern crate lazy_static;
extern crate regex;

#[cfg(test)]
extern crate globset;
#[cfg(test)]
extern crate tempfile;
#[cfg(test)]
extern crate toml;
#[cfg(test)]
extern crate globset;

extern crate front_matter;
extern crate config;
extern crate utils;
extern crate front_matter;
extern crate rendering;
extern crate utils;
#[macro_use]
extern crate errors;

mod content;
mod taxonomies;
mod library;
mod pagination;
mod sorting;
mod library;
mod taxonomies;

pub use slotmap::{Key, DenseSlotMap};
pub use slotmap::{DenseSlotMap, Key};

pub use sorting::sort_actual_pages_by_date;
pub use content::{Page, SerializingPage, Section, SerializingSection};
pub use content::{Page, Section, SerializingPage, SerializingSection};
pub use library::Library;
pub use taxonomies::{Taxonomy, TaxonomyItem, find_taxonomies};
pub use pagination::Paginator;
pub use sorting::sort_actual_pages_by_date;
pub use taxonomies::{find_taxonomies, Taxonomy, TaxonomyItem};

components/library/src/library.rs (+31, -24)

@@ -5,9 +5,8 @@ use slotmap::{DenseSlotMap, Key};

use front_matter::SortBy;

use sorting::{find_siblings, sort_pages_by_weight, sort_pages_by_date};
use content::{Page, Section};
use sorting::{find_siblings, sort_pages_by_date, sort_pages_by_weight};

/// Houses everything about pages and sections
/// Think of it as a database where each page and section has an id (Key here)
@@ -81,12 +80,13 @@ impl Library {
/// Find out the direct subsections of each subsection if there are some
/// as well as the pages for each section
pub fn populate_sections(&mut self) {
let (root_path, index_path) = self.sections
let (root_path, index_path) = self
.sections
.values()
.find(|s| s.is_index())
.map(|s| (s.file.parent.clone(), s.file.path.clone()))
.unwrap();
let root_key = self.paths_to_sections[&index_path];
let root_key = self.paths_to_sections[&index_path];

// We are going to get both the ancestors and grandparents for each section in one go
let mut ancestors: HashMap<PathBuf, Vec<_>> = HashMap::new();
@@ -130,7 +130,8 @@ impl Library {
let parent_section_path = page.file.parent.join("_index.md");
if let Some(section_key) = self.paths_to_sections.get(&parent_section_path) {
self.sections.get_mut(*section_key).unwrap().pages.push(key);
page.ancestors = ancestors.get(&parent_section_path).cloned().unwrap_or_else(|| vec![]);
page.ancestors =
ancestors.get(&parent_section_path).cloned().unwrap_or_else(|| vec![]);
// Don't forget to push the actual parent
page.ancestors.push(*section_key);
}
@@ -150,7 +151,8 @@ impl Library {
children.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b]));
section.subsections = children;
}
section.ancestors = ancestors.get(&section.file.path).cloned().unwrap_or_else(|| vec![]);
section.ancestors =
ancestors.get(&section.file.path).cloned().unwrap_or_else(|| vec![]);
}
}

@@ -161,7 +163,8 @@ impl Library {
let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by {
SortBy::None => continue,
SortBy::Date => {
let data = section.pages
let data = section
.pages
.iter()
.map(|k| {
if let Some(page) = self.pages.get(*k) {
@@ -173,9 +176,10 @@ impl Library {
.collect();

sort_pages_by_date(data)
},
}
SortBy::Weight => {
let data = section.pages
let data = section
.pages
.iter()
.map(|k| {
if let Some(page) = self.pages.get(*k) {
@@ -194,13 +198,18 @@ impl Library {

for (key, (sorted, cannot_be_sorted, sort_by)) in updates {
// Find sibling between sorted pages first
let with_siblings = find_siblings(sorted.iter().map(|k| {
if let Some(page) = self.pages.get(*k) {
(k, page.is_draft())
} else {
unreachable!("Sorting got an unknown page")
}
}).collect());
let with_siblings = find_siblings(
sorted
.iter()
.map(|k| {
if let Some(page) = self.pages.get(*k) {
(k, page.is_draft())
} else {
unreachable!("Sorting got an unknown page")
}
})
.collect(),
);

for (k2, val1, val2) in with_siblings {
if let Some(page) = self.pages.get_mut(k2) {
@@ -208,12 +217,12 @@ impl Library {
SortBy::Date => {
page.earlier = val2;
page.later = val1;
},
}
SortBy::Weight => {
page.lighter = val1;
page.heavier = val2;
},
SortBy::None => unreachable!("Impossible to find siblings in SortBy::None")
}
SortBy::None => unreachable!("Impossible to find siblings in SortBy::None"),
}
} else {
unreachable!("Sorting got an unknown page")
@@ -229,10 +238,8 @@ impl Library {

/// Find all the orphan pages: pages that are in a folder without an `_index.md`
pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
let pages_in_sections = self.sections
.values()
.flat_map(|s| &s.pages)
.collect::<HashSet<_>>();
let pages_in_sections =
self.sections.values().flat_map(|s| &s.pages).collect::<HashSet<_>>();

self.pages
.iter()
@@ -245,7 +252,7 @@ impl Library {
let page_key = self.paths_to_pages[path];
for s in self.sections.values() {
if s.pages.contains(&page_key) {
return Some(s)
return Some(s);
}
}
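`get_all_orphan_pages` above keeps its set-difference logic: gather every page key some section owns, then keep the pages whose key never appears. In miniature, with integers standing in for slotmap keys:

```rust
use std::collections::HashSet;

fn main() {
    // Keys owned by sections, flattened into one set (pages_in_sections).
    let pages_in_sections: HashSet<u32> =
        vec![vec![1, 3], vec![4]].into_iter().flatten().collect();

    // Every page key in the library; 2 and 5 belong to no section.
    let all_pages = vec![1, 2, 3, 4, 5];
    let orphans: Vec<u32> =
        all_pages.into_iter().filter(|k| !pages_in_sections.contains(k)).collect();
    assert_eq!(orphans, vec![2, 5]);
}
```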



components/library/src/pagination/mod.rs (+39, -28)

@@ -1,16 +1,15 @@
use std::collections::HashMap;

use tera::{Tera, Context, to_value, Value};
use slotmap::{Key};
use slotmap::Key;
use tera::{to_value, Context, Tera, Value};

use errors::{Result, ResultExt};
use config::Config;
use errors::{Result, ResultExt};
use utils::templates::render_template;

use content::{Section, SerializingSection, SerializingPage};
use taxonomies::{TaxonomyItem, Taxonomy};
use content::{Section, SerializingPage, SerializingSection};
use library::Library;
use taxonomies::{Taxonomy, TaxonomyItem};

#[derive(Clone, Debug, PartialEq)]
enum PaginationRoot<'a> {
@@ -18,7 +17,6 @@ enum PaginationRoot<'a> {
Taxonomy(&'a Taxonomy),
}


/// A list of all the pages in the paginator with their index and links
#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct Pager<'a> {
@@ -33,13 +31,13 @@ pub struct Pager<'a> {
}

impl<'a> Pager<'a> {
fn new(index: usize, pages: Vec<SerializingPage<'a>>, permalink: String, path: String) -> Pager<'a> {
Pager {
index,
permalink,
path,
pages,
}
fn new(
index: usize,
pages: Vec<SerializingPage<'a>>,
permalink: String,
path: String,
) -> Pager<'a> {
Pager { index, permalink, path, pages }
}
}

@@ -83,7 +81,11 @@ impl<'a> Paginator<'a> {

/// Create a new paginator from a taxonomy
/// It will always at least create one pager (the first) even if there are not enough pages to paginate
pub fn from_taxonomy(taxonomy: &'a Taxonomy, item: &'a TaxonomyItem, library: &'a Library) -> Paginator<'a> {
pub fn from_taxonomy(
taxonomy: &'a Taxonomy,
item: &'a TaxonomyItem,
library: &'a Library,
) -> Paginator<'a> {
let paginate_by = taxonomy.kind.paginate_by.unwrap();
let mut paginator = Paginator {
all_pages: &item.pages,
@@ -92,7 +94,11 @@ impl<'a> Paginator<'a> {
root: PaginationRoot::Taxonomy(taxonomy),
permalink: item.permalink.clone(),
path: format!("{}/{}", taxonomy.kind.name, item.slug),
paginate_path: taxonomy.kind.paginate_path.clone().unwrap_or_else(|| "pages".to_string()),
paginate_path: taxonomy
.kind
.paginate_path
.clone()
.unwrap_or_else(|| "pages".to_string()),
is_index: false,
};

@@ -142,12 +148,7 @@ impl<'a> Paginator<'a> {
format!("{}/{}", self.path, page_path)
};

pagers.push(Pager::new(
index + 1,
page,
permalink,
pager_path,
));
pagers.push(Pager::new(index + 1, page, permalink, pager_path));
}

// We always have the index one at least
@@ -184,19 +185,29 @@ impl<'a> Paginator<'a> {
paginator.insert("next", Value::Null);
}
paginator.insert("number_pagers", to_value(&self.pagers.len()).unwrap());
paginator.insert("base_url", to_value(&format!("{}{}/", self.permalink, self.paginate_path)).unwrap());
paginator.insert(
"base_url",
to_value(&format!("{}{}/", self.permalink, self.paginate_path)).unwrap(),
);
paginator.insert("pages", to_value(&current_pager.pages).unwrap());
paginator.insert("current_index", to_value(current_pager.index).unwrap());

paginator
}

pub fn render_pager(&self, pager: &Pager, config: &Config, tera: &Tera, library: &Library) -> Result<String> {
pub fn render_pager(
&self,
pager: &Pager,
config: &Config,
tera: &Tera,
library: &Library,
) -> Result<String> {
let mut context = Context::new();
context.insert("config", &config);
let template_name = match self.root {
PaginationRoot::Section(s) => {
context.insert("section", &SerializingSection::from_section_basic(s, Some(library)));
context
.insert("section", &SerializingSection::from_section_basic(s, Some(library)));
s.get_template_name()
}
PaginationRoot::Taxonomy(t) => {
@@ -217,11 +228,11 @@ impl<'a> Paginator<'a> {
mod tests {
use tera::to_value;

use front_matter::SectionFrontMatter;
use content::{Page, Section};
use config::Taxonomy as TaxonomyConfig;
use taxonomies::{Taxonomy, TaxonomyItem};
use content::{Page, Section};
use front_matter::SectionFrontMatter;
use library::Library;
use taxonomies::{Taxonomy, TaxonomyItem};

use super::Paginator;
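The paginator edits are signature and call-site reflows; the pager-building loop still slices the page list into `paginate_by`-sized chunks with 1-based indices. The core idea in isolation:

```rust
fn main() {
    // Seven pages, three per pager, mirroring the Pager::new(index + 1, ...) loop.
    let all_pages: Vec<u32> = (1..=7).collect();
    let paginate_by = 3;

    let pagers: Vec<(usize, &[u32])> = all_pages
        .chunks(paginate_by)
        .enumerate()
        .map(|(i, chunk)| (i + 1, chunk))
        .collect();

    assert_eq!(pagers.len(), 3); // 3 + 3 + 1 pages
    assert_eq!(pagers[0].0, 1); // pager indices start at 1
    assert_eq!(pagers[2].1, &[7]);
}
```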



components/library/src/sorting.rs (+28, -36)

@@ -1,8 +1,8 @@
use std::cmp::Ordering;

use chrono::NaiveDateTime;
use rayon::prelude::*;
use slotmap::Key;
use chrono::NaiveDateTime;

use content::Page;

@@ -21,19 +21,17 @@ pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
/// Pages without date will be put in the unsortable bucket
/// The permalink is used to break ties
pub fn sort_pages_by_date(pages: Vec<(&Key, Option<NaiveDateTime>, &str)>) -> (Vec<Key>, Vec<Key>) {
let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages
.into_par_iter()
.partition(|page| page.1.is_some());

can_be_sorted
.par_sort_unstable_by(|a, b| {
let ord = b.1.unwrap().cmp(&a.1.unwrap());
if ord == Ordering::Equal {
a.2.cmp(&b.2)
} else {
ord
}
});
let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =
pages.into_par_iter().partition(|page| page.1.is_some());

can_be_sorted.par_sort_unstable_by(|a, b| {
let ord = b.1.unwrap().cmp(&a.1.unwrap());
if ord == Ordering::Equal {
a.2.cmp(&b.2)
} else {
ord
}
});

(can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
}
@@ -42,19 +40,17 @@ pub fn sort_pages_by_date(pages: Vec<(&Key, Option<NaiveDateTime>, &str)>) -> (V
/// Pages without weight will be put in the unsortable bucket
/// The permalink is used to break ties
pub fn sort_pages_by_weight(pages: Vec<(&Key, Option<usize>, &str)>) -> (Vec<Key>, Vec<Key>) {
let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages
.into_par_iter()
.partition(|page| page.1.is_some());

can_be_sorted
.par_sort_unstable_by(|a, b| {
let ord = a.1.unwrap().cmp(&b.1.unwrap());
if ord == Ordering::Equal {
a.2.cmp(&b.2)
} else {
ord
}
});
let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =
pages.into_par_iter().partition(|page| page.1.is_some());

can_be_sorted.par_sort_unstable_by(|a, b| {
let ord = a.1.unwrap().cmp(&b.1.unwrap());
if ord == Ordering::Equal {
a.2.cmp(&b.2)
} else {
ord
}
});

(can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
}
@@ -118,9 +114,9 @@ pub fn find_siblings(sorted: Vec<(&Key, bool)>) -> Vec<(Key, Option<Key>, Option
mod tests {
use slotmap::DenseSlotMap;

use front_matter::{PageFrontMatter};
use super::{find_siblings, sort_pages_by_date, sort_pages_by_weight};
use content::Page;
use super::{sort_pages_by_date, sort_pages_by_weight, find_siblings};
use front_matter::PageFrontMatter;

fn create_page_with_date(date: &str) -> Page {
let mut front_matter = PageFrontMatter::default();
@@ -179,7 +175,6 @@ mod tests {
assert_eq!(pages[2], key2);
}


#[test]
fn ignore_page_with_missing_field() {
let mut dense = DenseSlotMap::new();
@@ -196,7 +191,7 @@ mod tests {
(&key3, page3.meta.weight, page3.permalink.as_ref()),
];

let (pages,unsorted) = sort_pages_by_weight(input);
let (pages, unsorted) = sort_pages_by_weight(input);
assert_eq!(pages.len(), 2);
assert_eq!(unsorted.len(), 1);
}
@@ -211,11 +206,8 @@ mod tests {
let page3 = create_page_with_weight(3);
let key3 = dense.insert(page3.clone());

let input = vec![
(&key1, page1.is_draft()),
(&key2, page2.is_draft()),
(&key3, page3.is_draft()),
];
let input =
vec![(&key1, page1.is_draft()), (&key2, page2.is_draft()), (&key3, page3.is_draft())];

let pages = find_siblings(input);
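The extracted closures in `sort_pages_by_date` and `sort_pages_by_weight` are unchanged in substance: partition out pages missing the sort key, sort newest-first by date (lightest-first by weight), break ties on permalink. A toy model of the date case, with ISO date strings standing in for `NaiveDateTime`:

```rust
use std::cmp::Ordering;

fn main() {
    let mut pages = vec![
        ("2018-10-08", "/posts/b/"),
        ("2017-03-01", "/posts/c/"),
        ("2018-10-08", "/posts/a/"),
    ];

    // Same comparator shape as sort_pages_by_date above:
    // b cmp a gives newest-first; permalinks break ties.
    pages.sort_unstable_by(|a, b| {
        let ord = b.0.cmp(&a.0);
        if ord == Ordering::Equal {
            a.1.cmp(&b.1)
        } else {
            ord
        }
    });

    assert_eq!(pages[0].1, "/posts/a/"); // tie on date, "/posts/a/" < "/posts/b/"
    assert_eq!(pages[2].1, "/posts/c/"); // oldest last
}
```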



components/library/src/taxonomies/mod.rs (+51, -37)

@@ -1,16 +1,16 @@
use std::collections::HashMap;

use slotmap::Key;
use slug::slugify;
use tera::{Context, Tera};
use slotmap::{Key};

use config::{Config, Taxonomy as TaxonomyConfig};
use errors::{Result, ResultExt};
use utils::templates::render_template;

use content::SerializingPage;
use sorting::sort_pages_by_date;
use library::Library;
use sorting::sort_pages_by_date;

#[derive(Debug, Clone, PartialEq, Serialize)]
struct SerializedTaxonomyItem<'a> {
@@ -34,7 +34,6 @@ impl<'a> SerializedTaxonomyItem<'a> {
slug: &item.slug,
permalink: &item.permalink,
pages,

}
}
}
@@ -70,12 +69,7 @@ impl TaxonomyItem {
// We still append pages without dates at the end
pages.extend(ignored_pages);

TaxonomyItem {
name: name.to_string(),
permalink,
slug,
pages,
}
TaxonomyItem { name: name.to_string(), permalink, slug, pages }
}
}

@@ -87,11 +81,9 @@ pub struct SerializedTaxonomy<'a> {

impl<'a> SerializedTaxonomy<'a> {
pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self {
let items: Vec<SerializedTaxonomyItem> = taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
SerializedTaxonomy {
kind: &taxonomy.kind,
items,
}
let items: Vec<SerializedTaxonomyItem> =
taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
SerializedTaxonomy { kind: &taxonomy.kind, items }
}
}

@@ -104,19 +96,19 @@ pub struct Taxonomy {
}

impl Taxonomy {
fn new(kind: TaxonomyConfig, config: &Config, items: HashMap<String, Vec<Key>>, library: &Library) -> Taxonomy {
fn new(
kind: TaxonomyConfig,
config: &Config,
items: HashMap<String, Vec<Key>>,
library: &Library,
) -> Taxonomy {
let mut sorted_items = vec![];
for (name, pages) in items {
sorted_items.push(
TaxonomyItem::new(&name, &kind.name, config, pages, library)
);
sorted_items.push(TaxonomyItem::new(&name, &kind.name, config, pages, library));
}
sorted_items.sort_by(|a, b| a.name.cmp(&b.name));

Taxonomy {
kind,
items: sorted_items,
}
Taxonomy { kind, items: sorted_items }
}

pub fn len(&self) -> usize {
@@ -127,22 +119,37 @@ impl Taxonomy {
self.len() == 0
}

pub fn render_term(&self, item: &TaxonomyItem, tera: &Tera, config: &Config, library: &Library) -> Result<String> {
pub fn render_term(
&self,
item: &TaxonomyItem,
tera: &Tera,
config: &Config,
library: &Library,
) -> Result<String> {
let mut context = Context::new();
context.insert("config", config);
context.insert("term", &SerializedTaxonomyItem::from_item(item, library));
context.insert("taxonomy", &self.kind);
context.insert("current_url", &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)));
context.insert(
"current_url",
&config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)),
);
context.insert("current_path", &format!("/{}/{}", self.kind.name, item.slug));

render_template(&format!("{}/single.html", self.kind.name), tera, &context, &config.theme)
.chain_err(|| format!("Failed to render single term {} page.", self.kind.name))
}

pub fn render_all_terms(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> {
pub fn render_all_terms(
&self,
tera: &Tera,
config: &Config,
library: &Library,
) -> Result<String> {
let mut context = Context::new();
context.insert("config", config);
let terms: Vec<SerializedTaxonomyItem> = self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
let terms: Vec<SerializedTaxonomyItem> =
self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
context.insert("terms", &terms);
context.insert("taxonomy", &self.kind);
context.insert("current_url", &config.make_permalink(&self.kind.name));
@@ -175,19 +182,22 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonom

for (name, val) in &page.meta.taxonomies {
if taxonomies_def.contains_key(name) {
all_taxonomies
.entry(name)
.or_insert_with(HashMap::new);
all_taxonomies.entry(name).or_insert_with(HashMap::new);

for v in val {
all_taxonomies.get_mut(name)
all_taxonomies
.get_mut(name)
.unwrap()
.entry(v.to_string())
.or_insert_with(|| vec![])
.push(key);
}
} else {
bail!("Page `{}` has taxonomy `{}` which is not defined in config.toml", page.file.path.display(), name);
bail!(
"Page `{}` has taxonomy `{}` which is not defined in config.toml",
page.file.path.display(),
name
);
}
}
}
@@ -201,7 +211,6 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonom
Ok(taxonomies)
}


#[cfg(test)]
mod tests {
use super::*;
@@ -284,7 +293,10 @@ mod tests {

assert_eq!(categories.items[1].name, "Programming tutorials");
assert_eq!(categories.items[1].slug, "programming-tutorials");
assert_eq!(categories.items[1].permalink, "http://a-website.com/categories/programming-tutorials/");
assert_eq!(
categories.items[1].permalink,
"http://a-website.com/categories/programming-tutorials/"
);
assert_eq!(categories.items[1].pages.len(), 1);
}

@@ -293,9 +305,8 @@ mod tests {
let mut config = Config::default();
let mut library = Library::new(2, 0);

config.taxonomies = vec![
TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() },
];
config.taxonomies =
vec![TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }];
let mut page1 = Page::default();
let mut taxo_page1 = HashMap::new();
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
@@ -306,6 +317,9 @@ mod tests {
assert!(taxonomies.is_err());
let err = taxonomies.unwrap_err();
// no path as this is created by Default
assert_eq!(err.description(), "Page `` has taxonomy `tags` which is not defined in config.toml");
assert_eq!(
err.description(),
"Page `` has taxonomy `tags` which is not defined in config.toml"
);
}
}
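The reflowed loop in `find_taxonomies` still builds a two-level map, taxonomy name to term to page keys, through the entry API. Condensed:

```rust
use std::collections::HashMap;

fn main() {
    // (taxonomy, term, page key) triples, as the loop over pages sees them.
    let mentions = [("tags", "rust", 1u32), ("tags", "rust", 2), ("tags", "db", 1)];

    let mut all_taxonomies: HashMap<&str, HashMap<String, Vec<u32>>> = HashMap::new();
    for (name, term, key) in mentions {
        all_taxonomies
            .entry(name)
            .or_insert_with(HashMap::new)
            .entry(term.to_string())
            .or_insert_with(|| vec![])
            .push(key);
    }

    assert_eq!(all_taxonomies["tags"]["rust"], vec![1, 2]);
    assert_eq!(all_taxonomies["tags"]["db"], vec![1]);
}
```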

components/link_checker/src/lib.rs (+3, -9)

@@ -3,7 +3,7 @@ extern crate reqwest;
extern crate lazy_static;

use reqwest::header::{HeaderMap, ACCEPT};
use reqwest::{StatusCode};
use reqwest::StatusCode;
use std::collections::HashMap;
use std::error::Error;
use std::sync::{Arc, RwLock};
@@ -62,14 +62,8 @@ pub fn check_url(url: &str) -> LinkResult {

// Need to actually do the link checking
let res = match client.get(url).headers(headers).send() {
Ok(response) => LinkResult {
code: Some(response.status()),
error: None,
},
Err(e) => LinkResult {
code: None,
error: Some(e.description().to_string()),
},
Ok(response) => LinkResult { code: Some(response.status()), error: None },
Err(e) => LinkResult { code: None, error: Some(e.description().to_string()) },
};

LINKS.write().unwrap().insert(url.to_string(), res.clone());
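The link-checker hunk shrinks the two `LinkResult` literals to single lines; around them, the crate's memoization is untouched — a global `RwLock<HashMap>` consulted before hitting the network. Schematically, with a trivial stand-in for the HTTP call:

```rust
use std::collections::HashMap;
use std::sync::RwLock;

// Reduced model of the LINKS cache: url -> did the check succeed?
fn check_url(links: &RwLock<HashMap<String, bool>>, url: &str) -> bool {
    if let Some(cached) = links.read().unwrap().get(url) {
        return *cached; // cache hit: no request made
    }
    let res = url.starts_with("http"); // stand-in for client.get(url).send()
    links.write().unwrap().insert(url.to_string(), res);
    res
}

fn main() {
    let links = RwLock::new(HashMap::new());
    assert!(check_url(&links, "https://example.com"));
    assert!(check_url(&links, "https://example.com")); // served from the cache
}
```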


components/rebuild/src/lib.rs (+41, -21)

@@ -1,16 +1,15 @@
extern crate site;
#[macro_use]
extern crate errors;
extern crate library;
extern crate front_matter;
extern crate library;

use std::path::{Path, Component};
use std::path::{Component, Path};

use errors::Result;
use site::Site;
use library::{Page, Section};
use front_matter::{PageFrontMatter, SectionFrontMatter};

use library::{Page, Section};
use site::Site;

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum PageChangesNeeded {
@@ -37,7 +36,10 @@ pub enum SectionChangesNeeded {
/// Evaluates all the params in the front matter that changed so we can do the smallest
/// delta in the serve command
/// Order matters as the actions will be done in insertion order
fn find_section_front_matter_changes(current: &SectionFrontMatter, new: &SectionFrontMatter) -> Vec<SectionChangesNeeded> {
fn find_section_front_matter_changes(
current: &SectionFrontMatter,
new: &SectionFrontMatter,
) -> Vec<SectionChangesNeeded> {
let mut changes_needed = vec![];

if current.sort_by != new.sort_by {
@@ -54,7 +56,8 @@ fn find_section_front_matter_changes(current: &SectionFrontMatter, new: &Section

if current.paginate_by != new.paginate_by
|| current.paginate_path != new.paginate_path
|| current.insert_anchor_links != new.insert_anchor_links {
|| current.insert_anchor_links != new.insert_anchor_links
{
changes_needed.push(SectionChangesNeeded::RenderWithPages);
// Nothing else we can do
return changes_needed;
@@ -68,14 +71,18 @@ fn find_section_front_matter_changes(current: &SectionFrontMatter, new: &Section
/// Evaluates all the params in the front matter that changed so we can do the smallest
/// delta in the serve command
/// Order matters as the actions will be done in insertion order
fn find_page_front_matter_changes(current: &PageFrontMatter, other: &PageFrontMatter) -> Vec<PageChangesNeeded> {
fn find_page_front_matter_changes(
current: &PageFrontMatter,
other: &PageFrontMatter,
) -> Vec<PageChangesNeeded> {
let mut changes_needed = vec![];

if current.taxonomies != other.taxonomies {
changes_needed.push(PageChangesNeeded::Taxonomies);
}

if current.date != other.date || current.order != other.order || current.weight != other.weight {
if current.date != other.date || current.order != other.order || current.weight != other.weight
{
changes_needed.push(PageChangesNeeded::Sort);
}

@@ -86,7 +93,9 @@ fn find_page_front_matter_changes(current: &PageFrontMatter, other: &PageFrontMa
/// Handles a path deletion: could be a page, a section, a folder
fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> {
// Ignore the event if this path was not known
if !site.library.contains_section(&path.to_path_buf()) && !site.library.contains_page(&path.to_path_buf()) {
if !site.library.contains_section(&path.to_path_buf())
&& !site.library.contains_page(&path.to_path_buf())
{
return Ok(());
}

@@ -127,14 +136,21 @@ fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> {
}

// Front matter changed
for changes in find_section_front_matter_changes(&site.library.get_section(&pathbuf).unwrap().meta, &prev.meta) {
for changes in find_section_front_matter_changes(
&site.library.get_section(&pathbuf).unwrap().meta,
&prev.meta,
) {
// Sort always comes first if present so the rendering will be fine
match changes {
SectionChangesNeeded::Sort => {
site.register_tera_global_fns();
}
SectionChangesNeeded::Render => site.render_section(&site.library.get_section(&pathbuf).unwrap(), false)?,
SectionChangesNeeded::RenderWithPages => site.render_section(&site.library.get_section(&pathbuf).unwrap(), true)?,
SectionChangesNeeded::Render => {
site.render_section(&site.library.get_section(&pathbuf).unwrap(), false)?
}
SectionChangesNeeded::RenderWithPages => {
site.render_section(&site.library.get_section(&pathbuf).unwrap(), true)?
}
// not a common enough operation to make it worth optimizing
SectionChangesNeeded::Delete => {
site.build()?;
@@ -157,7 +173,7 @@ macro_rules! render_parent_section {
if let Some(s) = $site.library.find_parent_section($path) {
$site.render_section(s, false)?;
};
}
};
}

/// Handles a page being edited in some ways
@@ -181,7 +197,10 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> {
}

// Front matter changed
for changes in find_page_front_matter_changes(&site.library.get_page(&pathbuf).unwrap().meta, &prev.meta) {
for changes in find_page_front_matter_changes(
&site.library.get_page(&pathbuf).unwrap().meta,
&prev.meta,
) {
site.register_tera_global_fns();

// Sort always comes first if present so the rendering will be fine
@@ -213,7 +232,6 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> {
}
}


/// What happens when a section or a page is changed
pub fn after_content_change(site: &mut Site, path: &Path) -> Result<()> {
let is_section = path.file_name().unwrap() == "_index.md";
@@ -294,16 +312,15 @@ pub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> {
}
}


#[cfg(test)]
mod tests {
use std::collections::HashMap;

use front_matter::{PageFrontMatter, SectionFrontMatter, SortBy};
use super::{