@@ -16,7 +16,7 @@ matrix:
    # The earliest stable Rust version that works
    - env: TARGET=x86_64-unknown-linux-gnu
      rust: 1.27.0
      rust: 1.28.0

before_install: set -e
@@ -59,7 +59,7 @@ dependencies = [ | |||
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.32 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_urlencoded 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"sha1 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -80,7 +80,7 @@ name = "actix_derive" | |||
version = "0.3.0" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
@@ -291,26 +291,7 @@ dependencies = [ | |||
"highlighting 0.1.0", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"toml 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
name = "content" | |||
version = "0.1.0" | |||
dependencies = [ | |||
"chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"config 0.1.0", | |||
"errors 0.1.0", | |||
"front_matter 0.1.0", | |||
"globset 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"rayon 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"rendering 0.1.0", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"tempfile 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"toml 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"utils 0.1.0", | |||
"toml 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
@@ -450,7 +431,7 @@ dependencies = [ | |||
[[package]] | |||
name = "deflate" | |||
version = "0.7.18" | |||
version = "0.7.19" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"adler32 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -482,7 +463,7 @@ dependencies = [ | |||
"rust-stemmers 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.32 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"strum 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"strum_macros 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
@@ -546,7 +527,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" | |||
[[package]] | |||
name = "encoding_rs" | |||
version = "0.8.7" | |||
version = "0.8.9" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -583,7 +564,7 @@ dependencies = [ | |||
"error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"image 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"toml 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"toml 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
@@ -600,7 +581,7 @@ name = "failure_derive" | |||
version = "0.1.2" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"synstructure 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -618,7 +599,7 @@ dependencies = [ | |||
[[package]] | |||
name = "flate2" | |||
version = "1.0.2" | |||
version = "1.0.3" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -654,7 +635,7 @@ dependencies = [ | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"toml 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"toml 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
@@ -727,7 +708,7 @@ dependencies = [ | |||
[[package]] | |||
name = "gif" | |||
version = "0.10.0" | |||
version = "0.10.1" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"color_quant 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -759,7 +740,6 @@ dependencies = [ | |||
"atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"content 0.1.0", | |||
"ctrlc 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"errors 0.1.0", | |||
"front_matter 0.1.0", | |||
@@ -768,7 +748,7 @@ dependencies = [ | |||
"rebuild 0.1.0", | |||
"site 0.1.0", | |||
"termcolor 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"toml 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"toml 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"url 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"utils 0.1.0", | |||
"ws 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -873,7 +853,7 @@ dependencies = [ | |||
[[package]] | |||
name = "hyper-tls" | |||
version = "0.3.0" | |||
version = "0.3.1" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"bytes 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -899,7 +879,7 @@ version = "0.20.0" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"byteorder 1.2.6 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"gif 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"gif 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"jpeg-decoder 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"lzw 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"num-iter 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -1035,6 +1015,27 @@ dependencies = [ | |||
"crc 1.8.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
name = "library" | |||
version = "0.1.0" | |||
dependencies = [ | |||
"chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"config 0.1.0", | |||
"errors 0.1.0", | |||
"front_matter 0.1.0", | |||
"globset 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"rayon 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"rendering 0.1.0", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"slotmap 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"tempfile 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"toml 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"utils 0.1.0", | |||
] | |||
[[package]] | |||
name = "link_checker" | |||
version = "0.1.0" | |||
@@ -1102,7 +1103,7 @@ dependencies = [ | |||
"phf_codegen 0.7.23 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.32 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"string_cache_codegen 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"tendril 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -1277,13 +1278,13 @@ dependencies = [ | |||
[[package]] | |||
name = "num-derive" | |||
version = "0.2.2" | |||
version = "0.2.3" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"syn 0.15.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
@@ -1383,21 +1384,6 @@ dependencies = [ | |||
"stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
name = "pagination" | |||
version = "0.1.0" | |||
dependencies = [ | |||
"config 0.1.0", | |||
"content 0.1.0", | |||
"errors 0.1.0", | |||
"front_matter 0.1.0", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"taxonomies 0.1.0", | |||
"tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"utils 0.1.0", | |||
] | |||
[[package]] | |||
name = "parking_lot" | |||
version = "0.6.4" | |||
@@ -1497,7 +1483,7 @@ version = "0.12.0" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"deflate 0.7.18 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"deflate 0.7.19 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"inflate 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"num-iter 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
@@ -1517,7 +1503,7 @@ dependencies = [ | |||
[[package]] | |||
name = "proc-macro2" | |||
version = "0.4.19" | |||
version = "0.4.20" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -1555,7 +1541,7 @@ name = "quote" | |||
version = "0.6.8" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
@@ -1576,13 +1562,21 @@ dependencies = [ | |||
"cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"rand_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"rand_core 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
name = "rand_core" | |||
version = "0.2.1" | |||
version = "0.2.2" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
name = "rand_core" | |||
version = "0.3.0" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
[[package]] | |||
@@ -1610,11 +1604,11 @@ dependencies = [ | |||
name = "rebuild" | |||
version = "0.1.0" | |||
dependencies = [ | |||
"content 0.1.0", | |||
"errors 0.1.0", | |||
"front_matter 0.1.0", | |||
"fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"highlighting 0.1.0", | |||
"library 0.1.0", | |||
"site 0.1.0", | |||
"tempfile 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
@@ -1693,18 +1687,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"bytes 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"encoding_rs 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"encoding_rs 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"http 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"hyper 0.12.11 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"hyper-tls 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"hyper-tls 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"libflate 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"log 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"mime 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"mime_guess 2.0.0-alpha.6 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"native-tls 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.32 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_urlencoded 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"tokio 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"tokio-io 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -1809,10 +1803,10 @@ name = "search" | |||
version = "0.1.0" | |||
dependencies = [ | |||
"ammonia 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"content 0.1.0", | |||
"elasticlunr-rs 2.3.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"errors 0.1.0", | |||
"lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"library 0.1.0", | |||
] | |||
[[package]] | |||
@@ -1858,14 +1852,14 @@ name = "serde_derive" | |||
version = "1.0.79" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"syn 0.15.6 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"syn 0.15.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
name = "serde_json" | |||
version = "1.0.31" | |||
version = "1.0.32" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"indexmap 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -1900,18 +1894,16 @@ name = "site" | |||
version = "0.1.0" | |||
dependencies = [ | |||
"config 0.1.0", | |||
"content 0.1.0", | |||
"errors 0.1.0", | |||
"front_matter 0.1.0", | |||
"glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"imageproc 0.1.0", | |||
"pagination 0.1.0", | |||
"library 0.1.0", | |||
"rayon 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"sass-rs 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"search 0.1.0", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"taxonomies 0.1.0", | |||
"tempfile 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"templates 0.1.0", | |||
"tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -1923,6 +1915,11 @@ name = "slab" | |||
version = "0.4.1" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
[[package]] | |||
name = "slotmap" | |||
version = "0.2.1" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
[[package]] | |||
name = "slug" | |||
version = "0.1.4" | |||
@@ -2006,7 +2003,7 @@ name = "strum_macros" | |||
version = "0.9.1" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
@@ -2036,17 +2033,17 @@ name = "syn" | |||
version = "0.14.9" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
name = "syn" | |||
version = "0.15.6" | |||
version = "0.15.8" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
@@ -2064,7 +2061,7 @@ name = "synstructure" | |||
version = "0.9.0" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -2077,7 +2074,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"flate2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"flate2 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"onig 3.2.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -2085,26 +2082,11 @@ dependencies = [ | |||
"regex-syntax 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.32 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"walkdir 2.2.5 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"yaml-rust 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
[[package]] | |||
name = "taxonomies" | |||
version = "0.1.0" | |||
dependencies = [ | |||
"config 0.1.0", | |||
"content 0.1.0", | |||
"errors 0.1.0", | |||
"front_matter 0.1.0", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"utils 0.1.0", | |||
] | |||
[[package]] | |||
name = "tempfile" | |||
version = "3.0.4" | |||
@@ -2124,12 +2106,11 @@ version = "0.1.0" | |||
dependencies = [ | |||
"base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"config 0.1.0", | |||
"content 0.1.0", | |||
"errors 0.1.0", | |||
"imageproc 0.1.0", | |||
"lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"library 0.1.0", | |||
"pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"taxonomies 0.1.0", | |||
"tera 0.11.16 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"utils 0.1.0", | |||
] | |||
@@ -2158,7 +2139,7 @@ dependencies = [ | |||
"pest_derive 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"serde_json 1.0.32 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"unic-segment 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"url 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -2205,7 +2186,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"byteorder 1.2.6 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"lzw 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"num-derive 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"num-derive 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", | |||
"num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", | |||
] | |||
@@ -2389,7 +2370,7 @@ dependencies = [ | |||
[[package]] | |||
name = "toml" | |||
version = "0.4.7" | |||
version = "0.4.8" | |||
source = "registry+https://github.com/rust-lang/crates.io-index" | |||
dependencies = [ | |||
"serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)", | |||
@@ -2787,7 +2768,7 @@ dependencies = [ | |||
"checksum crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "677d453a17e8bd2b913fa38e8b9cf04bcdbb5be790aa294f2389661d72036015" | |||
"checksum ctrlc 3.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "630391922b1b893692c6334369ff528dcc3a9d8061ccf4c803aa8f83cb13db5e" | |||
"checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850" | |||
"checksum deflate 0.7.18 (registry+https://github.com/rust-lang/crates.io-index)" = "32c8120d981901a9970a3a1c97cf8b630e0fa8c3ca31e75b6fd6fd5f9f427b31" | |||
"checksum deflate 0.7.19 (registry+https://github.com/rust-lang/crates.io-index)" = "8a6abb26e16e8d419b5c78662aa9f82857c2386a073da266840e474d5055ec86" | |||
"checksum deunicode 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "850878694b7933ca4c9569d30a34b55031b9b139ee1fc7b94a527c4ef960d690" | |||
"checksum dtoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6d301140eb411af13d3115f9a562c85cc6b541ade9dfa314132244aaee7489dd" | |||
"checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0" | |||
@@ -2799,14 +2780,14 @@ dependencies = [ | |||
"checksum encoding-index-singlebyte 1.20141219.5 (registry+https://github.com/rust-lang/crates.io-index)" = "3351d5acffb224af9ca265f435b859c7c01537c0849754d3db3fdf2bfe2ae84a" | |||
"checksum encoding-index-tradchinese 1.20141219.5 (registry+https://github.com/rust-lang/crates.io-index)" = "fd0e20d5688ce3cab59eb3ef3a2083a5c77bf496cb798dc6fcdb75f323890c18" | |||
"checksum encoding_index_tests 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569" | |||
"checksum encoding_rs 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)" = "21a550ec129ca2f8593227888625c7c5331c6ad878e2cee6b7ac25e1c7d05746" | |||
"checksum encoding_rs 0.8.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f1a79fa56c329a5b087de13748054fb3b974c4a672c12c71f0b66e35c5addec5" | |||
"checksum error-chain 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "faa976b4fd2e4c2b2f3f486874b19e61944d3de3de8b61c9fcf835d583871bcc" | |||
"checksum error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "07e791d3be96241c77c43846b665ef1384606da2cd2a48730abe606a12906e02" | |||
"checksum error-chain 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6930e04918388a9a2e41d518c25cf679ccafe26733fb4127dbf21993f2575d46" | |||
"checksum failure 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7efb22686e4a466b1ec1a15c2898f91fa9cb340452496dca654032de20ff95b9" | |||
"checksum failure_derive 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "946d0e98a50d9831f5d589038d2ca7f8f455b1c21028c0db0e84116a12696426" | |||
"checksum filetime 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "da4b9849e77b13195302c174324b5ba73eec9b236b24c221a61000daefb95c5f" | |||
"checksum flate2 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "37847f133aae7acf82bb9577ccd8bda241df836787642654286e79679826a54b" | |||
"checksum flate2 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4af030962d89d62aa52cd9492083b1cd9b2d1a77764878102a6c0f86b4d5444d" | |||
"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3" | |||
"checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" | |||
"checksum foreign-types-shared 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" | |||
@@ -2819,7 +2800,7 @@ dependencies = [ | |||
"checksum futures 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)" = "0c84b40c7e2de99ffd70602db314a7a8c26b2b3d830e6f7f7a142a8860ab3ca4" | |||
"checksum futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" | |||
"checksum getopts 0.2.18 (registry+https://github.com/rust-lang/crates.io-index)" = "0a7292d30132fb5424b354f5dc02512a86e4c516fe544bb7a25e7f266951b797" | |||
"checksum gif 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff3414b424657317e708489d2857d9575f4403698428b040b609b9d1c1a84a2c" | |||
"checksum gif 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dd4bca55ac1f213920ce3527ccd62386f1f15fa3f1714aeee1cf93f2c416903f" | |||
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" | |||
"checksum globset 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4743617a7464bbda3c8aec8558ff2f9429047e025771037df561d383337ff865" | |||
"checksum h2 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "a27e7ed946e8335bdf9a191bc1b9b14a03ba822d013d2f58437f4fabcbd7fc2c" | |||
@@ -2830,7 +2811,7 @@ dependencies = [ | |||
"checksum httparse 1.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e8734b0cfd3bc3e101ec59100e101c2eecd19282202e87808b3037b442777a83" | |||
"checksum humansize 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b6cab2627acfc432780848602f3f558f7e9dd427352224b0d9324025796d2a5e" | |||
"checksum hyper 0.12.11 (registry+https://github.com/rust-lang/crates.io-index)" = "78d50abbd1790e0f4c74cb1d4a2211b439bac661d54107ad5564c55e77906762" | |||
"checksum hyper-tls 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "caaee4dea92794a9e697038bd401e264307d1f22c883dbcb6f6618ba0d3b3bd3" | |||
"checksum hyper-tls 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "32cd73f14ad370d3b4d4b7dce08f69b81536c82e39fcc89731930fe5788cd661" | |||
"checksum idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" | |||
"checksum image 0.20.0 (registry+https://github.com/rust-lang/crates.io-index)" = "60710fd3cb40c2434451d8d5147bcf39bbb68aae0741041133e09439cb2401e3" | |||
"checksum indexmap 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "08173ba1e906efb6538785a8844dd496f5d34f0a2d88038e95195172fc667220" | |||
@@ -2872,7 +2853,7 @@ dependencies = [ | |||
"checksum nix 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d37e713a259ff641624b6cb20e3b12b2952313ba36b6823c0f16e6cfd9e5de17" | |||
"checksum nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "9a2228dca57108069a5262f2ed8bd2e82496d2e074a06d1ccc7ce1687b6ae0a2" | |||
"checksum notify 4.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "873ecfd8c174964ae30f401329d140142312c8e5590719cf1199d5f1717d8078" | |||
"checksum num-derive 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0d2c31b75c36a993d30c7a13d70513cb93f02acafdd5b7ba250f9b0e18615de7" | |||
"checksum num-derive 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8af1847c907c2f04d7bfd572fb25bbb4385c637fe5be163cf2f8c5d778fe1e7d" | |||
"checksum num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea" | |||
"checksum num-iter 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "af3fdbbc3291a5464dc57b03860ec37ca6bf915ed6ee385e7c6c052c422b2124" | |||
"checksum num-rational 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4e96f040177bb3da242b5b1ecf3f54b5d5af3efbbfb18608977a5d2767b22f10" | |||
@@ -2898,7 +2879,7 @@ dependencies = [ | |||
"checksum png 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f54b9600d584d3b8a739e1662a595fab051329eff43f20e7d8cc22872962145b" | |||
"checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" | |||
"checksum proc-macro2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1b06e2f335f48d24442b35a19df506a835fb3547bc3c06ef27340da9acf5cae7" | |||
"checksum proc-macro2 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)" = "ffe022fb8c8bd254524b0b3305906c1921fa37a84a644e29079a9e62200c3901" | |||
"checksum proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)" = "3d7b7eaaa90b4a90a932a9ea6666c95a389e424eff347f0f793979289429feee" | |||
"checksum pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d6fdf85cda6cadfae5428a54661d431330b312bc767ddbc57adbedc24da66e32" | |||
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0" | |||
"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" | |||
@@ -2906,7 +2887,8 @@ dependencies = [ | |||
"checksum quote 0.6.8 (registry+https://github.com/rust-lang/crates.io-index)" = "dd636425967c33af890042c483632d33fa7a18f19ad1d7ea72e8998c6ef8dea5" | |||
"checksum rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8356f47b32624fef5b3301c1be97e5944ecdd595409cc5da11d05f211db6cfbd" | |||
"checksum rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e464cd887e869cddcae8792a4ee31d23c7edd516700695608f5b98c67ee0131c" | |||
"checksum rand_core 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "edecf0f94da5551fc9b492093e30b041a891657db7940ee221f9d2f66e82eef2" | |||
"checksum rand_core 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1961a422c4d189dfb50ffa9320bf1f2a9bd54ecb92792fb9477f99a1045f3372" | |||
"checksum rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0905b6b7079ec73b314d4c748701f6931eb79fd97c668caa3f1899b22b32c6db" | |||
"checksum rayon 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "df7a791f788cb4c516f0e091301a29c2b71ef680db5e644a7d68835c8ae6dbfa" | |||
"checksum rayon-core 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b055d1e92aba6877574d8fe604a63c8b5df60f60e5982bf7ccbb1338ea527356" | |||
"checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1" | |||
@@ -2935,11 +2917,12 @@ dependencies = [ | |||
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" | |||
"checksum serde 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)" = "84257ccd054dc351472528c8587b4de2dbf0dc0fe2e634030c1a90bfdacebaa9" | |||
"checksum serde_derive 1.0.79 (registry+https://github.com/rust-lang/crates.io-index)" = "31569d901045afbff7a9479f793177fe9259819aff10ab4f89ef69bbc5f567fe" | |||
"checksum serde_json 1.0.31 (registry+https://github.com/rust-lang/crates.io-index)" = "bb47a3d5c84320222f66d7db21157c4a7407755de41798f9b4c1c40593397b1a" | |||
"checksum serde_json 1.0.32 (registry+https://github.com/rust-lang/crates.io-index)" = "43344e7ce05d0d8280c5940cabb4964bea626aa58b1ec0e8c73fa2a8512a38ce" | |||
"checksum serde_urlencoded 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "aaed41d9fb1e2f587201b863356590c90c1157495d811430a0c0325fe8169650" | |||
"checksum sha1 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d" | |||
"checksum siphasher 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac" | |||
"checksum slab 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5f9776d6b986f77b35c6cf846c11ad986ff128fe0b2b63a3628e3755e8d3102d" | |||
"checksum slotmap 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4ed041f7f2ff35f2bf7d688bf30686976512f8300e37433c2c73ea9f4cf14b" | |||
"checksum slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b3bc762e6a4b6c6fcaade73e77f9ebc6991b676f88bb2358bddb56560f073373" | |||
"checksum smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "153ffa32fd170e9944f7e0838edf824a754ec4c1fc64746fcc9fe1f8fa602e5d" | |||
"checksum socket2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c4d11a52082057d87cb5caa31ad812f4504b97ab44732cd8359df2e9ff9f48e7" | |||
@@ -2954,7 +2937,7 @@ dependencies = [ | |||
"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad" | |||
"checksum syn 0.13.11 (registry+https://github.com/rust-lang/crates.io-index)" = "14f9bf6292f3a61d2c716723fdb789a41bbe104168e6f496dc6497e531ea1b9b" | |||
"checksum syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)" = "261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741" | |||
"checksum syn 0.15.6 (registry+https://github.com/rust-lang/crates.io-index)" = "854b08a640fc8f54728fb95321e3ec485b365a97fe47609797c671addd1dde69" | |||
"checksum syn 0.15.8 (registry+https://github.com/rust-lang/crates.io-index)" = "356d1c5043597c40489e9af2d2498c7fefc33e99b7d75b43be336c8a59b3e45e" | |||
"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6" | |||
"checksum synstructure 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "85bb9b7550d063ea184027c9b8c20ac167cd36d3e06b3a40bceb9d746dc1a7b7" | |||
"checksum syntect 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "dc8a6f0db88d4afc340522c20d260411e746b2225b257c6b238a75de9d7cec78" | |||
@@ -2980,7 +2963,7 @@ dependencies = [ | |||
"checksum tokio-timer 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "3a52f00c97fedb6d535d27f65cccb7181c8dd4c6edc3eda9ea93f6d45d05168e" | |||
"checksum tokio-udp 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "da941144b816d0dcda4db3a1ba87596e4df5e860a72b70783fe435891f80601c" | |||
"checksum tokio-uds 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "22e3aa6d1fcc19e635418dc0a30ab5bd65d347973d6f43f1a37bf8d9d1335fc9" | |||
"checksum toml 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "b7e7d59d55f36979a9dd86d71ae54657a5e9c7fdb4fa2212f4064e2d32f9dcda" | |||
"checksum toml 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)" = "4a2ecc31b0351ea18b3fe11274b8db6e4d82bce861bbb22e6dbed40417902c65" | |||
"checksum trust-dns-proto 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "32d7c204ee231f802aa821f9dc2195aa0d0269ef7e9f8c844208565c9e3981e4" | |||
"checksum trust-dns-resolver 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4a821ad51a29816420b8cac4b026756b81c023630b97eaa4c8090637ee3508bd" | |||
"checksum try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382" | |||
@@ -33,7 +33,6 @@ ctrlc = "3"
site = { path = "components/site" }
errors = { path = "components/errors" }
content = { path = "components/content" }
front_matter = { path = "components/front_matter" }
utils = { path = "components/utils" }
rebuild = { path = "components/rebuild" }
@@ -41,18 +40,19 @@ rebuild = { path = "components/rebuild" }
[workspace]
members = [
    "components/config",
    "components/content",
    "components/errors",
    "components/front_matter",
    "components/highlighting",
    "components/pagination",
    "components/rebuild",
    "components/rendering",
    "components/site",
    "components/taxonomies",
    "components/templates",
    "components/utils",
    "components/search",
    "components/imageproc",
    "components/link_checker",
    "components/library",
]

#[profile.release]
#debug = true
@@ -10,7 +10,7 @@ environment:
  matrix:
    - target: x86_64-pc-windows-msvc
      RUST_VERSION: 1.27.0
      RUST_VERSION: 1.28.0
    - target: x86_64-pc-windows-msvc
      RUST_VERSION: stable
@@ -1,146 +0,0 @@
#![feature(test)]
extern crate test;
extern crate tera;

extern crate content;
extern crate front_matter;
extern crate config;

use std::collections::HashMap;
use std::path::Path;

use config::Config;
use tera::Tera;
use front_matter::{SortBy, InsertAnchor};
use content::{Page, sort_pages, populate_siblings};

fn create_pages(number: usize) -> Vec<Page> {
    let mut pages = vec![];
    let config = Config::default();
    let mut tera = Tera::default();
    tera.add_raw_template("shortcodes/youtube.html", "hello");
    let permalinks = HashMap::new();

    for i in 0..number {
        let mut page = Page::default();
        page.meta.weight = Some(i);
        page.raw_content = r#"
# Modus cognitius profanam ne duae virtutis mundi

## Ut vita

Lorem markdownum litora, care ponto nomina, et ut aspicit gelidas sui et
purpureo genuit. Tamen colla venientis [delphina](http://nil-sol.com/ecquis)
Tusci et temptata citaeque curam isto ubi vult vulnere reppulit.

- Seque vidit flendoque de quodam
- Dabit minimos deiecto caputque noctis pluma
- Leti coniunx est Helicen
- Illius pulvereumque Icare inpositos
- Vivunt pereo pluvio tot ramos Olenios gelidis
- Quater teretes natura inde

### A subsection

Protinus dicunt, breve per, et vivacis genus Orphei munere. Me terram [dimittere
casside](http://corpus.org/) pervenit saxo primoque frequentat genuum sorori
praeferre causas Libys. Illud in serpit adsuetam utrimque nunc haberent,
**terrae si** veni! Hectoreis potes sumite [Mavortis retusa](http://tua.org/)
granum captantur potuisse Minervae, frugum.

> Clivo sub inprovisoque nostrum minus fama est, discordia patrem petebat precatur
absumitur, poena per sit. Foramina *tamen cupidine* memor supplex tollentes
dictum unam orbem, Anubis caecae. Viderat formosior tegebat satis, Aethiopasque
sit submisso coniuge tristis ubi!

## Praeceps Corinthus totidem quem crus vultum cape

```rs
#[derive(Debug)]
pub struct Site {
    /// The base path of the gutenberg site
    pub base_path: PathBuf,
    /// The parsed config for the site
    pub config: Config,
    pub pages: HashMap<PathBuf, Page>,
    pub sections: HashMap<PathBuf, Section>,
    pub tera: Tera,
    live_reload: bool,
    output_path: PathBuf,
    static_path: PathBuf,
    pub tags: Option<Taxonomy>,
    pub categories: Option<Taxonomy>,
    /// A map of all .md files (section and pages) and their permalink
    /// We need that if there are relative links in the content that need to be resolved
    pub permalinks: HashMap<String, String>,
}
```

## More stuff
And a shortcode:

{{ youtube(id="my_youtube_id") }}

### Another subsection
Gotta make the toc do a little bit of work

# A big title

- hello
- world
- !

```py
if __name__ == "__main__":
    gen_site("basic-blog", [""], 250, paginate=True)
```
"#.to_string();
        page.render_markdown(&permalinks, &tera, &config, &Path::new(""), InsertAnchor::None).unwrap();
        pages.push(page);
    }

    pages
}

// Most of the time spent in those benches are due to the .clone()...
// but i don't know how to remove them so there are some baseline bench with
// just the cloning and with a bit of math we can figure it out
#[bench]
fn bench_baseline_cloning(b: &mut test::Bencher) {
    let pages = create_pages(250);
    b.iter(|| pages.clone());
}

#[bench]
fn bench_sorting_none(b: &mut test::Bencher) {
    let pages = create_pages(250);
    b.iter(|| sort_pages(pages.clone(), SortBy::Weight));
}

#[bench]
fn bench_sorting_order(b: &mut test::Bencher) {
    let pages = create_pages(250);
    b.iter(|| sort_pages(pages.clone(), SortBy::Weight));
}

#[bench]
fn bench_populate_siblings(b: &mut test::Bencher) {
    let pages = create_pages(250);
    let (sorted_pages, _) = sort_pages(pages, SortBy::Weight);
    b.iter(|| populate_siblings(&sorted_pages.clone(), SortBy::Weight));
}

#[bench]
fn bench_page_render_html(b: &mut test::Bencher) {
    let pages = create_pages(10);
    let (mut sorted_pages, _) = sort_pages(pages, SortBy::Weight);
    sorted_pages = populate_siblings(&sorted_pages, SortBy::Weight);
    let config = Config::default();
    let mut tera = Tera::default();
    tera.add_raw_template("page.html", "{{ page.content }}").unwrap();
    let page = &sorted_pages[5];

    b.iter(|| page.render_html(&tera, &config).unwrap());
}
@@ -1,29 +0,0 @@
extern crate tera;
extern crate slug;
extern crate serde;
extern crate rayon;
extern crate chrono;

extern crate errors;
extern crate config;
extern crate front_matter;
extern crate rendering;
extern crate utils;

#[cfg(test)]
extern crate tempfile;
#[cfg(test)]
extern crate toml;
#[cfg(test)]
extern crate globset;

mod file_info;
mod page;
mod section;
mod sorting;

pub use file_info::FileInfo;
pub use page::Page;
pub use section::Section;
pub use sorting::{sort_pages, populate_siblings, sort_pages_by_date};
@@ -1,247 +0,0 @@
use std::cmp::Ordering;

use rayon::prelude::*;

use page::Page;
use front_matter::SortBy;

/// The comparison function of sorting pages by day
/// Used by the RSS rendering
/// To remove if `sort_pages` is changed to work on borrowed values
/// This cannot be used in `sort_pages` currently as it takes &&Page instead of &Page
pub fn sort_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
    let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap());
    if ord == Ordering::Equal {
        a.permalink.cmp(&b.permalink)
    } else {
        ord
    }
}

/// Sort pages by the given criteria
///
/// Any pages that doesn't have a required field when the sorting method is other than none
/// will be ignored.
pub fn sort_pages(pages: Vec<Page>, sort_by: SortBy) -> (Vec<Page>, Vec<Page>) {
    if sort_by == SortBy::None {
        return (pages, vec![]);
    }

    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages
        .into_par_iter()
        .partition(|page| {
            match sort_by {
                SortBy::Date => page.meta.datetime.is_some(),
                SortBy::Weight => page.meta.weight.is_some(),
                _ => unreachable!()
            }
        });

    match sort_by {
        SortBy::Date => {
            can_be_sorted.par_sort_unstable_by(|a, b| {
                let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap());
                if ord == Ordering::Equal {
                    a.permalink.cmp(&b.permalink)
                } else {
                    ord
                }
            })
        }
        SortBy::Weight => {
            can_be_sorted.par_sort_unstable_by(|a, b| {
                let ord = a.meta.weight().cmp(&b.meta.weight());
                if ord == Ordering::Equal {
                    a.permalink.cmp(&b.permalink)
                } else {
                    ord
                }
            })
        }
        _ => unreachable!()
    };

    (can_be_sorted, cannot_be_sorted)
}

/// Horribly inefficient way to set previous and next on each pages that skips drafts
/// So many clones
pub fn populate_siblings(input: &[Page], sort_by: SortBy) -> Vec<Page> {
    let mut res = Vec::with_capacity(input.len());

    // The input is already sorted
    for (i, _) in input.iter().enumerate() {
        let mut new_page = input[i].clone();

        if new_page.is_draft() {
            res.push(new_page);
            continue;
        }

        if i > 0 {
            let mut j = i;
            loop {
                if j == 0 {
                    break;
                }

                j -= 1;

                if input[j].is_draft() {
                    continue;
                }

                // Remove prev/next otherwise we serialise the whole thing...
                let mut next_page = input[j].clone();

                match sort_by {
                    SortBy::Weight => {
                        next_page.lighter = None;
                        next_page.heavier = None;
                        new_page.lighter = Some(Box::new(next_page));
                    }
                    SortBy::Date => {
                        next_page.earlier = None;
                        next_page.later = None;
                        new_page.later = Some(Box::new(next_page));
                    }
                    SortBy::None => ()
                }
                break;
            }
        }

        if i < input.len() - 1 {
            let mut j = i;
            loop {
                if j == input.len() - 1 {
                    break;
                }

                j += 1;

                if input[j].is_draft() {
                    continue;
                }

                // Remove prev/next otherwise we serialise the whole thing...
                let mut previous_page = input[j].clone();

                match sort_by {
                    SortBy::Weight => {
                        previous_page.lighter = None;
                        previous_page.heavier = None;
                        new_page.heavier = Some(Box::new(previous_page));
                    }
                    SortBy::Date => {
                        previous_page.earlier = None;
                        previous_page.later = None;
                        new_page.earlier = Some(Box::new(previous_page));
                    }
                    SortBy::None => {}
                }
                break;
            }
        }

        res.push(new_page);
    }

    res
}

#[cfg(test)]
mod tests {
    use front_matter::{PageFrontMatter, SortBy};
    use page::Page;
    use super::{sort_pages, populate_siblings};

    fn create_page_with_date(date: &str) -> Page {
        let mut front_matter = PageFrontMatter::default();
        front_matter.date = Some(date.to_string());
        front_matter.date_to_datetime();
        Page::new("content/hello.md", front_matter)
    }

    fn create_page_with_weight(weight: usize) -> Page {
        let mut front_matter = PageFrontMatter::default();
        front_matter.weight = Some(weight);
        Page::new("content/hello.md", front_matter)
    }

    #[test]
    fn can_sort_by_dates() {
        let input = vec![
            create_page_with_date("2018-01-01"),
            create_page_with_date("2017-01-01"),
            create_page_with_date("2019-01-01"),
        ];
        let (pages, _) = sort_pages(input, SortBy::Date);
        // Should be sorted by date
        assert_eq!(pages[0].clone().meta.date.unwrap().to_string(), "2019-01-01");
        assert_eq!(pages[1].clone().meta.date.unwrap().to_string(), "2018-01-01");
        assert_eq!(pages[2].clone().meta.date.unwrap().to_string(), "2017-01-01");
    }

    #[test]
    fn can_sort_by_weight() {
        let input = vec![
            create_page_with_weight(2),
            create_page_with_weight(3),
            create_page_with_weight(1),
        ];
        let (pages, _) = sort_pages(input, SortBy::Weight);
        // Should be sorted by weight
        assert_eq!(pages[0].clone().meta.weight.unwrap(), 1);
        assert_eq!(pages[1].clone().meta.weight.unwrap(), 2);
        assert_eq!(pages[2].clone().meta.weight.unwrap(), 3);
    }

    #[test]
    fn can_sort_by_none() {
        let input = vec![
            create_page_with_weight(2),
            create_page_with_weight(3),
            create_page_with_weight(1),
        ];
        let (pages, _) = sort_pages(input, SortBy::None);
        assert_eq!(pages[0].clone().meta.weight.unwrap(), 2);
        assert_eq!(pages[1].clone().meta.weight.unwrap(), 3);
        assert_eq!(pages[2].clone().meta.weight.unwrap(), 1);
    }

    #[test]
    fn ignore_page_with_missing_field() {
        let input = vec![
            create_page_with_weight(2),
            create_page_with_weight(3),
            create_page_with_date("2019-01-01"),
        ];
        let (pages, unsorted) = sort_pages(input, SortBy::Weight);
        assert_eq!(pages.len(), 2);
        assert_eq!(unsorted.len(), 1);
    }

    #[test]
    fn can_populate_siblings() {
        let input = vec![
            create_page_with_weight(1),
            create_page_with_weight(2),
            create_page_with_weight(3),
        ];
        let pages = populate_siblings(&input, SortBy::Weight);

        assert!(pages[0].clone().lighter.is_none());
        assert!(pages[0].clone().heavier.is_some());
        assert_eq!(pages[0].clone().heavier.unwrap().meta.weight.unwrap(), 2);

        assert!(pages[1].clone().heavier.is_some());
        assert!(pages[1].clone().lighter.is_some());
        assert_eq!(pages[1].clone().lighter.unwrap().meta.weight.unwrap(), 1);
        assert_eq!(pages[1].clone().heavier.unwrap().meta.weight.unwrap(), 3);

        assert!(pages[2].clone().lighter.is_some());
        assert!(pages[2].clone().heavier.is_none());
        assert_eq!(pages[2].clone().lighter.unwrap().meta.weight.unwrap(), 2);
    }
}
@@ -1,20 +1,22 @@
[package]
name = "content"
name = "library"
version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

[dependencies]
slotmap = "0.2"
rayon = "1"
chrono = { version = "0.4", features = ["serde"] }
tera = "0.11"
serde = "1"
serde_derive = "1"
slug = "0.1"
rayon = "1"
chrono = { version = "0.4", features = ["serde"] }

errors = { path = "../errors" }
front_matter = { path = "../front_matter" }
config = { path = "../config" }
utils = { path = "../utils" }
rendering = { path = "../rendering" }
front_matter = { path = "../front_matter" }
errors = { path = "../errors" }

[dev-dependencies]
tempfile = "3"
@@ -0,0 +1,7 @@
mod file_info;
mod page;
mod section;

pub use self::file_info::FileInfo;
pub use self::page::{Page, SerializingPage};
pub use self::section::{Section, SerializingSection};
@@ -1,11 +1,10 @@ | |||
/// A page, can be a blog post or a basic page | |||
use std::collections::HashMap; | |||
use std::path::{Path, PathBuf}; | |||
use std::result::Result as StdResult; | |||
use tera::{Tera, Context as TeraContext}; | |||
use serde::ser::{SerializeStruct, self}; | |||
use tera::{Tera, Context as TeraContext, Value, Map}; | |||
use slug::slugify; | |||
use slotmap::{Key, DenseSlotMap}; | |||
use errors::{Result, ResultExt}; | |||
use config::Config; | |||
@@ -14,9 +13,119 @@ use utils::site::get_reading_analytics; | |||
use utils::templates::render_template; | |||
use front_matter::{PageFrontMatter, InsertAnchor, split_page_content}; | |||
use rendering::{RenderContext, Header, render_content}; | |||
use library::Library; | |||
use content::file_info::FileInfo; | |||
/// What we are sending to the templates when rendering them | |||
#[derive(Clone, Debug, PartialEq, Serialize)] | |||
pub struct SerializingPage<'a> { | |||
content: &'a str, | |||
permalink: &'a str, | |||
slug: &'a str, | |||
title: &'a Option<String>, | |||
description: &'a Option<String>, | |||
date: &'a Option<String>, | |||
year: Option<i32>, | |||
month: Option<u32>, | |||
day: Option<u32>, | |||
taxonomies: &'a HashMap<String, Vec<String>>, | |||
extra: &'a Map<String, Value>, | |||
path: &'a str, | |||
components: &'a [String], | |||
summary: &'a Option<String>, | |||
word_count: Option<usize>, | |||
reading_time: Option<usize>, | |||
toc: &'a [Header], | |||
assets: Vec<String>, | |||
draft: bool, | |||
lighter: Option<Box<SerializingPage<'a>>>, | |||
heavier: Option<Box<SerializingPage<'a>>>, | |||
earlier: Option<Box<SerializingPage<'a>>>, | |||
later: Option<Box<SerializingPage<'a>>>, | |||
} | |||
impl<'a> SerializingPage<'a> { | |||
/// Grabs all the data from a page, including sibling pages | |||
pub fn from_page(page: &'a Page, pages: &'a DenseSlotMap<Page>) -> Self { | |||
let mut year = None; | |||
let mut month = None; | |||
let mut day = None; | |||
if let Some(d) = page.meta.datetime_tuple { | |||
year = Some(d.0); | |||
month = Some(d.1); | |||
day = Some(d.2); | |||
} | |||
let lighter = page.lighter.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap()))); | |||
let heavier = page.heavier.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap()))); | |||
let earlier = page.earlier.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap()))); | |||
let later = page.later.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap()))); | |||
SerializingPage { | |||
content: &page.content, | |||
permalink: &page.permalink, | |||
slug: &page.slug, | |||
title: &page.meta.title, | |||
description: &page.meta.description, | |||
extra: &page.meta.extra, | |||
date: &page.meta.date, | |||
year, | |||
month, | |||
day, | |||
taxonomies: &page.meta.taxonomies, | |||
path: &page.path, | |||
components: &page.components, | |||
summary: &page.summary, | |||
word_count: page.word_count, | |||
reading_time: page.reading_time, | |||
toc: &page.toc, | |||
assets: page.serialize_assets(), | |||
draft: page.is_draft(), | |||
lighter, | |||
heavier, | |||
earlier, | |||
later, | |||
} | |||
} | |||
/// Same as from_page but does not fill sibling pages | |||
pub fn from_page_basic(page: &'a Page) -> Self { | |||
let mut year = None; | |||
let mut month = None; | |||
let mut day = None; | |||
if let Some(d) = page.meta.datetime_tuple { | |||
year = Some(d.0); | |||
month = Some(d.1); | |||
day = Some(d.2); | |||
} | |||
SerializingPage { | |||
content: &page.content, | |||
permalink: &page.permalink, | |||
slug: &page.slug, | |||
title: &page.meta.title, | |||
description: &page.meta.description, | |||
extra: &page.meta.extra, | |||
date: &page.meta.date, | |||
year, | |||
month, | |||
day, | |||
taxonomies: &page.meta.taxonomies, | |||
path: &page.path, | |||
components: &page.components, | |||
summary: &page.summary, | |||
word_count: page.word_count, | |||
reading_time: page.reading_time, | |||
toc: &page.toc, | |||
assets: page.serialize_assets(), | |||
draft: page.is_draft(), | |||
lighter: None, | |||
heavier: None, | |||
earlier: None, | |||
later: None, | |||
} | |||
} | |||
} | |||
#[derive(Clone, Debug, PartialEq)] | |||
pub struct Page { | |||
@@ -44,13 +153,13 @@ pub struct Page { | |||
/// as summary | |||
pub summary: Option<String>, | |||
/// The earlier page, for pages sorted by date | |||
pub earlier: Option<Box<Page>>, | |||
pub earlier: Option<Key>, | |||
/// The later page, for pages sorted by date | |||
pub later: Option<Box<Page>>, | |||
pub later: Option<Key>, | |||
/// The lighter page, for pages sorted by weight | |||
pub lighter: Option<Box<Page>>, | |||
pub lighter: Option<Key>, | |||
/// The heavier page, for pages sorted by weight | |||
pub heavier: Option<Box<Page>>, | |||
pub heavier: Option<Key>, | |||
/// Toc made from the headers of the markdown file | |||
pub toc: Vec<Header>, | |||
/// How many words in the raw content | |||
@@ -190,7 +299,7 @@ impl Page { | |||
anchor_insert, | |||
); | |||
context.tera_context.insert("page", self); | |||
context.tera_context.insert("page", &SerializingPage::from_page_basic(self)); | |||
let res = render_content(&self.raw_content, &context) | |||
.chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?; | |||
@@ -203,7 +312,7 @@ impl Page { | |||
} | |||
/// Renders the page using the default layout, unless specified in front-matter | |||
pub fn render_html(&self, tera: &Tera, config: &Config) -> Result<String> { | |||
pub fn render_html(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> { | |||
let tpl_name = match self.meta.template { | |||
Some(ref l) => l.to_string(), | |||
None => "page.html".to_string() | |||
@@ -211,9 +320,9 @@ impl Page { | |||
let mut context = TeraContext::new(); | |||
context.insert("config", config); | |||
context.insert("page", self); | |||
context.insert("current_url", &self.permalink); | |||
context.insert("current_path", &self.path); | |||
context.insert("page", &self.to_serialized(library.pages())); | |||
render_template(&tpl_name, tera, &context, &config.theme) | |||
.chain_err(|| format!("Failed to render page '{}'", self.file.path.display())) | |||
@@ -227,6 +336,14 @@ impl Page { | |||
.map(|filename| self.path.clone() + filename) | |||
.collect() | |||
} | |||
pub fn to_serialized<'a>(&'a self, pages: &'a DenseSlotMap<Page>) -> SerializingPage<'a> { | |||
SerializingPage::from_page(self, pages) | |||
} | |||
pub fn to_serialized_basic(&self) -> SerializingPage { | |||
SerializingPage::from_page_basic(self) | |||
} | |||
} | |||
impl Default for Page { | |||
@@ -253,43 +370,6 @@ impl Default for Page { | |||
} | |||
} | |||
impl ser::Serialize for Page { | |||
fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: ser::Serializer { | |||
let mut state = serializer.serialize_struct("page", 20)?; | |||
state.serialize_field("content", &self.content)?; | |||
state.serialize_field("title", &self.meta.title)?; | |||
state.serialize_field("description", &self.meta.description)?; | |||
state.serialize_field("date", &self.meta.date)?; | |||
if let Some(d) = self.meta.datetime_tuple { | |||
state.serialize_field("year", &d.0)?; | |||
state.serialize_field("month", &d.1)?; | |||
state.serialize_field("day", &d.2)?; | |||
} else { | |||
state.serialize_field::<Option<usize>>("year", &None)?; | |||
state.serialize_field::<Option<usize>>("month", &None)?; | |||
state.serialize_field::<Option<usize>>("day", &None)?; | |||
} | |||
state.serialize_field("slug", &self.slug)?; | |||
state.serialize_field("path", &self.path)?; | |||
state.serialize_field("components", &self.components)?; | |||
state.serialize_field("permalink", &self.permalink)?; | |||
state.serialize_field("summary", &self.summary)?; | |||
state.serialize_field("taxonomies", &self.meta.taxonomies)?; | |||
state.serialize_field("extra", &self.meta.extra)?; | |||
state.serialize_field("word_count", &self.word_count)?; | |||
state.serialize_field("reading_time", &self.reading_time)?; | |||
state.serialize_field("earlier", &self.earlier)?; | |||
state.serialize_field("later", &self.later)?; | |||
state.serialize_field("lighter", &self.lighter)?; | |||
state.serialize_field("heavier", &self.heavier)?; | |||
state.serialize_field("toc", &self.toc)?; | |||
state.serialize_field("draft", &self.is_draft())?; | |||
let assets = self.serialize_assets(); | |||
state.serialize_field("assets", &assets)?; | |||
state.end() | |||
} | |||
} | |||
#[cfg(test)] | |||
mod tests { | |||
use std::collections::HashMap; |
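Taken together, the page.rs changes above swap the hand-written ser::Serialize impl for a borrowed SerializingPage view and turn the sibling links into slotmap Keys. Below is a minimal, self-contained sketch of that borrowed-view pattern on its own, assuming only serde, serde_derive and serde_json; the Article names are illustrative and not part of this PR.

#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate serde_json;

struct Article {
    title: String,
    content: String,
    word_count: Option<usize>,
}

// The template-facing view borrows from the owned struct instead of cloning it,
// which is what SerializingPage does above.
#[derive(Serialize)]
struct SerializingArticle<'a> {
    title: &'a str,
    content: &'a str,
    word_count: Option<usize>,
}

impl<'a> SerializingArticle<'a> {
    fn from_article(a: &'a Article) -> Self {
        SerializingArticle {
            title: &a.title,
            content: &a.content,
            word_count: a.word_count,
        }
    }
}

fn main() {
    let article = Article {
        title: "Hello".to_string(),
        content: "Some rendered HTML".to_string(),
        word_count: Some(3),
    };
    let view = SerializingArticle::from_article(&article);
    println!("{}", serde_json::to_string(&view).unwrap());
}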
@@ -1,9 +1,8 @@ | |||
use std::collections::HashMap; | |||
use std::path::{Path, PathBuf}; | |||
use std::result::Result as StdResult; | |||
use tera::{Tera, Context as TeraContext}; | |||
use serde::ser::{SerializeStruct, self}; | |||
use tera::{Tera, Context as TeraContext, Value}; | |||
use slotmap::{Key}; | |||
use config::Config; | |||
use front_matter::{SectionFrontMatter, split_section_content}; | |||
@@ -13,9 +12,77 @@ use utils::templates::render_template; | |||
use utils::site::get_reading_analytics; | |||
use rendering::{RenderContext, Header, render_content}; | |||
use page::Page; | |||
use file_info::FileInfo; | |||
use content::file_info::FileInfo; | |||
use content::SerializingPage; | |||
use library::Library; | |||
#[derive(Clone, Debug, PartialEq, Serialize)] | |||
pub struct SerializingSection<'a> { | |||
content: &'a str, | |||
permalink: &'a str, | |||
title: &'a Option<String>, | |||
description: &'a Option<String>, | |||
extra: &'a HashMap<String, Value>, | |||
path: &'a str, | |||
components: &'a [String], | |||
word_count: Option<usize>, | |||
reading_time: Option<usize>, | |||
toc: &'a [Header], | |||
assets: Vec<String>, | |||
pages: Vec<SerializingPage<'a>>, | |||
subsections: Vec<SerializingSection<'a>>, | |||
} | |||
impl<'a> SerializingSection<'a> { | |||
pub fn from_section(section: &'a Section, library: &'a Library) -> Self { | |||
let mut pages = Vec::with_capacity(section.pages.len()); | |||
let mut subsections = Vec::with_capacity(section.subsections.len()); | |||
for k in &section.pages { | |||
pages.push(library.get_page_by_key(*k).to_serialized(library.pages())); | |||
} | |||
for k in &section.subsections { | |||
subsections.push(library.get_section_by_key(*k).to_serialized(library)); | |||
} | |||
SerializingSection { | |||
content: &section.content, | |||
permalink: &section.permalink, | |||
title: &section.meta.title, | |||
description: &section.meta.description, | |||
extra: &section.meta.extra, | |||
path: &section.path, | |||
components: &section.components, | |||
word_count: section.word_count, | |||
reading_time: section.reading_time, | |||
toc: &section.toc, | |||
assets: section.serialize_assets(), | |||
pages, | |||
subsections, | |||
} | |||
} | |||
/// Same as from_section but doesn't fetch pages and sections | |||
pub fn from_section_basic(section: &'a Section) -> Self { | |||
SerializingSection { | |||
content: &section.content, | |||
permalink: &section.permalink, | |||
title: &section.meta.title, | |||
description: &section.meta.description, | |||
extra: &section.meta.extra, | |||
path: &section.path, | |||
components: &section.components, | |||
word_count: section.word_count, | |||
reading_time: section.reading_time, | |||
toc: &section.toc, | |||
assets: section.serialize_assets(), | |||
pages: vec![], | |||
subsections: vec![], | |||
} | |||
} | |||
} | |||
#[derive(Clone, Debug, PartialEq)] | |||
pub struct Section { | |||
@@ -36,11 +103,11 @@ pub struct Section { | |||
/// All the non-md files we found next to the .md file | |||
pub assets: Vec<PathBuf>, | |||
/// All direct pages of that section | |||
pub pages: Vec<Page>, | |||
pub pages: Vec<Key>, | |||
/// All pages that cannot be sorted in this section | |||
pub ignored_pages: Vec<Page>, | |||
pub ignored_pages: Vec<Key>, | |||
/// All direct subsections | |||
pub subsections: Vec<Section>, | |||
pub subsections: Vec<Key>, | |||
/// Toc made from the headers of the markdown file | |||
pub toc: Vec<Header>, | |||
/// How many words in the raw content | |||
@@ -133,7 +200,13 @@ impl Section { | |||
/// We need access to all page URLs to render links relative to content | |||
/// so that can't happen at the same time as parsing | |||
pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config, base_path: &Path) -> Result<()> { | |||
pub fn render_markdown( | |||
&mut self, | |||
permalinks: &HashMap<String, String>, | |||
tera: &Tera, | |||
config: &Config, | |||
base_path: &Path, | |||
) -> Result<()> { | |||
let mut context = RenderContext::new( | |||
tera, | |||
config, | |||
@@ -143,7 +216,7 @@ impl Section { | |||
self.meta.insert_anchor_links, | |||
); | |||
context.tera_context.insert("section", self); | |||
context.tera_context.insert("section", &SerializingSection::from_section_basic(self)); | |||
let res = render_content(&self.raw_content, &context) | |||
.chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?; | |||
@@ -153,14 +226,14 @@ impl Section { | |||
} | |||
/// Renders the page using the default layout, unless specified in front-matter | |||
pub fn render_html(&self, tera: &Tera, config: &Config) -> Result<String> { | |||
pub fn render_html(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> { | |||
let tpl_name = self.get_template_name(); | |||
let mut context = TeraContext::new(); | |||
context.insert("config", config); | |||
context.insert("section", self); | |||
context.insert("current_url", &self.permalink); | |||
context.insert("current_path", &self.path); | |||
context.insert("section", &self.to_serialized(library)); | |||
render_template(&tpl_name, tera, &context, &config.theme) | |||
.chain_err(|| format!("Failed to render section '{}'", self.file.path.display())) | |||
@@ -171,19 +244,6 @@ impl Section { | |||
self.file.components.is_empty() | |||
} | |||
/// Returns all the paths of the pages belonging to that section | |||
pub fn all_pages_path(&self) -> Vec<PathBuf> { | |||
let mut paths = vec![]; | |||
paths.extend(self.pages.iter().map(|p| p.file.path.clone())); | |||
paths.extend(self.ignored_pages.iter().map(|p| p.file.path.clone())); | |||
paths | |||
} | |||
/// Whether the page given belongs to that section | |||
pub fn is_child_page(&self, path: &PathBuf) -> bool { | |||
self.all_pages_path().contains(path) | |||
} | |||
/// Creates a vectors of asset URLs. | |||
fn serialize_assets(&self) -> Vec<String> { | |||
self.assets.iter() | |||
@@ -193,50 +253,8 @@ impl Section { | |||
.collect() | |||
} | |||
pub fn clone_without_pages(&self) -> Section { | |||
let mut subsections = vec![]; | |||
for subsection in &self.subsections { | |||
subsections.push(subsection.clone_without_pages()); | |||
} | |||
Section { | |||
file: self.file.clone(), | |||
meta: self.meta.clone(), | |||
path: self.path.clone(), | |||
components: self.components.clone(), | |||
permalink: self.permalink.clone(), | |||
raw_content: self.raw_content.clone(), | |||
content: self.content.clone(), | |||
assets: self.assets.clone(), | |||
toc: self.toc.clone(), | |||
subsections, | |||
pages: vec![], | |||
ignored_pages: vec![], | |||
word_count: self.word_count, | |||
reading_time: self.reading_time, | |||
} | |||
} | |||
} | |||
impl ser::Serialize for Section { | |||
fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: ser::Serializer { | |||
let mut state = serializer.serialize_struct("section", 13)?; | |||
state.serialize_field("content", &self.content)?; | |||
state.serialize_field("permalink", &self.permalink)?; | |||
state.serialize_field("title", &self.meta.title)?; | |||
state.serialize_field("description", &self.meta.description)?; | |||
state.serialize_field("extra", &self.meta.extra)?; | |||
state.serialize_field("path", &self.path)?; | |||
state.serialize_field("components", &self.components)?; | |||
state.serialize_field("permalink", &self.permalink)?; | |||
state.serialize_field("pages", &self.pages)?; | |||
state.serialize_field("subsections", &self.subsections)?; | |||
state.serialize_field("word_count", &self.word_count)?; | |||
state.serialize_field("reading_time", &self.reading_time)?; | |||
state.serialize_field("toc", &self.toc)?; | |||
let assets = self.serialize_assets(); | |||
state.serialize_field("assets", &assets)?; | |||
state.end() | |||
pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> { | |||
SerializingSection::from_section(self, library) | |||
} | |||
} | |||
@@ -0,0 +1,36 @@ | |||
extern crate tera; | |||
extern crate slug; | |||
extern crate serde; | |||
#[macro_use] | |||
extern crate serde_derive; | |||
extern crate chrono; | |||
extern crate slotmap; | |||
extern crate rayon; | |||
#[cfg(test)] | |||
extern crate tempfile; | |||
#[cfg(test)] | |||
extern crate toml; | |||
#[cfg(test)] | |||
extern crate globset; | |||
extern crate front_matter; | |||
extern crate config; | |||
extern crate utils; | |||
extern crate rendering; | |||
#[macro_use] | |||
extern crate errors; | |||
mod content; | |||
mod taxonomies; | |||
mod pagination; | |||
mod sorting; | |||
mod library; | |||
pub use slotmap::{Key, DenseSlotMap}; | |||
pub use sorting::sort_actual_pages_by_date; | |||
pub use content::{Page, SerializingPage, Section, SerializingSection}; | |||
pub use library::Library; | |||
pub use taxonomies::{Taxonomy, TaxonomyItem, find_taxonomies}; | |||
pub use pagination::Paginator; |
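For orientation, here is a rough sketch of how a downstream crate might use the re-exports above, relying only on items that appear in this PR (Library::new, insert_page, populate_sections, pages, get_page_by_key); it is an illustration of the intended flow, not code taken from the PR.

extern crate library;

use library::{Library, Page};

fn main() {
    // The arguments are only capacity hints, as in Library::new in this PR.
    let mut library = Library::new(1, 0);

    // Pages are inserted once and addressed by their Key afterwards.
    let key = library.insert_page(Page::default());
    library.populate_sections();

    assert!(library.pages().get(key).is_some());
    let _page = library.get_page_by_key(key);
}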
@@ -0,0 +1,269 @@ | |||
use std::collections::{HashMap, HashSet}; | |||
use std::path::{Path, PathBuf}; | |||
use slotmap::{DenseSlotMap, Key}; | |||
use front_matter::SortBy; | |||
use sorting::{find_siblings, sort_pages_by_weight, sort_pages_by_date}; | |||
use content::{Page, Section}; | |||
/// Houses everything about pages and sections | |||
/// Think of it as a database where each page and section has an id (Key here) | |||
/// that can be used to find the actual value | |||
/// Sections and pages can then refer to other elements by those keys, which are very cheap to | |||
/// copy. | |||
/// We can assume the keys always exist, since removing a page/section deletes all references | |||
/// to that key. | |||
#[derive(Debug)] | |||
pub struct Library { | |||
/// All the pages of the site | |||
pages: DenseSlotMap<Page>, | |||
/// All the sections of the site | |||
sections: DenseSlotMap<Section>, | |||
/// A mapping path -> key for pages so we can easily get their key | |||
paths_to_pages: HashMap<PathBuf, Key>, | |||
/// A mapping path -> key for sections so we can easily get their key | |||
paths_to_sections: HashMap<PathBuf, Key>, | |||
} | |||
impl Library { | |||
pub fn new(cap_pages: usize, cap_sections: usize) -> Self { | |||
Library { | |||
pages: DenseSlotMap::with_capacity(cap_pages), | |||
sections: DenseSlotMap::with_capacity(cap_sections), | |||
paths_to_pages: HashMap::with_capacity(cap_pages), | |||
paths_to_sections: HashMap::with_capacity(cap_sections), | |||
} | |||
} | |||
/// Add a section and return its Key | |||
pub fn insert_section(&mut self, section: Section) -> Key { | |||
let path = section.file.path.clone(); | |||
let key = self.sections.insert(section); | |||
self.paths_to_sections.insert(path, key); | |||
key | |||
} | |||
/// Add a page and return its Key | |||
pub fn insert_page(&mut self, page: Page) -> Key { | |||
let path = page.file.path.clone(); | |||
let key = self.pages.insert(page); | |||
self.paths_to_pages.insert(path, key); | |||
key | |||
} | |||
pub fn pages(&self) -> &DenseSlotMap<Page> { | |||
&self.pages | |||
} | |||
pub fn pages_mut(&mut self) -> &mut DenseSlotMap<Page> { | |||
&mut self.pages | |||
} | |||
pub fn pages_values(&self) -> Vec<&Page> { | |||
self.pages.values().collect::<Vec<_>>() | |||
} | |||
pub fn sections(&self) -> &DenseSlotMap<Section> { | |||
&self.sections | |||
} | |||
pub fn sections_mut(&mut self) -> &mut DenseSlotMap<Section> { | |||
&mut self.sections | |||
} | |||
pub fn sections_values(&self) -> Vec<&Section> { | |||
self.sections.values().collect::<Vec<_>>() | |||
} | |||
/// Find out the direct subsections of each section, if there are any, | |||
/// as well as the pages for each section | |||
pub fn populate_sections(&mut self) { | |||
let mut grandparent_paths: HashMap<PathBuf, Vec<PathBuf>> = HashMap::new(); | |||
for section in self.sections.values_mut() { | |||
if let Some(ref grand_parent) = section.file.grand_parent { | |||
grandparent_paths | |||
.entry(grand_parent.to_path_buf()) | |||
.or_insert_with(|| vec![]) | |||
.push(section.file.path.clone()); | |||
} | |||
// Make sure the pages of a section are empty, since this can be called many times on `serve` | |||
section.pages = vec![]; | |||
section.ignored_pages = vec![]; | |||
} | |||
for (key, page) in &mut self.pages { | |||
let parent_section_path = page.file.parent.join("_index.md"); | |||
if let Some(section_key) = self.paths_to_sections.get(&parent_section_path) { | |||
self.sections.get_mut(*section_key).unwrap().pages.push(key); | |||
} | |||
} | |||
self.sort_sections_pages(); | |||
let sections = self.paths_to_sections.clone(); | |||
let mut sections_weight = HashMap::new(); | |||
for (key, section) in &self.sections { | |||
sections_weight.insert(key, section.meta.weight); | |||
} | |||
for section in self.sections.values_mut() { | |||
if let Some(paths) = grandparent_paths.get(&section.file.parent) { | |||
section.subsections = paths | |||
.iter() | |||
.map(|p| sections[p]) | |||
.collect::<Vec<_>>(); | |||
section.subsections | |||
.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b])); | |||
} | |||
} | |||
} | |||
/// Sort all sections' pages | |||
pub fn sort_sections_pages(&mut self) { | |||
let mut updates = HashMap::new(); | |||
for (key, section) in &self.sections { | |||
let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by { | |||
SortBy::None => continue, | |||
SortBy::Date => { | |||
let data = section.pages | |||
.iter() | |||
.map(|k| { | |||
if let Some(page) = self.pages.get(*k) { | |||
(k, page.meta.datetime, page.permalink.as_ref()) | |||
} else { | |||
unreachable!("Sorting got an unknown page") | |||
} | |||
}) | |||
.collect(); | |||
sort_pages_by_date(data) | |||
}, | |||
SortBy::Weight => { | |||
let data = section.pages | |||
.iter() | |||
.map(|k| { | |||
if let Some(page) = self.pages.get(*k) { | |||
(k, page.meta.weight, page.permalink.as_ref()) | |||
} else { | |||
unreachable!("Sorting got an unknown page") | |||
} | |||
}) | |||
.collect(); | |||
sort_pages_by_weight(data) | |||
} | |||
}; | |||
updates.insert(key, (sorted_pages, cannot_be_sorted_pages, section.meta.sort_by)); | |||
} | |||
for (key, (sorted, cannot_be_sorted, sort_by)) in updates { | |||
// Find siblings between sorted pages first | |||
let with_siblings = find_siblings(sorted.iter().map(|k| { | |||
if let Some(page) = self.pages.get(*k) { | |||
(k, page.is_draft()) | |||
} else { | |||
unreachable!("Sorting got an unknown page") | |||
} | |||
}).collect()); | |||
for (k2, val1, val2) in with_siblings { | |||
if let Some(page) = self.pages.get_mut(k2) { | |||
match sort_by { | |||
SortBy::Date => { | |||
page.earlier = val2; | |||
page.later = val1; | |||
}, | |||
SortBy::Weight => { | |||
page.lighter = val1; | |||
page.heavier = val2; | |||
}, | |||
SortBy::None => unreachable!("Impossible to find siblings in SortBy::None") | |||
} | |||
} else { | |||
unreachable!("Sorting got an unknown page") | |||
} | |||
} | |||
if let Some(s) = self.sections.get_mut(key) { | |||
s.pages = sorted; | |||
s.ignored_pages = cannot_be_sorted; | |||
} | |||
} | |||
} | |||
/// Find all the orphan pages: pages that are in a folder without an `_index.md` | |||
pub fn get_all_orphan_pages(&self) -> Vec<&Page> { | |||
let pages_in_sections = self.sections | |||
.values() | |||
.flat_map(|s| &s.pages) | |||
.collect::<HashSet<_>>(); | |||
self.pages | |||
.iter() | |||
.filter(|(key, _)| !pages_in_sections.contains(&key)) | |||
.map(|(_, page)| page) | |||
.collect() | |||
} | |||
pub fn find_parent_section(&self, path: &Path) -> Option<&Section> { | |||
let page_key = self.paths_to_pages[path]; | |||
for s in self.sections.values() { | |||
if s.pages.contains(&page_key) { | |||
return Some(s) | |||
} | |||
} | |||
None | |||
} | |||
pub fn get_section(&self, path: &PathBuf) -> Option<&Section> { | |||
self.sections.get(self.paths_to_sections.get(path).cloned().unwrap_or_default()) | |||
} | |||
pub fn get_section_mut(&mut self, path: &PathBuf) -> Option<&mut Section> { | |||
self.sections.get_mut(self.paths_to_sections.get(path).cloned().unwrap_or_default()) | |||
} | |||
pub fn get_section_by_key(&self, key: Key) -> &Section { | |||
self.sections.get(key).unwrap() | |||
} | |||
pub fn get_page(&self, path: &PathBuf) -> Option<&Page> { | |||
self.pages.get(self.paths_to_pages.get(path).cloned().unwrap_or_default()) | |||
} | |||
pub fn get_page_by_key(&self, key: Key) -> &Page { | |||
self.pages.get(key).unwrap() | |||
} | |||
pub fn remove_section(&mut self, path: &PathBuf) -> Option<Section> { | |||
if let Some(k) = self.paths_to_sections.remove(path) { | |||
// TODO: delete section from parent subsection if there is one | |||
self.sections.remove(k) | |||
} else { | |||
None | |||
} | |||
} | |||
pub fn remove_page(&mut self, path: &PathBuf) -> Option<Page> { | |||
if let Some(k) = self.paths_to_pages.remove(path) { | |||
// TODO: delete page from all parent sections | |||
self.pages.remove(k) | |||
} else { | |||
None | |||
} | |||
} | |||
/// Used in rebuild, to check if we know it already | |||
pub fn contains_section(&self, path: &PathBuf) -> bool { | |||
self.paths_to_sections.contains_key(path) | |||
} | |||
/// Used in rebuild, to check if we know it already | |||
pub fn contains_page(&self, path: &PathBuf) -> bool { | |||
self.paths_to_pages.contains_key(path) | |||
} | |||
} |
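A dependency-free sketch of what populate_sections does above: each page is attached to the section whose _index.md lives in the page's parent folder, and pages with no matching section become orphans. HashMaps and plain usize ids stand in for DenseSlotMap and Key; the paths are made up for the example.

use std::collections::HashMap;
use std::path::PathBuf;

fn main() {
    // path of each section's _index.md -> section id
    let mut sections: HashMap<PathBuf, usize> = HashMap::new();
    sections.insert(PathBuf::from("content/blog/_index.md"), 0);

    // section id -> page ids, rebuilt from scratch on every call
    let mut section_pages: HashMap<usize, Vec<usize>> = HashMap::new();

    // (page id, path of the page's markdown file)
    let pages = vec![
        (1, PathBuf::from("content/blog/first.md")),
        (2, PathBuf::from("content/blog/second.md")),
        (3, PathBuf::from("content/about.md")), // no content/_index.md: orphan
    ];

    for (id, path) in &pages {
        // Same lookup as `page.file.parent.join("_index.md")` above.
        let parent_index = path.parent().unwrap().join("_index.md");
        if let Some(section_id) = sections.get(&parent_index) {
            section_pages.entry(*section_id).or_insert_with(Vec::new).push(*id);
        }
    }

    assert_eq!(section_pages[&0], vec![1, 2]);
    // Page 3 ends up in no section, i.e. an orphan page.
    assert_eq!(section_pages.values().map(|v| v.len()).sum::<usize>(), 2);
}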
@@ -1,25 +1,15 @@ | |||
#[macro_use] | |||
extern crate serde_derive; | |||
extern crate tera; | |||
extern crate errors; | |||
extern crate config; | |||
extern crate content; | |||
extern crate utils; | |||
extern crate taxonomies; | |||
#[cfg(test)] | |||
extern crate front_matter; | |||
use std::collections::HashMap; | |||
use tera::{Tera, Context, to_value, Value}; | |||
use slotmap::{Key}; | |||
use errors::{Result, ResultExt}; | |||
use config::Config; | |||
use content::{Page, Section}; | |||
use utils::templates::render_template; | |||
use taxonomies::{Taxonomy, TaxonomyItem}; | |||
use content::{Section, SerializingSection, SerializingPage}; | |||
use taxonomies::{TaxonomyItem, Taxonomy}; | |||
use library::Library; | |||
#[derive(Clone, Debug, PartialEq)] | |||
@@ -39,11 +29,11 @@ pub struct Pager<'a> { | |||
/// Path to that page | |||
path: String, | |||
/// All pages for the pager | |||
pages: Vec<&'a Page>, | |||
pages: Vec<SerializingPage<'a>>, | |||
} | |||
impl<'a> Pager<'a> { | |||
fn new(index: usize, pages: Vec<&'a Page>, permalink: String, path: String) -> Pager<'a> { | |||
fn new(index: usize, pages: Vec<SerializingPage<'a>>, permalink: String, path: String) -> Pager<'a> { | |||
Pager { | |||
index, | |||
permalink, | |||
@@ -55,8 +45,8 @@ impl<'a> Pager<'a> { | |||
#[derive(Clone, Debug, PartialEq)] | |||
pub struct Paginator<'a> { | |||
/// All pages in the section | |||
all_pages: &'a [Page], | |||
/// All pages in the section/taxonomy | |||
all_pages: &'a [Key], | |||
/// Pages split in chunks of `paginate_by` | |||
pub pagers: Vec<Pager<'a>>, | |||
/// How many content pages on a paginated page at max | |||
@@ -67,17 +57,18 @@ pub struct Paginator<'a> { | |||
pub permalink: String, | |||
path: String, | |||
pub paginate_path: String, | |||
/// Whether this is the index section; we need it for the template name | |||
is_index: bool, | |||
} | |||
impl<'a> Paginator<'a> { | |||
/// Create a new paginator from a section | |||
/// It will always at least create one pager (the first) even if there are no pages to paginate | |||
pub fn from_section(all_pages: &'a [Page], section: &'a Section) -> Paginator<'a> { | |||
/// It will always at least create one pager (the first) even if there are not enough pages to paginate | |||
pub fn from_section(section: &'a Section, library: &'a Library) -> Paginator<'a> { | |||
let paginate_by = section.meta.paginate_by.unwrap(); | |||
let mut paginator = Paginator { | |||
all_pages, | |||
pagers: vec![], | |||
all_pages: §ion.pages, | |||
pagers: Vec::with_capacity(section.pages.len() / paginate_by), | |||
paginate_by, | |||
root: PaginationRoot::Section(section), | |||
permalink: section.permalink.clone(), | |||
@@ -86,17 +77,17 @@ impl<'a> Paginator<'a> { | |||
is_index: section.is_index(), | |||
}; | |||
paginator.fill_pagers(); | |||
paginator.fill_pagers(library); | |||
paginator | |||
} | |||
/// Create a new paginator from a taxonomy | |||
/// It will always at least create one pager (the first) even if there are no pages to paginate | |||
pub fn from_taxonomy(taxonomy: &'a Taxonomy, item: &'a TaxonomyItem) -> Paginator<'a> { | |||
/// It will always at least create one pager (the first) even if there are not enough pages to paginate | |||
pub fn from_taxonomy(taxonomy: &'a Taxonomy, item: &'a TaxonomyItem, library: &'a Library) -> Paginator<'a> { | |||
let paginate_by = taxonomy.kind.paginate_by.unwrap(); | |||
let mut paginator = Paginator { | |||
all_pages: &item.pages, | |||
pagers: vec![], | |||
pagers: Vec::with_capacity(item.pages.len() / paginate_by), | |||
paginate_by, | |||
root: PaginationRoot::Taxonomy(taxonomy), | |||
permalink: item.permalink.clone(), | |||
@@ -105,31 +96,35 @@ impl<'a> Paginator<'a> { | |||
is_index: false, | |||
}; | |||
paginator.fill_pagers(); | |||
paginator.fill_pagers(library); | |||
paginator | |||
} | |||
fn fill_pagers(&mut self) { | |||
fn fill_pagers(&mut self, library: &'a Library) { | |||
// the list of pagers | |||
let mut pages = vec![]; | |||
// the pages in the current pager | |||
let mut current_page = vec![]; | |||
for page in self.all_pages { | |||
current_page.push(page); | |||
for key in self.all_pages { | |||
let page = library.get_page_by_key(*key); | |||
current_page.push(page.to_serialized_basic()); | |||
if current_page.len() == self.paginate_by { | |||
pages.push(current_page); | |||
current_page = vec![]; | |||
} | |||
} | |||
if !current_page.is_empty() { | |||
pages.push(current_page); | |||
} | |||
let mut pagers = vec![]; | |||
for (index, page) in pages.iter().enumerate() { | |||
for (index, page) in pages.into_iter().enumerate() { | |||
// First page has no pagination path | |||
if index == 0 { | |||
pagers.push(Pager::new(1, page.clone(), self.permalink.clone(), self.path.clone())); | |||
pagers.push(Pager::new(1, page, self.permalink.clone(), self.path.clone())); | |||
continue; | |||
} | |||
@@ -146,7 +141,7 @@ impl<'a> Paginator<'a> { | |||
pagers.push(Pager::new( | |||
index + 1, | |||
page.clone(), | |||
page, | |||
permalink, | |||
pager_path, | |||
)); | |||
@@ -198,7 +193,7 @@ impl<'a> Paginator<'a> { | |||
context.insert("config", &config); | |||
let template_name = match self.root { | |||
PaginationRoot::Section(s) => { | |||
context.insert("section", &s.clone_without_pages()); | |||
context.insert("section", &SerializingSection::from_section_basic(s)); | |||
s.get_template_name() | |||
} | |||
PaginationRoot::Taxonomy(t) => { | |||
@@ -223,6 +218,7 @@ mod tests { | |||
use content::{Page, Section}; | |||
use config::Taxonomy as TaxonomyConfig; | |||
use taxonomies::{Taxonomy, TaxonomyItem}; | |||
use library::Library; | |||
use super::Paginator; | |||
@@ -241,15 +237,22 @@ mod tests { | |||
s | |||
} | |||
fn create_library(is_index: bool) -> (Section, Library) { | |||
let mut library = Library::new(3, 0); | |||
library.insert_page(Page::default()); | |||
library.insert_page(Page::default()); | |||
library.insert_page(Page::default()); | |||
let mut section = create_section(is_index); | |||
section.pages = library.pages().keys().collect(); | |||
library.insert_section(section.clone()); | |||
(section, library) | |||
} | |||
#[test] | |||
fn test_can_create_paginator() { | |||
let pages = vec![ | |||
Page::default(), | |||
Page::default(), | |||
Page::default(), | |||
]; | |||
let section = create_section(false); | |||
let paginator = Paginator::from_section(pages.as_slice(), §ion); | |||
let (section, library) = create_library(false); | |||
let paginator = Paginator::from_section(§ion, &library); | |||
assert_eq!(paginator.pagers.len(), 2); | |||
assert_eq!(paginator.pagers[0].index, 1); | |||
@@ -265,13 +268,8 @@ mod tests { | |||
#[test] | |||
fn test_can_create_paginator_for_index() { | |||
let pages = vec![ | |||
Page::default(), | |||
Page::default(), | |||
Page::default(), | |||
]; | |||
let section = create_section(true); | |||
let paginator = Paginator::from_section(pages.as_slice(), §ion); | |||
let (section, library) = create_library(true); | |||
let paginator = Paginator::from_section(§ion, &library); | |||
assert_eq!(paginator.pagers.len(), 2); | |||
assert_eq!(paginator.pagers[0].index, 1); | |||
@@ -287,13 +285,8 @@ mod tests { | |||
#[test] | |||
fn test_can_build_paginator_context() { | |||
let pages = vec![ | |||
Page::default(), | |||
Page::default(), | |||
Page::default(), | |||
]; | |||
let section = create_section(false); | |||
let paginator = Paginator::from_section(pages.as_slice(), §ion); | |||
let (section, library) = create_library(false); | |||
let paginator = Paginator::from_section(§ion, &library); | |||
assert_eq!(paginator.pagers.len(), 2); | |||
let context = paginator.build_paginator_context(&paginator.pagers[0]); | |||
@@ -315,11 +308,7 @@ mod tests { | |||
#[test] | |||
fn test_can_create_paginator_for_taxonomy() { | |||
let pages = vec![ | |||
Page::default(), | |||
Page::default(), | |||
Page::default(), | |||
]; | |||
let (_, library) = create_library(false); | |||
let taxonomy_def = TaxonomyConfig { | |||
name: "tags".to_string(), | |||
paginate_by: Some(2), | |||
@@ -329,10 +318,10 @@ mod tests { | |||
name: "Something".to_string(), | |||
slug: "something".to_string(), | |||
permalink: "https://vincent.is/tags/something/".to_string(), | |||
pages, | |||
pages: library.pages().keys().collect(), | |||
}; | |||
let taxonomy = Taxonomy { kind: taxonomy_def, items: vec![taxonomy_item.clone()] }; | |||
let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item); | |||
let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item, &library); | |||
assert_eq!(paginator.pagers.len(), 2); | |||
assert_eq!(paginator.pagers[0].index, 1); |
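Below is a minimal, self-contained sketch of the chunking that fill_pagers performs above: page keys are grouped into chunks of paginate_by, the first chunk keeps the section URL and later ones land under page/N/. Plain usize ids and a hard-coded /blog/ path stand in for Keys and real permalinks; nothing here is Zola API.

fn chunk_pages(ids: &[usize], paginate_by: usize) -> Vec<Vec<usize>> {
    let mut pagers = vec![];
    let mut current = vec![];
    for &id in ids {
        current.push(id);
        if current.len() == paginate_by {
            pagers.push(current);
            current = vec![];
        }
    }
    if !current.is_empty() {
        pagers.push(current);
    }
    pagers
}

fn main() {
    let pagers = chunk_pages(&[1, 2, 3, 4, 5], 2);
    assert_eq!(pagers, vec![vec![1, 2], vec![3, 4], vec![5]]);

    for (i, pager) in pagers.iter().enumerate() {
        // The first pager has no pagination path, as in the loop above.
        let path = if i == 0 {
            "/blog/".to_string()
        } else {
            format!("/blog/page/{}/", i + 1)
        };
        println!("pager {} at {}: {:?}", i + 1, path, pager);
    }
}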
@@ -0,0 +1,231 @@ | |||
use std::cmp::Ordering; | |||
use rayon::prelude::*; | |||
use slotmap::Key; | |||
use chrono::NaiveDateTime; | |||
use content::Page; | |||
/// Used by the RSS feed | |||
/// Lives here so the site crate doesn't have to import the sorting internals | |||
pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering { | |||
let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap()); | |||
if ord == Ordering::Equal { | |||
a.permalink.cmp(&b.permalink) | |||
} else { | |||
ord | |||
} | |||
} | |||
/// Takes a list of (page key, date, permalink) and sorts them by date if possible | |||
/// Pages without a date will be put in the unsortable bucket | |||
/// The permalink is used to break ties | |||
pub fn sort_pages_by_date(pages: Vec<(&Key, Option<NaiveDateTime>, &str)>) -> (Vec<Key>, Vec<Key>) { | |||
let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages | |||
.into_par_iter() | |||
.partition(|page| page.1.is_some()); | |||
can_be_sorted | |||
.par_sort_unstable_by(|a, b| { | |||
let ord = b.1.unwrap().cmp(&a.1.unwrap()); | |||
if ord == Ordering::Equal { | |||
a.2.cmp(&b.2) | |||
} else { | |||
ord | |||
} | |||
}); | |||
(can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect()) | |||
} | |||
/// Takes a list of (page key, weight, permalink) and sorts them by weight if possible | |||
/// Pages without a weight will be put in the unsortable bucket | |||
/// The permalink is used to break ties | |||
pub fn sort_pages_by_weight(pages: Vec<(&Key, Option<usize>, &str)>) -> (Vec<Key>, Vec<Key>) { | |||
let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages | |||
.into_par_iter() | |||
.partition(|page| page.1.is_some()); | |||
can_be_sorted | |||
.par_sort_unstable_by(|a, b| { | |||
let ord = a.1.unwrap().cmp(&b.1.unwrap()); | |||
if ord == Ordering::Equal { | |||
a.2.cmp(&b.2) | |||
} else { | |||
ord | |||
} | |||
}); | |||
(can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect()) | |||
} | |||
/// Find the lighter/heavier and earlier/later pages for all pages having a date/weight | |||
/// and that are not drafts. | |||
pub fn find_siblings(sorted: Vec<(&Key, bool)>) -> Vec<(Key, Option<Key>, Option<Key>)> { | |||
let mut res = Vec::with_capacity(sorted.len()); | |||
let length = sorted.len(); | |||
for (i, (key, is_draft)) in sorted.iter().enumerate() { | |||
if *is_draft { | |||
res.push((**key, None, None)); | |||
continue; | |||
} | |||
let mut with_siblings = (**key, None, None); | |||
if i > 0 { | |||
let mut j = i; | |||
loop { | |||
if j == 0 { | |||
break; | |||
} | |||
j -= 1; | |||
if sorted[j].1 { | |||
continue; | |||
} | |||
// lighter / later | |||
with_siblings.1 = Some(*sorted[j].0); | |||
break; | |||
} | |||
} | |||
if i < length - 1 { | |||
let mut j = i; | |||
loop { | |||
if j == length - 1 { | |||
break; | |||
} | |||
j += 1; | |||
if sorted[j].1 { | |||
continue; | |||
} | |||
// heavier/earlier | |||
with_siblings.2 = Some(*sorted[j].0); | |||
break; | |||
} | |||
} | |||
res.push(with_siblings); | |||
} | |||
res | |||
} | |||
#[cfg(test)] | |||
mod tests { | |||
use slotmap::DenseSlotMap; | |||
use front_matter::{PageFrontMatter}; | |||
use content::Page; | |||
use super::{sort_pages_by_date, sort_pages_by_weight, find_siblings}; | |||
fn create_page_with_date(date: &str) -> Page { | |||
let mut front_matter = PageFrontMatter::default(); | |||
front_matter.date = Some(date.to_string()); | |||
front_matter.date_to_datetime(); | |||
Page::new("content/hello.md", front_matter) | |||
} | |||
fn create_page_with_weight(weight: usize) -> Page { | |||
let mut front_matter = PageFrontMatter::default(); | |||
front_matter.weight = Some(weight); | |||
Page::new("content/hello.md", front_matter) | |||
} | |||
#[test] | |||
fn can_sort_by_dates() { | |||
let mut dense = DenseSlotMap::new(); | |||
let page1 = create_page_with_date("2018-01-01"); | |||
let key1 = dense.insert(page1.clone()); | |||
let page2 = create_page_with_date("2017-01-01"); | |||
let key2 = dense.insert(page2.clone()); | |||
let page3 = create_page_with_date("2019-01-01"); | |||
let key3 = dense.insert(page3.clone()); | |||
let input = vec![ | |||
(&key1, page1.meta.datetime, page1.permalink.as_ref()), | |||
(&key2, page2.meta.datetime, page2.permalink.as_ref()), | |||
(&key3, page3.meta.datetime, page3.permalink.as_ref()), | |||
]; | |||
let (pages, _) = sort_pages_by_date(input); | |||
// Should be sorted by date | |||
assert_eq!(pages[0], key3); | |||
assert_eq!(pages[1], key1); | |||
assert_eq!(pages[2], key2); | |||
} | |||
#[test] | |||
fn can_sort_by_weight() { | |||
let mut dense = DenseSlotMap::new(); | |||
let page1 = create_page_with_weight(2); | |||
let key1 = dense.insert(page1.clone()); | |||
let page2 = create_page_with_weight(3); | |||
let key2 = dense.insert(page2.clone()); | |||
let page3 = create_page_with_weight(1); | |||
let key3 = dense.insert(page3.clone()); | |||
let input = vec![ | |||
(&key1, page1.meta.weight, page1.permalink.as_ref()), | |||
(&key2, page2.meta.weight, page2.permalink.as_ref()), | |||
(&key3, page3.meta.weight, page3.permalink.as_ref()), | |||
]; | |||
let (pages, _) = sort_pages_by_weight(input); | |||
// Should be sorted by weight | |||
assert_eq!(pages[0], key3); | |||
assert_eq!(pages[1], key1); | |||
assert_eq!(pages[2], key2); | |||
} | |||
#[test] | |||
fn ignore_page_with_missing_field() { | |||
let mut dense = DenseSlotMap::new(); | |||
let page1 = create_page_with_weight(2); | |||
let key1 = dense.insert(page1.clone()); | |||
let page2 = create_page_with_weight(3); | |||
let key2 = dense.insert(page2.clone()); | |||
let page3 = create_page_with_date("2019-01-01"); | |||
let key3 = dense.insert(page3.clone()); | |||
let input = vec![ | |||
(&key1, page1.meta.weight, page1.permalink.as_ref()), | |||
(&key2, page2.meta.weight, page2.permalink.as_ref()), | |||
(&key3, page3.meta.weight, page3.permalink.as_ref()), | |||
]; | |||
let (pages, unsorted) = sort_pages_by_weight(input); | |||
assert_eq!(pages.len(), 2); | |||
assert_eq!(unsorted.len(), 1); | |||
} | |||
#[test] | |||
fn can_find_siblings() { | |||
let mut dense = DenseSlotMap::new(); | |||
let page1 = create_page_with_weight(1); | |||
let key1 = dense.insert(page1.clone()); | |||
let page2 = create_page_with_weight(2); | |||
let key2 = dense.insert(page2.clone()); | |||
let page3 = create_page_with_weight(3); | |||
let key3 = dense.insert(page3.clone()); | |||
let input = vec![ | |||
(&key1, page1.is_draft()), | |||
(&key2, page2.is_draft()), | |||
(&key3, page3.is_draft()), | |||
]; | |||
let pages = find_siblings(input); | |||
assert_eq!(pages[0].1, None); | |||
assert_eq!(pages[0].2, Some(key2)); | |||
assert_eq!(pages[1].1, Some(key1)); | |||
assert_eq!(pages[1].2, Some(key3)); | |||
assert_eq!(pages[2].1, Some(key2)); | |||
assert_eq!(pages[2].2, None); | |||
} | |||
} |
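The ordering used by sort_pages_by_date above is: newest first, with the permalink as the tie-breaker. A tiny self-contained illustration of that comparator, where plain integers stand in for NaiveDateTime and &str for permalinks (nothing here is Zola API):

use std::cmp::Ordering;

fn main() {
    // (permalink, date) pairs
    let mut pages = vec![
        ("a-post", 2018_01_01),
        ("z-post", 2019_01_01),
        ("b-post", 2019_01_01), // same date as z-post: the permalink breaks the tie
    ];

    // Same comparator shape as sort_pages_by_date above: descending by date,
    // ascending by permalink on equal dates.
    pages.sort_unstable_by(|a, b| {
        let ord = b.1.cmp(&a.1);
        if ord == Ordering::Equal {
            a.0.cmp(&b.0)
        } else {
            ord
        }
    });

    let order: Vec<_> = pages.iter().map(|p| p.0).collect();
    assert_eq!(order, vec!["b-post", "z-post", "a-post"]);
}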
@@ -1,46 +1,71 @@ | |||
#[macro_use] | |||
extern crate serde_derive; | |||
extern crate tera; | |||
extern crate slug; | |||
#[macro_use] | |||
extern crate errors; | |||
extern crate config; | |||
extern crate content; | |||
extern crate front_matter; | |||
extern crate utils; | |||
use std::collections::HashMap; | |||
use slug::slugify; | |||
use tera::{Context, Tera}; | |||
use slotmap::{Key}; | |||
use config::{Config, Taxonomy as TaxonomyConfig}; | |||
use errors::{Result, ResultExt}; | |||
use content::{Page, sort_pages}; | |||
use front_matter::SortBy; | |||
use utils::templates::render_template; | |||
use content::SerializingPage; | |||
use sorting::sort_pages_by_date; | |||
use library::Library; | |||
#[derive(Debug, Clone, PartialEq, Serialize)] | |||
struct SerializedTaxonomyItem<'a> { | |||
name: &'a str, | |||
slug: &'a str, | |||
permalink: &'a str, | |||
pages: Vec<SerializingPage<'a>>, | |||
} | |||
impl<'a> SerializedTaxonomyItem<'a> { | |||
pub fn from_item(item: &'a TaxonomyItem, library: &'a Library) -> Self { | |||
let mut pages = vec![]; | |||
for key in &item.pages { | |||
let page = library.get_page_by_key(*key); | |||
pages.push(page.to_serialized_basic()); | |||
} | |||
SerializedTaxonomyItem { | |||
name: &item.name, | |||
slug: &item.slug, | |||
permalink: &item.permalink, | |||
pages, | |||
} | |||
} | |||
} | |||
/// A tag or category | |||
#[derive(Debug, Clone, Serialize, PartialEq)] | |||
/// A taxonomy with all its pages | |||
#[derive(Debug, Clone, PartialEq)] | |||
pub struct TaxonomyItem { | |||
pub name: String, | |||
pub slug: String, | |||
pub permalink: String, | |||
pub pages: Vec<Page>, | |||
pub pages: Vec<Key>, | |||
} | |||
impl TaxonomyItem { | |||
pub fn new(name: &str, path: &str, config: &Config, pages: Vec<Page>) -> TaxonomyItem { | |||
pub fn new(name: &str, path: &str, config: &Config, keys: Vec<Key>, library: &Library) -> Self { | |||
// Taxonomies are almost always used for blogs, so we filter by dates | |||
// and it's not like we can sort things across sections by anything other | |||
// than dates | |||
let (mut pages, ignored_pages) = sort_pages(pages, SortBy::Date); | |||
let data = keys | |||
.iter() | |||
.map(|k| { | |||
if let Some(page) = library.pages().get(*k) { | |||
(k, page.meta.datetime, page.permalink.as_ref()) | |||
} else { | |||
unreachable!("Sorting got an unknown page") | |||
} | |||
}) | |||
.collect(); | |||
let (mut pages, ignored_pages) = sort_pages_by_date(data); | |||
let slug = slugify(name); | |||
let permalink = { | |||
config.make_permalink(&format!("/{}/{}", path, slug)) | |||
}; | |||
let permalink = config.make_permalink(&format!("/{}/{}", path, slug)); | |||
// We still append pages without dates at the end | |||
pages.extend(ignored_pages); | |||
@@ -54,8 +79,24 @@ impl TaxonomyItem { | |||
} | |||
} | |||
/// All the tags or categories | |||
#[derive(Debug, Clone, PartialEq, Serialize)] | |||
pub struct SerializedTaxonomy<'a> { | |||
kind: &'a TaxonomyConfig, | |||
items: Vec<SerializedTaxonomyItem<'a>>, | |||
} | |||
impl<'a> SerializedTaxonomy<'a> { | |||
pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self { | |||
let items: Vec<SerializedTaxonomyItem> = taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); | |||
SerializedTaxonomy { | |||
kind: &taxonomy.kind, | |||
items, | |||
} | |||
} | |||
} | |||
/// All different taxonomies we have and their content | |||
#[derive(Debug, Clone, PartialEq)] | |||
pub struct Taxonomy { | |||
pub kind: TaxonomyConfig, | |||
// this vec is sorted by the count of item | |||
@@ -63,11 +104,11 @@ pub struct Taxonomy { | |||
} | |||
impl Taxonomy { | |||
fn new(kind: TaxonomyConfig, config: &Config, items: HashMap<String, Vec<Page>>) -> Taxonomy { | |||
fn new(kind: TaxonomyConfig, config: &Config, items: HashMap<String, Vec<Key>>, library: &Library) -> Taxonomy { | |||
let mut sorted_items = vec![]; | |||
for (name, pages) in items { | |||
sorted_items.push( | |||
TaxonomyItem::new(&name, &kind.name, config, pages) | |||
TaxonomyItem::new(&name, &kind.name, config, pages, library) | |||
); | |||
} | |||
sorted_items.sort_by(|a, b| a.name.cmp(&b.name)); | |||
@@ -86,10 +127,10 @@ impl Taxonomy { | |||
self.len() == 0 | |||
} | |||
pub fn render_term(&self, item: &TaxonomyItem, tera: &Tera, config: &Config) -> Result<String> { | |||
pub fn render_term(&self, item: &TaxonomyItem, tera: &Tera, config: &Config, library: &Library) -> Result<String> { | |||
let mut context = Context::new(); | |||
context.insert("config", config); | |||
context.insert("term", item); | |||
context.insert("term", &SerializedTaxonomyItem::from_item(item, library)); | |||
context.insert("taxonomy", &self.kind); | |||
context.insert("current_url", &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug))); | |||
context.insert("current_path", &format!("/{}/{}", self.kind.name, item.slug)); | |||
@@ -98,10 +139,11 @@ impl Taxonomy { | |||
.chain_err(|| format!("Failed to render single term {} page.", self.kind.name)) | |||
} | |||
pub fn render_all_terms(&self, tera: &Tera, config: &Config) -> Result<String> { | |||
pub fn render_all_terms(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> { | |||
let mut context = Context::new(); | |||
context.insert("config", config); | |||
context.insert("terms", &self.items); | |||
let terms: Vec<SerializedTaxonomyItem> = self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); | |||
context.insert("terms", &terms); | |||
context.insert("taxonomy", &self.kind); | |||
context.insert("current_url", &config.make_permalink(&self.kind.name)); | |||
context.insert("current_path", &self.kind.name); | |||
@@ -109,9 +151,13 @@ impl Taxonomy { | |||
render_template(&format!("{}/list.html", self.kind.name), tera, &context, &config.theme) | |||
.chain_err(|| format!("Failed to render a list of {} page.", self.kind.name)) | |||
} | |||
pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> { | |||
SerializedTaxonomy::from_taxonomy(self, library) | |||
} | |||
} | |||
pub fn find_taxonomies(config: &Config, all_pages: Vec<&Page>) -> Result<Vec<Taxonomy>> { | |||
pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonomy>> { | |||
let taxonomies_def = { | |||
let mut m = HashMap::new(); | |||
for t in &config.taxonomies { | |||
@@ -121,8 +167,12 @@ pub fn find_taxonomies(config: &Config, all_pages: Vec<&Page>) -> Result<Vec<Tax | |||
}; | |||
let mut all_taxonomies = HashMap::new(); | |||
// Find all the taxonomies first | |||
for page in all_pages { | |||
for (key, page) in library.pages() { | |||
// Drafts are not part of taxonomies | |||
if page.is_draft() { | |||
continue; | |||
} | |||
for (name, val) in &page.meta.taxonomies { | |||
if taxonomies_def.contains_key(name) { | |||
all_taxonomies | |||
@@ -134,7 +184,7 @@ pub fn find_taxonomies(config: &Config, all_pages: Vec<&Page>) -> Result<Vec<Tax | |||
.unwrap() | |||
.entry(v.to_string()) | |||
.or_insert_with(|| vec![]) | |||
.push(page.clone()); | |||
.push(key); | |||
} | |||
} else { | |||
bail!("Page `{}` has taxonomy `{}` which is not defined in config.toml", page.file.path.display(), name); | |||
@@ -145,7 +195,7 @@ pub fn find_taxonomies(config: &Config, all_pages: Vec<&Page>) -> Result<Vec<Tax | |||
let mut taxonomies = vec![]; | |||
for (name, taxo) in all_taxonomies { | |||
taxonomies.push(Taxonomy::new(taxonomies_def[name].clone(), config, taxo)); | |||
taxonomies.push(Taxonomy::new(taxonomies_def[name].clone(), config, taxo, library)); | |||
} | |||
Ok(taxonomies) | |||
@@ -157,35 +207,43 @@ mod tests { | |||
use super::*; | |||
use std::collections::HashMap; | |||
use config::{Config, Taxonomy}; | |||
use config::{Config, Taxonomy as TaxonomyConfig}; | |||
use content::Page; | |||
use library::Library; | |||
#[test] | |||
fn can_make_taxonomies() { | |||
let mut config = Config::default(); | |||
let mut library = Library::new(2, 0); | |||
config.taxonomies = vec![ | |||
Taxonomy { name: "categories".to_string(), ..Taxonomy::default() }, | |||
Taxonomy { name: "tags".to_string(), ..Taxonomy::default() }, | |||
Taxonomy { name: "authors".to_string(), ..Taxonomy::default() }, | |||
TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, | |||
TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, | |||
TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }, | |||
]; | |||
let mut page1 = Page::default(); | |||
let mut taxo_page1 = HashMap::new(); | |||
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); | |||
taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]); | |||
page1.meta.taxonomies = taxo_page1; | |||
library.insert_page(page1); | |||
let mut page2 = Page::default(); | |||
let mut taxo_page2 = HashMap::new(); | |||
taxo_page2.insert("tags".to_string(), vec!["rust".to_string(), "js".to_string()]); | |||
taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]); | |||
page2.meta.taxonomies = taxo_page2; | |||
library.insert_page(page2); | |||
let mut page3 = Page::default(); | |||
let mut taxo_page3 = HashMap::new(); | |||
taxo_page3.insert("tags".to_string(), vec!["js".to_string()]); | |||
taxo_page3.insert("authors".to_string(), vec!["Vincent Prouillet".to_string()]); | |||
page3.meta.taxonomies = taxo_page3; | |||
let pages = vec![&page1, &page2, &page3]; | |||
library.insert_page(page3); | |||
let taxonomies = find_taxonomies(&config, pages).unwrap(); | |||
let taxonomies = find_taxonomies(&config, &library).unwrap(); | |||
let (tags, categories, authors) = { | |||
let mut t = None; | |||
let mut c = None; | |||
@@ -233,15 +291,18 @@ mod tests { | |||
#[test] | |||
fn errors_on_unknown_taxonomy() { | |||
let mut config = Config::default(); | |||
let mut library = Library::new(2, 0); | |||
config.taxonomies = vec![ | |||
Taxonomy { name: "authors".to_string(), ..Taxonomy::default() }, | |||
TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }, | |||
]; | |||
let mut page1 = Page::default(); | |||
let mut taxo_page1 = HashMap::new(); | |||
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); | |||
page1.meta.taxonomies = taxo_page1; | |||
library.insert_page(page1); | |||
let taxonomies = find_taxonomies(&config, vec![&page1]); | |||
let taxonomies = find_taxonomies(&config, &library); | |||
assert!(taxonomies.is_err()); | |||
let err = taxonomies.unwrap_err(); | |||
// no path as this is created by Default |
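A rough, dependency-free sketch of the grouping that find_taxonomies performs above: every non-draft page id is pushed into taxonomy -> term -> ids buckets read from its front matter. HashMaps and usize ids stand in for the real types; the page data is invented for the example.

use std::collections::HashMap;

fn main() {
    // (page id, is_draft, taxonomies read from the front matter)
    let pages: Vec<(usize, bool, Vec<(&str, Vec<&str>)>)> = vec![
        (1, false, vec![("tags", vec!["rust", "db"])]),
        (2, false, vec![("tags", vec!["rust"])]),
        (3, true, vec![("tags", vec!["js"])]), // drafts are skipped
    ];

    // taxonomy name -> term -> page ids, the same shape find_taxonomies builds
    let mut all: HashMap<&str, HashMap<&str, Vec<usize>>> = HashMap::new();
    for (id, is_draft, taxonomies) in &pages {
        if *is_draft {
            continue;
        }
        for (name, terms) in taxonomies {
            for term in terms {
                all.entry(*name)
                    .or_insert_with(HashMap::new)
                    .entry(*term)
                    .or_insert_with(Vec::new)
                    .push(*id);
            }
        }
    }

    assert_eq!(all["tags"]["rust"], vec![1, 2]);
    // The draft's "js" tag never made it into the buckets.
    assert!(!all["tags"].contains_key("js"));
}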
@@ -1,18 +0,0 @@ | |||
[package] | |||
name = "pagination" | |||
version = "0.1.0" | |||
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] | |||
[dependencies] | |||
tera = "0.11" | |||
serde = "1" | |||
serde_derive = "1" | |||
errors = { path = "../errors" } | |||
config = { path = "../config" } | |||
content = { path = "../content" } | |||
utils = { path = "../utils" } | |||
taxonomies = { path = "../taxonomies" } | |||
[dev-dependencies] | |||
front_matter = { path = "../front_matter" } |
@@ -7,7 +7,7 @@ authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] | |||
errors = { path = "../errors" } | |||
front_matter = { path = "../front_matter" } | |||
highlighting = { path = "../highlighting" } | |||
content = { path = "../content" } | |||
library = { path = "../library" } | |||
site = { path = "../site" } | |||
[dev-dependencies] | |||
@@ -1,29 +1,17 @@ | |||
extern crate site; | |||
#[macro_use] | |||
extern crate errors; | |||
extern crate content; | |||
extern crate library; | |||
extern crate front_matter; | |||
use std::path::{Path, Component}; | |||
use errors::Result; | |||
use site::Site; | |||
use content::{Page, Section}; | |||
use library::{Page, Section}; | |||
use front_matter::{PageFrontMatter, SectionFrontMatter}; | |||
/// Finds the section that contains the page given if there is one | |||
pub fn find_parent_section<'a>(site: &'a Site, page: &Page) -> Option<&'a Section> { | |||
for section in site.sections.values() { | |||
if section.is_child_page(&page.file.path) { | |||
return Some(section); | |||
} | |||
} | |||
None | |||
} | |||
#[derive(Debug, Clone, Copy, PartialEq)] | |||
pub enum PageChangesNeeded { | |||
/// Editing `taxonomies` | |||
@@ -98,30 +86,25 @@ fn find_page_front_matter_changes(current: &PageFrontMatter, other: &PageFrontMa | |||
/// Handles a path deletion: could be a page, a section, a folder | |||
fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> { | |||
// Ignore the event if this path was not known | |||
if !site.sections.contains_key(path) && !site.pages.contains_key(path) { | |||
if !site.library.contains_section(&path.to_path_buf()) && !site.library.contains_page(&path.to_path_buf()) { | |||
return Ok(()); | |||
} | |||
if is_section { | |||
if let Some(s) = site.pages.remove(path) { | |||
if let Some(s) = site.library.remove_section(&path.to_path_buf()) { | |||
site.permalinks.remove(&s.file.relative); | |||
site.populate_sections(); | |||
} | |||
} else if let Some(p) = site.pages.remove(path) { | |||
} else if let Some(p) = site.library.remove_page(&path.to_path_buf()) { | |||
site.permalinks.remove(&p.file.relative); | |||
if !p.meta.taxonomies.is_empty() { | |||
site.populate_taxonomies()?; | |||
} | |||
// if there is a parent section, we will need to re-render it | |||
// most likely | |||
if find_parent_section(site, &p).is_some() { | |||
site.populate_sections(); | |||
} | |||
} | |||
site.populate_sections(); | |||
// Ensure our global fns are updated so they no longer contain the deleted permalink(s)/section/page | |||
site.register_early_global_fns(); | |||
site.register_tera_global_fns(); | |||
// Deletion is something that doesn't happen all the time so we | |||
// don't need to optimise it too much | |||
@@ -131,30 +114,33 @@ fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> | |||
/// Handles a `_index.md` (a section) being edited in some ways | |||
fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> { | |||
let section = Section::from_file(path, &site.config)?; | |||
let pathbuf = path.to_path_buf(); | |||
match site.add_section(section, true)? { | |||
// Updating a section | |||
Some(prev) => { | |||
// Copy the section data so we don't end up with an almost empty object | |||
site.sections.get_mut(path).unwrap().pages = prev.pages; | |||
site.sections.get_mut(path).unwrap().ignored_pages = prev.ignored_pages; | |||
site.sections.get_mut(path).unwrap().subsections = prev.subsections; | |||
{ | |||
let s = site.library.get_section_mut(&pathbuf).unwrap(); | |||
s.pages = prev.pages; | |||
s.ignored_pages = prev.ignored_pages; | |||
s.subsections = prev.subsections; | |||
} | |||
if site.sections[path].meta == prev.meta { | |||
if site.library.get_section(&pathbuf).unwrap().meta == prev.meta { | |||
// Front matter didn't change, only content did | |||
// so we render only the section page, not its pages | |||
return site.render_section(&site.sections[path], false); | |||
return site.render_section(&site.library.get_section(&pathbuf).unwrap(), false); | |||
} | |||
// Front matter changed | |||
for changes in find_section_front_matter_changes(&site.sections[path].meta, &prev.meta) { | |||
for changes in find_section_front_matter_changes(&site.library.get_section(&pathbuf).unwrap().meta, &prev.meta) { | |||
// Sort always comes first if present so the rendering will be fine | |||
match changes { | |||
SectionChangesNeeded::Sort => { | |||
site.sort_sections_pages(Some(path)); | |||
site.register_tera_global_fns(); | |||
} | |||
SectionChangesNeeded::Render => site.render_section(&site.sections[path], false)?, | |||
SectionChangesNeeded::RenderWithPages => site.render_section(&site.sections[path], true)?, | |||
SectionChangesNeeded::Render => site.render_section(&site.library.get_section(&pathbuf).unwrap(), false)?, | |||
SectionChangesNeeded::RenderWithPages => site.render_section(&site.library.get_section(&pathbuf).unwrap(), true)?, | |||
// not a common enough operation to make it worth optimizing | |||
SectionChangesNeeded::Delete => { | |||
site.populate_sections(); | |||
@@ -168,14 +154,14 @@ fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> { | |||
None => { | |||
site.populate_sections(); | |||
site.register_tera_global_fns(); | |||
site.render_section(&site.sections[path], true) | |||
site.render_section(&site.library.get_section(&pathbuf).unwrap(), true) | |||
} | |||
} | |||
} | |||
macro_rules! render_parent_section { | |||
($site: expr, $path: expr) => { | |||
if let Some(s) = find_parent_section($site, &$site.pages[$path]) { | |||
if let Some(s) = $site.library.find_parent_section($path) { | |||
$site.render_section(s, false)?; | |||
}; | |||
} | |||
@@ -184,54 +170,39 @@ macro_rules! render_parent_section { | |||
/// Handles a page being edited in some ways | |||
fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> { | |||
let page = Page::from_file(path, &site.config)?; | |||
let pathbuf = path.to_path_buf(); | |||
match site.add_page(page, true)? { | |||
// Updating a page | |||
Some(prev) => { | |||
site.populate_sections(); | |||
// Front matter didn't change, only content did | |||
if site.pages[path].meta == prev.meta { | |||
if site.library.get_page(&pathbuf).unwrap().meta == prev.meta { | |||
// Other than the page itself, the summary might be seen | |||
// on a paginated list for a blog for example | |||
if site.pages[path].summary.is_some() { | |||
if site.library.get_page(&pathbuf).unwrap().summary.is_some() { | |||
render_parent_section!(site, path); | |||
} | |||
// TODO: register_tera_global_fns is expensive as it involves lots of cloning | |||
// I can't think of a valid usecase where you would need the content | |||
// of a page through a global fn so it's commented out for now | |||
// site.register_tera_global_fns(); | |||
return site.render_page(&site.pages[path]); | |||
site.register_tera_global_fns(); | |||
return site.render_page(&site.library.get_page(&pathbuf).unwrap()); | |||
} | |||
// Front matter changed | |||
let mut sections_populated = false; | |||
for changes in find_page_front_matter_changes(&site.pages[path].meta, &prev.meta) { | |||
for changes in find_page_front_matter_changes(&site.library.get_page(&pathbuf).unwrap().meta, &prev.meta) { | |||
site.register_tera_global_fns(); | |||
// Sort always comes first if present so the rendering will be fine | |||
match changes { | |||
PageChangesNeeded::Taxonomies => { | |||
site.populate_taxonomies()?; | |||
site.register_tera_global_fns(); | |||
site.render_taxonomies()?; | |||
} | |||
PageChangesNeeded::Sort => { | |||
let section_path = match find_parent_section(site, &site.pages[path]) { | |||
Some(s) => s.file.path.clone(), | |||
None => continue // Do nothing if it's an orphan page | |||
}; | |||
if !sections_populated { | |||
site.populate_sections(); | |||
sections_populated = true; | |||
} | |||
site.sort_sections_pages(Some(&section_path)); | |||
site.register_tera_global_fns(); | |||
site.render_index()?; | |||
} | |||
PageChangesNeeded::Render => { | |||
if !sections_populated { | |||
site.populate_sections(); | |||
sections_populated = true; | |||
} | |||
site.register_tera_global_fns(); | |||
render_parent_section!(site, path); | |||
site.render_page(&site.pages[path])?; | |||
site.render_page(&site.library.get_page(&path.to_path_buf()).unwrap())?; | |||
} | |||
}; | |||
} | |||
@@ -241,6 +212,7 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> { | |||
None => { | |||
site.populate_sections(); | |||
site.populate_taxonomies()?; | |||
site.register_early_global_fns(); | |||
site.register_tera_global_fns(); | |||
// No need to optimise that yet, we can revisit if it becomes an issue | |||
site.build() | |||
@@ -302,7 +274,7 @@ pub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> { | |||
match filename { | |||
"sitemap.xml" => site.render_sitemap(), | |||
"rss.xml" => site.render_rss_feed(site.pages.values().collect(), None), | |||
"rss.xml" => site.render_rss_feed(site.library.pages_values(), None), | |||
"robots.txt" => site.render_robots(), | |||
"single.html" | "list.html" => site.render_taxonomies(), | |||
"page.html" => { | |||
@@ -9,4 +9,4 @@ ammonia = "1" | |||
lazy_static = "1" | |||
errors = { path = "../errors" } | |||
content = { path = "../content" } | |||
library = { path = "../library" } |
@@ -2,16 +2,16 @@ extern crate elasticlunr; | |||
#[macro_use] | |||
extern crate lazy_static; | |||
extern crate ammonia; | |||
#[macro_use] | |||
extern crate errors; | |||
extern crate content; | |||
extern crate library; | |||
use std::collections::{HashMap, HashSet}; | |||
use std::path::PathBuf; | |||
use elasticlunr::{Index, Language}; | |||
use content::Section; | |||
use library::{Library, Section}; | |||
use errors::Result; | |||
@@ -39,7 +39,7 @@ lazy_static! { | |||
/// the language given | |||
/// Errors if the language given is not available in Elasticlunr | |||
/// TODO: is making `in_search_index` apply to subsections of a `false` section useful? | |||
pub fn build_index(sections: &HashMap<PathBuf, Section>, lang: &str) -> Result<String> { | |||
pub fn build_index(lang: &str, library: &Library) -> Result<String> { | |||
let language = match Language::from_code(lang) { | |||
Some(l) => l, | |||
None => { bail!("Tried to build search index for language {} which is not supported", lang); } | |||
@@ -47,14 +47,14 @@ pub fn build_index(sections: &HashMap<PathBuf, Section>, lang: &str) -> Result<S | |||
let mut index = Index::with_language(language, &["title", "body"]); | |||
for section in sections.values() { | |||
add_section_to_index(&mut index, section); | |||
for section in library.sections_values() { | |||
add_section_to_index(&mut index, section, library); | |||
} | |||
Ok(index.to_json()) | |||
} | |||
fn add_section_to_index(index: &mut Index, section: &Section) { | |||
fn add_section_to_index(index: &mut Index, section: &Section, library: &Library) { | |||
if !section.meta.in_search_index { | |||
return; | |||
} | |||
@@ -67,7 +67,8 @@ fn add_section_to_index(index: &mut Index, section: &Section) { | |||
); | |||
} | |||
for page in &section.pages { | |||
for key in §ion.pages { | |||
let page = library.get_page_by_key(*key); | |||
if !page.meta.in_search_index || page.meta.draft { | |||
continue; | |||
} | |||
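The hunk above captures the core data-model change in this refactor: a `Section` no longer owns its `Page` structs but stores keys into the shared `Library`, and consumers such as the search indexer resolve those keys through `get_page_by_key`. Below is a minimal, hedged sketch of that lookup pattern; `PageKey` and the `HashMap`-backed `Library` here are simplified stand-ins for illustration, not the actual gutenberg types (the diff only shows them from the outside).

```rust
use std::collections::HashMap;

// Hypothetical stand-in for the opaque key type the diff passes around.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct PageKey(usize);

struct Page {
    title: String,
    in_search_index: bool,
}

// A section refers to its pages by key instead of owning them.
struct Section {
    pages: Vec<PageKey>,
}

// Simplified central store; the real Library is only visible in the diff through
// its methods (get_page_by_key, pages_values, insert_page, ...).
struct Library {
    pages: HashMap<PageKey, Page>,
}

impl Library {
    fn get_page_by_key(&self, key: PageKey) -> &Page {
        &self.pages[&key]
    }
}

// Mirrors the shape of add_section_to_index above: walk the keys, resolve
// each one against the library, skip pages excluded from the search index.
fn titles_for_index(section: &Section, library: &Library) -> Vec<String> {
    let mut titles = Vec::new();
    for key in &section.pages {
        let page = library.get_page_by_key(*key);
        if !page.in_search_index {
            continue;
        }
        titles.push(page.title.clone());
    }
    titles
}

fn main() {
    let mut pages = HashMap::new();
    pages.insert(PageKey(0), Page { title: "Hello".into(), in_search_index: true });
    let library = Library { pages };
    let section = Section { pages: vec![PageKey(0)] };
    assert_eq!(titles_for_index(&section, &library), vec!["Hello".to_string()]);
}
```

Keeping each page in exactly one place is also what lets `populate_sections` drop the page/section cloning that the removed `// TODO: remove this clone` comments further down were complaining about.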
@@ -16,11 +16,9 @@ config = { path = "../config" } | |||
utils = { path = "../utils" } | |||
templates = { path = "../templates" } | |||
front_matter = { path = "../front_matter" } | |||
pagination = { path = "../pagination" } | |||
taxonomies = { path = "../taxonomies" } | |||
content = { path = "../content" } | |||
search = { path = "../search" } | |||
imageproc = { path = "../imageproc" } | |||
library = { path = "../library" } | |||
[dev-dependencies] | |||
tempfile = "3" |
@@ -1,14 +1,14 @@ | |||
#![feature(test)] | |||
extern crate test; | |||
extern crate site; | |||
extern crate pagination; | |||
extern crate library; | |||
extern crate tempfile; | |||
use std::env; | |||
use tempfile::tempdir; | |||
use site::Site; | |||
use pagination::Paginator; | |||
use library::Paginator; | |||
fn setup_site(name: &str) -> Site { | |||
@@ -44,7 +44,7 @@ fn bench_render_rss_feed(b: &mut test::Bencher) { | |||
let tmp_dir = tempdir().expect("create temp dir"); | |||
let public = &tmp_dir.path().join("public"); | |||
site.set_output_path(&public); | |||
b.iter(|| site.render_rss_feed(site.pages.values().collect(), None).unwrap()); | |||
b.iter(|| site.render_rss_feed(site.library.pages_values(), None).unwrap()); | |||
} | |||
#[bench] | |||
@@ -62,8 +62,8 @@ fn bench_render_paginated(b: &mut test::Bencher) { | |||
let tmp_dir = tempdir().expect("create temp dir"); | |||
let public = &tmp_dir.path().join("public"); | |||
site.set_output_path(&public); | |||
let section = site.sections.values().collect::<Vec<_>>()[0]; | |||
let paginator = Paginator::from_section(&section.pages, section); | |||
let section = site.library.sections_values()[0]; | |||
let paginator = Paginator::from_section(&section, site.library.pages()); | |||
b.iter(|| site.render_paginated(public, &paginator)); | |||
} | |||
@@ -12,38 +12,31 @@ extern crate config; | |||
extern crate utils; | |||
extern crate front_matter; | |||
extern crate templates; | |||
extern crate pagination; | |||
extern crate taxonomies; | |||
extern crate content; | |||
extern crate search; | |||
extern crate imageproc; | |||
extern crate library; | |||
#[cfg(test)] | |||
extern crate tempfile; | |||
use std::collections::{HashMap, HashSet}; | |||
use std::collections::{HashMap}; | |||
use std::fs::{create_dir_all, remove_dir_all, copy}; | |||
use std::mem; | |||
use std::path::{Path, PathBuf}; | |||
use std::sync::{Arc, Mutex}; | |||
use glob::glob; | |||
use tera::{Tera, Context}; | |||
use sass_rs::{Options as SassOptions, OutputStyle, compile_file}; | |||
use rayon::prelude::*; | |||
use errors::{Result, ResultExt}; | |||
use config::{Config, get_config}; | |||
use utils::fs::{create_file, copy_directory, create_directory, ensure_directory_exists}; | |||
use utils::templates::{render_template, rewrite_theme_paths}; | |||
use utils::net::get_available_port; | |||
use content::{Page, Section, populate_siblings, sort_pages, sort_pages_by_date}; | |||
use templates::{GUTENBERG_TERA, global_fns, render_redirect_template}; | |||
use front_matter::{InsertAnchor}; | |||
use taxonomies::{Taxonomy, find_taxonomies}; | |||
use pagination::Paginator; | |||
use rayon::prelude::*; | |||
use library::{Page, Section, sort_actual_pages_by_date, Library, Taxonomy, find_taxonomies, Paginator}; | |||
/// The sitemap only needs links and potentially date so we trim down | |||
/// all pages to only that | |||
@@ -65,8 +58,6 @@ pub struct Site { | |||
pub base_path: PathBuf, | |||
/// The parsed config for the site | |||
pub config: Config, | |||
pub pages: HashMap<PathBuf, Page>, | |||
pub sections: HashMap<PathBuf, Section>, | |||
pub tera: Tera, | |||
imageproc: Arc<Mutex<imageproc::Processor>>, | |||
// the live reload port to be used if there is one | |||
@@ -78,6 +69,8 @@ pub struct Site { | |||
/// A map of all .md files (section and pages) and their permalink | |||
/// We need that if there are relative links in the content that need to be resolved | |||
pub permalinks: HashMap<String, String>, | |||
/// Contains all pages and sections of the site | |||
pub library: Library, | |||
} | |||
impl Site { | |||
@@ -108,7 +101,7 @@ impl Site { | |||
); | |||
let mut tera_theme = Tera::parse(&theme_tpl_glob).chain_err(|| "Error parsing templates from themes")?; | |||
rewrite_theme_paths(&mut tera_theme, &theme); | |||
// TODO: same as above | |||
// TODO: same as below | |||
if theme_path.join("templates").join("robots.txt").exists() { | |||
tera_theme.add_template_file(theme_path.join("templates").join("robots.txt"), None)?; | |||
} | |||
@@ -133,8 +126,6 @@ impl Site { | |||
base_path: path.to_path_buf(), | |||
config, | |||
tera, | |||
pages: HashMap::new(), | |||
sections: HashMap::new(), | |||
imageproc: Arc::new(Mutex::new(imageproc)), | |||
live_reload: None, | |||
output_path: path.join("public"), | |||
@@ -142,6 +133,8 @@ impl Site { | |||
static_path, | |||
taxonomies: Vec::new(), | |||
permalinks: HashMap::new(), | |||
// We will allocate it properly later on | |||
library: Library::new(0, 0), | |||
}; | |||
Ok(site) | |||
@@ -158,15 +151,7 @@ impl Site { | |||
/// Get all the orphan (== without section) pages in the site | |||
pub fn get_all_orphan_pages(&self) -> Vec<&Page> { | |||
let pages_in_sections = self.sections | |||
.values() | |||
.flat_map(|s| s.all_pages_path()) | |||
.collect::<HashSet<_>>(); | |||
self.pages | |||
.values() | |||
.filter(|page| !pages_in_sections.contains(&page.file.path)) | |||
.collect() | |||
self.library.get_all_orphan_pages() | |||
} | |||
pub fn set_base_url(&mut self, base_url: String) { | |||
@@ -191,6 +176,8 @@ impl Site { | |||
.filter(|e| !e.as_path().file_name().unwrap().to_str().unwrap().starts_with('.')) | |||
.partition(|entry| entry.as_path().file_name().unwrap() == "_index.md"); | |||
self.library = Library::new(page_entries.len(), section_entries.len()); | |||
let sections = { | |||
let config = &self.config; | |||
@@ -225,7 +212,7 @@ impl Site { | |||
// Insert a default index section if necessary so we don't need to create | |||
// a _index.md to render the index page at the root of the site | |||
let index_path = self.index_section_path(); | |||
if let Some(ref index_section) = self.sections.get(&index_path) { | |||
if let Some(ref index_section) = self.library.get_section(&index_path) { | |||
if self.config.build_search_index && !index_section.meta.in_search_index { | |||
bail!( | |||
"You have enabled search in the config but disabled it in the index section: \ | |||
@@ -235,12 +222,13 @@ impl Site { | |||
} | |||
} | |||
// Not in else because of borrow checker | |||
if !self.sections.contains_key(&index_path) { | |||
if !self.library.contains_section(&index_path) { | |||
let mut index_section = Section::default(); | |||
index_section.permalink = self.config.make_permalink(""); | |||
index_section.file.path = self.content_path.join("_index.md"); | |||
index_section.file.parent = self.content_path.clone(); | |||
index_section.file.relative = "_index.md".to_string(); | |||
self.sections.insert(index_path, index_section); | |||
self.library.insert_section(index_section); | |||
} | |||
let mut pages_insert_anchors = HashMap::new(); | |||
@@ -253,6 +241,8 @@ impl Site { | |||
self.register_early_global_fns(); | |||
self.render_markdown()?; | |||
self.populate_sections(); | |||
// self.library.cache_all_pages(); | |||
// self.library.cache_all_sections(); | |||
self.populate_taxonomies()?; | |||
self.register_tera_global_fns(); | |||
@@ -271,19 +261,27 @@ impl Site { | |||
// This is needed in the first place because of silly borrow checker | |||
let mut pages_insert_anchors = HashMap::new(); | |||
for p in self.pages.values() { | |||
for (_, p) in self.library.pages() { | |||
pages_insert_anchors.insert(p.file.path.clone(), self.find_parent_section_insert_anchor(&p.file.parent.clone())); | |||
} | |||
self.pages.par_iter_mut() | |||
.map(|(_, page)| { | |||
self.library | |||
.pages_mut() | |||
.values_mut() | |||
.collect::<Vec<_>>() | |||
.par_iter_mut() | |||
.map(|page| { | |||
let insert_anchor = pages_insert_anchors[&page.file.path]; | |||
page.render_markdown(permalinks, tera, config, base_path, insert_anchor) | |||
}) | |||
.collect::<Result<()>>()?; | |||
self.sections.par_iter_mut() | |||
.map(|(_, section)| section.render_markdown(permalinks, tera, config, base_path)) | |||
self.library | |||
.sections_mut() | |||
.values_mut() | |||
.collect::<Vec<_>>() | |||
.par_iter_mut() | |||
.map(|section| section.render_markdown(permalinks, tera, config, base_path)) | |||
.collect::<Result<()>>()?; | |||
Ok(()) | |||
@@ -301,15 +299,15 @@ impl Site { | |||
pub fn register_tera_global_fns(&mut self) { | |||
self.tera.register_function("trans", global_fns::make_trans(self.config.clone())); | |||
self.tera.register_function("get_page", global_fns::make_get_page(&self.pages)); | |||
self.tera.register_function("get_section", global_fns::make_get_section(&self.sections)); | |||
self.tera.register_function("get_page", global_fns::make_get_page(&self.library)); | |||
self.tera.register_function("get_section", global_fns::make_get_section(&self.library)); | |||
self.tera.register_function( | |||
"get_taxonomy", | |||
global_fns::make_get_taxonomy(self.taxonomies.clone()), | |||
global_fns::make_get_taxonomy(&self.taxonomies, &self.library), | |||
); | |||
self.tera.register_function( | |||
"get_taxonomy_url", | |||
global_fns::make_get_taxonomy_url(self.taxonomies.clone()), | |||
global_fns::make_get_taxonomy_url(&self.taxonomies), | |||
); | |||
} | |||
@@ -317,16 +315,14 @@ impl Site { | |||
/// The `render` parameter is used in the serve command, when rebuilding a page. | |||
/// If `true`, it will also render the markdown for that page | |||
/// Returns the previous page struct if there was one at the same path | |||
pub fn add_page(&mut self, page: Page, render: bool) -> Result<Option<Page>> { | |||
let path = page.file.path.clone(); | |||
pub fn add_page(&mut self, mut page: Page, render: bool) -> Result<Option<Page>> { | |||
self.permalinks.insert(page.file.relative.clone(), page.permalink.clone()); | |||
let prev = self.pages.insert(page.file.path.clone(), page); | |||
if render { | |||
let insert_anchor = self.find_parent_section_insert_anchor(&self.pages[&path].file.parent); | |||
let page = self.pages.get_mut(&path).unwrap(); | |||
let insert_anchor = self.find_parent_section_insert_anchor(&page.file.parent); | |||
page.render_markdown(&self.permalinks, &self.tera, &self.config, &self.base_path, insert_anchor)?; | |||
} | |||
let prev = self.library.remove_page(&page.file.path); | |||
self.library.insert_page(page); | |||
Ok(prev) | |||
} | |||
@@ -335,15 +331,13 @@ impl Site { | |||
/// The `render` parameter is used in the serve command, when rebuilding a page. | |||
/// If `true`, it will also render the markdown for that page | |||
/// Returns the previous section struct if there was one at the same path | |||
pub fn add_section(&mut self, section: Section, render: bool) -> Result<Option<Section>> { | |||
let path = section.file.path.clone(); | |||
pub fn add_section(&mut self, mut section: Section, render: bool) -> Result<Option<Section>> { | |||
self.permalinks.insert(section.file.relative.clone(), section.permalink.clone()); | |||
let prev = self.sections.insert(section.file.path.clone(), section); | |||
if render { | |||
let section = self.sections.get_mut(&path).unwrap(); | |||
section.render_markdown(&self.permalinks, &self.tera, &self.config, &self.base_path)?; | |||
} | |||
let prev = self.library.remove_section(§ion.file.path); | |||
self.library.insert_section(section); | |||
Ok(prev) | |||
} | |||
@@ -351,7 +345,7 @@ impl Site { | |||
/// Finds the insert_anchor for the parent section of the directory at `path`. | |||
/// Defaults to `AnchorInsert::None` if no parent section found | |||
pub fn find_parent_section_insert_anchor(&self, parent_path: &PathBuf) -> InsertAnchor { | |||
match self.sections.get(&parent_path.join("_index.md")) { | |||
match self.library.get_section(&parent_path.join("_index.md")) { | |||
Some(s) => s.meta.insert_anchor_links, | |||
None => InsertAnchor::None | |||
} | |||
@@ -360,59 +354,7 @@ impl Site { | |||
/// Find out the direct subsections of each subsection if there are some | |||
/// as well as the pages for each section | |||
pub fn populate_sections(&mut self) { | |||
let mut grandparent_paths: HashMap<PathBuf, Vec<PathBuf>> = HashMap::new(); | |||
for section in self.sections.values_mut() { | |||
if let Some(ref grand_parent) = section.file.grand_parent { | |||
grandparent_paths | |||
.entry(grand_parent.to_path_buf()) | |||
.or_insert_with(|| vec![]) | |||
.push(section.file.path.clone()); | |||
} | |||
// Make sure the pages of a section are empty since we can call that many times on `serve` | |||
section.pages = vec![]; | |||
section.ignored_pages = vec![]; | |||
} | |||
for page in self.pages.values() { | |||
let parent_section_path = page.file.parent.join("_index.md"); | |||
if self.sections.contains_key(&parent_section_path) { | |||
// TODO: use references instead of cloning to avoid having to call populate_section on | |||
// content change | |||
self.sections.get_mut(&parent_section_path).unwrap().pages.push(page.clone()); | |||
} | |||
} | |||
self.sort_sections_pages(None); | |||
// TODO: remove this clone | |||
let sections = self.sections.clone(); | |||
for section in self.sections.values_mut() { | |||
if let Some(paths) = grandparent_paths.get(§ion.file.parent) { | |||
section.subsections = paths | |||
.iter() | |||
.map(|p| sections[p].clone()) | |||
.collect::<Vec<_>>(); | |||
section.subsections | |||
.sort_by(|a, b| a.meta.weight.cmp(&b.meta.weight)); | |||
} | |||
} | |||
} | |||
/// Sorts the pages of the section at the given path | |||
/// By default will sort all sections but can be made to only sort a single one by providing a path | |||
pub fn sort_sections_pages(&mut self, only: Option<&Path>) { | |||
for (path, section) in &mut self.sections { | |||
if let Some(p) = only { | |||
if p != path { | |||
continue; | |||
} | |||
} | |||
let pages = mem::replace(&mut section.pages, vec![]); | |||
let (sorted_pages, cannot_be_sorted_pages) = sort_pages(pages, section.meta.sort_by); | |||
section.pages = populate_siblings(&sorted_pages, section.meta.sort_by); | |||
section.ignored_pages = cannot_be_sorted_pages; | |||
} | |||
self.library.populate_sections(); | |||
} | |||
/// Find all the tags and categories if it's asked in the config | |||
@@ -421,13 +363,7 @@ impl Site { | |||
return Ok(()); | |||
} | |||
self.taxonomies = find_taxonomies( | |||
&self.config, | |||
self.pages | |||
.values() | |||
.filter(|p| !p.is_draft()) | |||
.collect::<Vec<_>>(), | |||
)?; | |||
self.taxonomies = find_taxonomies(&self.config, &self.library)?; | |||
Ok(()) | |||
} | |||
@@ -501,7 +437,7 @@ impl Site { | |||
create_directory(&current_path)?; | |||
// Finally, create a index.html file there with the page rendered | |||
let output = page.render_html(&self.tera, &self.config)?; | |||
let output = page.render_html(&self.tera, &self.config, &self.library)?; | |||
create_file(&current_path.join("index.html"), &self.inject_livereload(output))?; | |||
// Copy any asset we found previously into the same directory as the index.html | |||
@@ -522,7 +458,7 @@ impl Site { | |||
self.render_orphan_pages()?; | |||
self.render_sitemap()?; | |||
if self.config.generate_rss { | |||
self.render_rss_feed(self.pages.values().collect(), None)?; | |||
self.render_rss_feed(self.library.pages_values(), None)?; | |||
} | |||
self.render_404()?; | |||
self.render_robots()?; | |||
@@ -555,7 +491,7 @@ impl Site { | |||
&self.output_path.join(&format!("search_index.{}.js", self.config.default_language)), | |||
&format!( | |||
"window.searchIndex = {};", | |||
search::build_index(&self.sections, &self.config.default_language)? | |||
search::build_index(&self.config.default_language, &self.library)? | |||
), | |||
)?; | |||
@@ -627,7 +563,7 @@ impl Site { | |||
} | |||
pub fn render_aliases(&self) -> Result<()> { | |||
for page in self.pages.values() { | |||
for (_, page) in self.library.pages() { | |||
for alias in &page.meta.aliases { | |||
let mut output_path = self.output_path.to_path_buf(); | |||
let mut split = alias.split('/').collect::<Vec<_>>(); | |||
@@ -695,7 +631,7 @@ impl Site { | |||
ensure_directory_exists(&self.output_path)?; | |||
let output_path = self.output_path.join(&taxonomy.kind.name); | |||
let list_output = taxonomy.render_all_terms(&self.tera, &self.config)?; | |||
let list_output = taxonomy.render_all_terms(&self.tera, &self.config, &self.library)?; | |||
create_directory(&output_path)?; | |||
create_file(&output_path.join("index.html"), &self.inject_livereload(list_output))?; | |||
@@ -705,15 +641,15 @@ impl Site { | |||
.map(|item| { | |||
if taxonomy.kind.rss { | |||
self.render_rss_feed( | |||
item.pages.iter().map(|p| p).collect(), | |||
item.pages.iter().map(|p| self.library.get_page_by_key(*p)).collect(), | |||
Some(&PathBuf::from(format!("{}/{}", taxonomy.kind.name, item.slug))), | |||
)?; | |||
} | |||
if taxonomy.kind.is_paginated() { | |||
self.render_paginated(&output_path, &Paginator::from_taxonomy(&taxonomy, item)) | |||
self.render_paginated(&output_path, &Paginator::from_taxonomy(&taxonomy, item, &self.library)) | |||
} else { | |||
let single_output = taxonomy.render_term(item, &self.tera, &self.config)?; | |||
let single_output = taxonomy.render_term(item, &self.tera, &self.config, &self.library)?; | |||
let path = output_path.join(&item.slug); | |||
create_directory(&path)?; | |||
create_file( | |||
@@ -731,8 +667,9 @@ impl Site { | |||
let mut context = Context::new(); | |||
let mut pages = self.pages | |||
.values() | |||
let mut pages = self.library | |||
.pages_values() | |||
.iter() | |||
.filter(|p| !p.is_draft()) | |||
.map(|p| { | |||
let date = match p.meta.date { | |||
@@ -745,8 +682,9 @@ impl Site { | |||
pages.sort_by(|a, b| a.permalink.cmp(&b.permalink)); | |||
context.insert("pages", &pages); | |||
let mut sections = self.sections | |||
.values() | |||
let mut sections = self.library | |||
.sections_values() | |||
.iter() | |||
.map(|s| SitemapEntry::new(s.permalink.clone(), None)) | |||
.collect::<Vec<_>>(); | |||
sections.sort_by(|a, b| a.permalink.cmp(&b.permalink)); | |||
@@ -786,16 +724,22 @@ impl Site { | |||
.filter(|p| p.meta.date.is_some() && !p.is_draft()) | |||
.collect::<Vec<_>>(); | |||
pages.par_sort_unstable_by(sort_pages_by_date); | |||
// Don't generate an RSS feed if none of the pages has a date | |||
if pages.is_empty() { | |||
return Ok(()); | |||
} | |||
pages.par_sort_unstable_by(sort_actual_pages_by_date); | |||
context.insert("last_build_date", &pages[0].meta.date.clone().map(|d| d.to_string())); | |||
// limit to the last n elements | |||
context.insert("pages", &pages.iter().take(self.config.rss_limit).collect::<Vec<_>>()); | |||
let p = pages | |||
.iter() | |||
.take(self.config.rss_limit) | |||
.map(|x| x.to_serialized_basic()) | |||
.collect::<Vec<_>>(); | |||
context.insert("pages", &p); | |||
context.insert("config", &self.config); | |||
let rss_feed_url = if let Some(ref base) = base_path { | |||
@@ -848,7 +792,7 @@ impl Site { | |||
section | |||
.pages | |||
.par_iter() | |||
.map(|p| self.render_page(p)) | |||
.map(|k| self.render_page(self.library.get_page_by_key(*k))) | |||
.collect::<Result<()>>()?; | |||
} | |||
@@ -863,9 +807,9 @@ impl Site { | |||
} | |||
if section.meta.is_paginated() { | |||
self.render_paginated(&output_path, &Paginator::from_section(&section.pages, section))?; | |||
self.render_paginated(&output_path, &Paginator::from_section(&section, &self.library))?; | |||
} else { | |||
let output = section.render_html(&self.tera, &self.config)?; | |||
let output = section.render_html(&self.tera, &self.config, &self.library)?; | |||
create_file(&output_path.join("index.html"), &self.inject_livereload(output))?; | |||
} | |||
@@ -875,16 +819,15 @@ impl Site { | |||
/// Used only on reload | |||
pub fn render_index(&self) -> Result<()> { | |||
self.render_section( | |||
&self.sections[&self.content_path.join("_index.md")], | |||
&self.library.get_section(&self.content_path.join("_index.md")).unwrap(), | |||
false, | |||
) | |||
} | |||
/// Renders all sections | |||
pub fn render_sections(&self) -> Result<()> { | |||
self.sections | |||
.values() | |||
.collect::<Vec<_>>() | |||
self.library | |||
.sections_values() | |||
.into_par_iter() | |||
.map(|s| self.render_section(s, true)) | |||
.collect::<Result<()>>() | |||
@@ -19,44 +19,46 @@ fn can_parse_site() { | |||
site.load().unwrap(); | |||
// Correct number of pages (sections are pages too) | |||
assert_eq!(site.pages.len(), 15); | |||
assert_eq!(site.library.pages().len(), 15); | |||
let posts_path = path.join("content").join("posts"); | |||
// Make sure we remove all the pwd + content from the sections | |||
let basic = &site.pages[&posts_path.join("simple.md")]; | |||
let basic = site.library.get_page(&posts_path.join("simple.md")).unwrap(); | |||
assert_eq!(basic.file.components, vec!["posts".to_string()]); | |||
// Make sure the page with a url doesn't have any sections | |||
let url_post = &site.pages[&posts_path.join("fixed-url.md")]; | |||
let url_post = site.library.get_page(&posts_path.join("fixed-url.md")).unwrap(); | |||
assert_eq!(url_post.path, "a-fixed-url/"); | |||
// Make sure the article in a folder with only assets doesn't get counted as a section | |||
let asset_folder_post = &site.pages[&posts_path.join("with-assets").join("index.md")]; | |||
let asset_folder_post = site.library.get_page(&posts_path.join("with-assets").join("index.md")).unwrap(); | |||
assert_eq!(asset_folder_post.file.components, vec!["posts".to_string()]); | |||
// That we have the right number of sections | |||
assert_eq!(site.sections.len(), 7); | |||
assert_eq!(site.library.sections().len(), 7); | |||
// And that the sections are correct | |||
let index_section = &site.sections[&path.join("content").join("_index.md")]; | |||
let index_section = site.library.get_section(&path.join("content").join("_index.md")).unwrap(); | |||
assert_eq!(index_section.subsections.len(), 3); | |||
assert_eq!(index_section.pages.len(), 1); | |||
let posts_section = &site.sections[&posts_path.join("_index.md")]; | |||
let posts_section = site.library.get_section(&posts_path.join("_index.md")).unwrap(); | |||
assert_eq!(posts_section.subsections.len(), 1); | |||
assert_eq!(posts_section.pages.len(), 7); | |||
let tutorials_section = &site.sections[&posts_path.join("tutorials").join("_index.md")]; | |||
let tutorials_section = site.library.get_section(&posts_path.join("tutorials").join("_index.md")).unwrap(); | |||
assert_eq!(tutorials_section.subsections.len(), 2); | |||
assert_eq!(tutorials_section.subsections[0].clone().meta.title.unwrap(), "Programming"); | |||
assert_eq!(tutorials_section.subsections[1].clone().meta.title.unwrap(), "DevOps"); | |||
let sub1 = site.library.get_section_by_key(tutorials_section.subsections[0]); | |||
let sub2 = site.library.get_section_by_key(tutorials_section.subsections[1]); | |||
assert_eq!(sub1.clone().meta.title.unwrap(), "Programming"); | |||
assert_eq!(sub2.clone().meta.title.unwrap(), "DevOps"); | |||
assert_eq!(tutorials_section.pages.len(), 0); | |||
let devops_section = &site.sections[&posts_path.join("tutorials").join("devops").join("_index.md")]; | |||
let devops_section = site.library.get_section(&posts_path.join("tutorials").join("devops").join("_index.md")).unwrap(); | |||
assert_eq!(devops_section.subsections.len(), 0); | |||
assert_eq!(devops_section.pages.len(), 2); | |||
let prog_section = &site.sections[&posts_path.join("tutorials").join("programming").join("_index.md")]; | |||
let prog_section = site.library.get_section(&posts_path.join("tutorials").join("programming").join("_index.md")).unwrap(); | |||
assert_eq!(prog_section.subsections.len(), 0); | |||
assert_eq!(prog_section.pages.len(), 2); | |||
} | |||
@@ -220,7 +222,7 @@ fn can_build_site_with_taxonomies() { | |||
let mut site = Site::new(&path, "config.toml").unwrap(); | |||
site.load().unwrap(); | |||
for (i, page) in site.pages.values_mut().enumerate() { | |||
for (i, (_, page)) in site.library.pages_mut().iter_mut().enumerate() { | |||
page.meta.taxonomies = { | |||
let mut taxonomies = HashMap::new(); | |||
taxonomies.insert("categories".to_string(), vec![if i % 2 == 0 { "A" } else { "B" }.to_string()]); | |||
@@ -290,7 +292,7 @@ fn can_build_site_with_pagination_for_section() { | |||
path.push("test_site"); | |||
let mut site = Site::new(&path, "config.toml").unwrap(); | |||
site.load().unwrap(); | |||
for section in site.sections.values_mut(){ | |||
for (_, section) in site.library.sections_mut() { | |||
if section.is_index() { | |||
continue; | |||
} | |||
@@ -369,7 +371,7 @@ fn can_build_site_with_pagination_for_index() { | |||
let mut site = Site::new(&path, "config.toml").unwrap(); | |||
site.load().unwrap(); | |||
{ | |||
let index = site.sections.get_mut(&path.join("content").join("_index.md")).unwrap(); | |||
let index = site.library.get_section_mut(&path.join("content").join("_index.md")).unwrap(); | |||
index.meta.paginate_by = Some(2); | |||
index.meta.template = Some("index_paginated.html".to_string()); | |||
} | |||
@@ -1,16 +0,0 @@ | |||
[package] | |||
name = "taxonomies" | |||
version = "0.1.0" | |||
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] | |||
[dependencies] | |||
tera = "0.11" | |||
slug = "0.1" | |||
serde = "1" | |||
serde_derive = "1" | |||
errors = { path = "../errors" } | |||
config = { path = "../config" } | |||
content = { path = "../content" } | |||
front_matter = { path = "../front_matter" } | |||
utils = { path = "../utils" } |
@@ -11,7 +11,6 @@ pulldown-cmark = "0" | |||
errors = { path = "../errors" } | |||
utils = { path = "../utils" } | |||
content = { path = "../content" } | |||
library = { path = "../library" } | |||
config = { path = "../config" } | |||
taxonomies = { path = "../taxonomies" } | |||
imageproc = { path = "../imageproc" } |
@@ -1,13 +1,11 @@ | |||
use std::collections::HashMap; | |||
use std::path::PathBuf; | |||
use std::sync::{Arc, Mutex}; | |||
use tera::{GlobalFn, Value, from_value, to_value, Result}; | |||
use content::{Page, Section}; | |||
use library::{Taxonomy, Library}; | |||
use config::Config; | |||
use utils::site::resolve_internal_link; | |||
use taxonomies::Taxonomy; | |||
use imageproc; | |||
@@ -53,10 +51,13 @@ pub fn make_trans(config: Config) -> GlobalFn { | |||
} | |||
pub fn make_get_page(all_pages: &HashMap<PathBuf, Page>) -> GlobalFn { | |||
pub fn make_get_page(library: &Library) -> GlobalFn { | |||
let mut pages = HashMap::new(); | |||
for page in all_pages.values() { | |||
pages.insert(page.file.relative.clone(), page.clone()); | |||
for page in library.pages_values() { | |||
pages.insert( | |||
page.file.relative.clone(), | |||
to_value(library.get_page(&page.file.path).unwrap().to_serialized(library.pages())).unwrap(), | |||
); | |||
} | |||
Box::new(move |args| -> Result<Value> { | |||
@@ -66,19 +67,19 @@ pub fn make_get_page(all_pages: &HashMap<PathBuf, Page>) -> GlobalFn { | |||
"`get_page` requires a `path` argument with a string value" | |||
); | |||
match pages.get(&path) { | |||
Some(p) => Ok(to_value(p).unwrap()), | |||
Some(p) => Ok(p.clone()), | |||
None => Err(format!("Page `{}` not found.", path).into()) | |||
} | |||
}) | |||
} | |||
pub fn make_get_section(all_sections: &HashMap<PathBuf, Section>) -> GlobalFn { | |||
pub fn make_get_section(library: &Library) -> GlobalFn { | |||
let mut sections = HashMap::new(); | |||
for section in all_sections.values() { | |||
if section.file.components == vec!["rebuild".to_string()] { | |||
//println!("Setting sections:\n{:#?}", section.pages[0]); | |||
} | |||
sections.insert(section.file.relative.clone(), section.clone()); | |||
for section in library.sections_values() { | |||
sections.insert( | |||
section.file.relative.clone(), | |||
to_value(library.get_section(&section.file.path).unwrap().to_serialized(library)).unwrap(), | |||
); | |||
} | |||
Box::new(move |args| -> Result<Value> { | |||
@@ -87,9 +88,9 @@ pub fn make_get_section(all_sections: &HashMap<PathBuf, Section>) -> GlobalFn { | |||
args.get("path"), | |||
"`get_section` requires a `path` argument with a string value" | |||
); | |||
//println!("Found {:#?}", sections.get(&path).unwrap().pages[0]); | |||
match sections.get(&path) { | |||
Some(p) => Ok(to_value(p).unwrap()), | |||
Some(p) => Ok(p.clone()), | |||
None => Err(format!("Section `{}` not found.", path).into()) | |||
} | |||
}) | |||
@@ -134,10 +135,13 @@ pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> Glob | |||
}) | |||
} | |||
pub fn make_get_taxonomy(all_taxonomies: Vec<Taxonomy>) -> GlobalFn { | |||
pub fn make_get_taxonomy(all_taxonomies: &[Taxonomy], library: &Library) -> GlobalFn { | |||
let mut taxonomies = HashMap::new(); | |||
for taxonomy in all_taxonomies { | |||
taxonomies.insert(taxonomy.kind.name.clone(), taxonomy); | |||
taxonomies.insert( | |||
taxonomy.kind.name.clone(), | |||
to_value(taxonomy.to_serialized(library)).unwrap() | |||
); | |||
} | |||
Box::new(move |args| -> Result<Value> { | |||
@@ -157,10 +161,14 @@ pub fn make_get_taxonomy(all_taxonomies: Vec<Taxonomy>) -> GlobalFn { | |||
}) | |||
} | |||
pub fn make_get_taxonomy_url(all_taxonomies: Vec<Taxonomy>) -> GlobalFn { | |||
pub fn make_get_taxonomy_url(all_taxonomies: &[Taxonomy]) -> GlobalFn { | |||
let mut taxonomies = HashMap::new(); | |||
for taxonomy in all_taxonomies { | |||
taxonomies.insert(taxonomy.kind.name.clone(), taxonomy); | |||
let mut items = HashMap::new(); | |||
for item in &taxonomy.items { | |||
items.insert(item.name.clone(), item.permalink.clone()); | |||
} | |||
taxonomies.insert(taxonomy.kind.name.clone(), items); | |||
} | |||
Box::new(move |args| -> Result<Value> { | |||
@@ -181,10 +189,8 @@ pub fn make_get_taxonomy_url(all_taxonomies: Vec<Taxonomy>) -> GlobalFn { | |||
) | |||
}; | |||
for item in &container.items { | |||
if item.name == name { | |||
return Ok(to_value(item.permalink.clone()).unwrap()); | |||
} | |||
if let Some(ref permalink) = container.get(&name) { | |||
return Ok(to_value(permalink.clone()).unwrap()); | |||
} | |||
Err( | |||
@@ -247,10 +253,10 @@ mod tests { | |||
use std::collections::HashMap; | |||
use tera::to_value; | |||
use tera::{to_value, Value}; | |||
use config::{Config, Taxonomy as TaxonomyConfig}; | |||
use taxonomies::{Taxonomy, TaxonomyItem}; | |||
use library::{Taxonomy, TaxonomyItem, Library}; | |||
#[test] | |||
@@ -296,22 +302,44 @@ mod tests { | |||
#[test] | |||
fn can_get_taxonomy() { | |||
let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; | |||
let library = Library::new(0, 0); | |||
let tag = TaxonomyItem::new( | |||
"Progamming", | |||
"Programming", | |||
"tags", | |||
&Config::default(), | |||
vec![], | |||
&library | |||
); | |||
let tags = Taxonomy { | |||
kind: taxo_config, | |||
items: vec![tag], | |||
}; | |||
let static_fn = make_get_taxonomy(vec![tags.clone()]); | |||
let taxonomies = vec![tags.clone()]; | |||
let static_fn = make_get_taxonomy(&taxonomies, &library); | |||
// can find it correctly | |||
let mut args = HashMap::new(); | |||
args.insert("kind".to_string(), to_value("tags").unwrap()); | |||
assert_eq!(static_fn(args).unwrap(), to_value(&tags).unwrap()); | |||
let res = static_fn(args).unwrap(); | |||
let res_obj = res.as_object().unwrap(); | |||
assert_eq!(res_obj["kind"], to_value(tags.kind).unwrap()); | |||
assert_eq!(res_obj["items"].clone().as_array().unwrap().len(), 1); | |||
assert_eq!( | |||
res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["name"], | |||
Value::String("Programming".to_string()) | |||
); | |||
assert_eq!( | |||
res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["slug"], | |||
Value::String("programming".to_string()) | |||
); | |||
assert_eq!( | |||
res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["permalink"], | |||
Value::String("http://a-website.com/tags/programming/".to_string()) | |||
); | |||
assert_eq!( | |||
res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["pages"], | |||
Value::Array(vec![]) | |||
); | |||
// and errors if it can't find it | |||
let mut args = HashMap::new(); | |||
args.insert("kind".to_string(), to_value("something-else").unwrap()); | |||
@@ -321,18 +349,21 @@ mod tests { | |||
#[test] | |||
fn can_get_taxonomy_url() { | |||
let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; | |||
let library = Library::new(0, 0); | |||
let tag = TaxonomyItem::new( | |||
"Programming", | |||
"tags", | |||
&Config::default(), | |||
vec![], | |||
&library | |||
); | |||
let tags = Taxonomy { | |||
kind: taxo_config, | |||
items: vec![tag], | |||
}; | |||
let static_fn = make_get_taxonomy_url(vec![tags.clone()]); | |||
let taxonomies = vec![tags.clone()]; | |||
let static_fn = make_get_taxonomy_url(&taxonomies); | |||
// can find it correctly | |||
let mut args = HashMap::new(); | |||
args.insert("kind".to_string(), to_value("tags").unwrap()); | |||
@@ -7,9 +7,8 @@ extern crate pulldown_cmark; | |||
extern crate errors; | |||
extern crate utils; | |||
extern crate content; | |||
extern crate library; | |||
extern crate config; | |||
extern crate taxonomies; | |||
extern crate imageproc; | |||
pub mod filters; | |||
@@ -3,11 +3,11 @@ title = "Section" | |||
weight = 20 | |||
+++ | |||
A section is created whenever a folder (or subfolder) in the `content` section contains an | |||
`_index.md` file. If a folder does not contain an `_index.md` file, no section will be | |||
created, but markdown files within that folder will still create pages (known as orphan pages). | |||
The index page (i.e., the page displayed when a user browses to your `base_url`) is a section, | |||
which is created whether or not you add an `_index.md` file at the root of your `content` folder. | |||
If you do not create an `_index.md` file in your content directory, this main content section will | |||
not have any content or metadata. If you would like to add content or metadata, you can add an | |||
@@ -96,9 +96,9 @@ by setting the `paginate_path` variable, which defaults to `page`. | |||
## Sorting | |||
It is very common for Gutenberg templates to iterate over pages or sections | |||
to display all pages/sections in a given directory. Consider a very simple | |||
example: a `blog` directory with three files: `blog/Post_1.md`, | |||
`blog/Post_2.md`, and `blog/Post_3.md`. To iterate over these posts and | |||
create a list of links to the posts, a simple template might look like this: | |||
```j2 | |||
@@ -107,15 +107,15 @@ create a list of links to the posts, a simple template might look like this: | |||
{% endfor %} | |||
``` | |||
This would iterate over the posts, and would do so in a specific order | |||
based on the `sort_by` variable set in the `_index.md` page for the | |||
containing section. The `sort_by` variable can be given three values: `date`, | |||
`weight`, and `none`. If no `sort_by` method is set, the pages will be | |||
sorted in the `none` order, which is not intended to be used for sorted content. | |||
Any page that is missing the data it needs to be sorted will be ignored and | |||
won't be rendered. For example, if a page is missing the date variable and the | |||
containing section sets `sort_by = "date"`, then that page will be ignored. | |||
The terminal will warn you if this is happening. | |||
If several pages have the same date/weight/order, their permalink will be used | |||
@@ -127,18 +127,18 @@ The `sort_by` front-matter variable can have the following values: | |||
### `date` | |||
This will sort all pages by their `date` field, from the most recent (at the | |||
top of the list) to the oldest (at the bottom of the list). Each page will | |||
get `page.earlier` and `page.later` variables that contain the pages with | |||
earlier and later dates, respectively. | |||
### `weight` | |||
This will sort all pages by their `weight` field, from lightest weight | |||
(at the top of the list) to heaviest (at the bottom of the list). Each | |||
page gets `page.lighter` and `page.heavier` variables that contain the | |||
pages with lighter and heavier weights, respectively. | |||
When iterating through pages, you may wish to use the Tera `reverse` filter, | |||
which reverses the order of the pages. Thus, after using the `reverse` filter, | |||
pages sorted by weight will be sorted from lightest (at the top) to heaviest | |||
(at the bottom); pages sorted by date will be sorted from oldest (at the top) | |||
to newest (at the bottom). | |||
@@ -153,8 +153,8 @@ the top of the list and the heaviest (highest `weight`) will be at the top; | |||
the `reverse` filter reverses this order. | |||
**Note**: Unlike pages, permalinks will **not** be used to break ties between | |||
equally weighted sections. Thus, if the `weight` variable for your section is not set (or if it | |||
is set in a way that produces ties), then your sections will be sorted in | |||
**random** order. Moreover, that order is determined at build time and will | |||
change with each site rebuild. Thus, if there is any chance that you will | |||
iterate over your sections, you should always assign them weight. |
@@ -13,7 +13,7 @@ lazy_static! { | |||
/// Termcolor color choice. | |||
/// We do not rely on ColorChoice::Auto behavior | |||
/// as the check is already performed by has_color. | |||
static ref COLOR_CHOICE: ColorChoice = | |||
if has_color() { | |||
ColorChoice::Always | |||
} else { | |||
@@ -49,18 +49,23 @@ fn colorize(message: &str, color: &ColorSpec) { | |||
pub fn notify_site_size(site: &Site) { | |||
println!( | |||
"-> Creating {} pages ({} orphan), {} sections, and processing {} images", | |||
site.pages.len(), | |||
site.library.pages().len(), | |||
site.get_all_orphan_pages().len(), | |||
site.sections.len() - 1, // -1 since we do not count the index as a section | |||
site.library.sections().len() - 1, // -1 since we do not count the index as a section | |||
site.num_img_ops(), | |||
); | |||
} | |||
/// Display a warning in the console if there are ignored pages in the site | |||
pub fn warn_about_ignored_pages(site: &Site) { | |||
let ignored_pages: Vec<_> = site.sections | |||
.values() | |||
.flat_map(|s| s.ignored_pages.iter().map(|p| p.file.path.clone())) | |||
let ignored_pages: Vec<_> = site.library | |||
.sections_values() | |||
.iter() | |||
.flat_map(|s| { | |||
s.ignored_pages | |||
.iter() | |||
.map(|k| site.library.get_page_by_key(*k).file.path.clone()) | |||
}) | |||
.collect(); | |||
if !ignored_pages.is_empty() { | |||
@@ -14,7 +14,6 @@ extern crate ctrlc; | |||
extern crate site; | |||
#[macro_use] | |||
extern crate errors; | |||
extern crate content; | |||
extern crate front_matter; | |||
extern crate utils; | |||
extern crate rebuild; | |||