@@ -36,4 +36,4 @@
 	url = https://github.com/zyxar/Sublime-CMakeLists
 [submodule "sublime_syntaxes/Swift-for-f-ing-sublime"]
 	path = sublime_syntaxes/Swift-for-f-ing-sublime
-	url = git@github.com:colinta/Swift-for-f-ing-sublime.git
+	url = https://github.com/colinta/Swift-for-f-ing-sublime.git
@@ -4,7 +4,7 @@ services: docker
 env:
   global:
-    - CRATE_NAME=gutenberg
+    - CRATE_NAME=zola

 matrix:
   include:
@@ -16,7 +16,7 @@ matrix:
     # The earliest stable Rust version that works
     - env: TARGET=x86_64-unknown-linux-gnu
-      rust: 1.27.0
+      rust: 1.29.0

 before_install: set -e
@@ -35,7 +35,7 @@ before_deploy:
 deploy:
   api_key:
-    secure: X0M1TT06/MHfwaENl+u/K3twBU0BVTQimXfBkHzODWsWC84SGeJPMiovIkuBxq4P7Wk7sIr1d/IINlq0sK40IvI3Xwy95YtpTKcK52ffZjTmHSNExCy+OhW2JefNPOwPI89JWKsmHM1I8FuDRiENTyawVS8akcl1XnQhS3V3a1zEuwpULO+6UwDTauJDRdVenDY7tHxbwYH644djZpKcL3LsPLkv0r0XlWnyH+Lw65/ggUmw63KaZTN+hOYfznXGNjlsv2YSD8lCo7eGGg+WA1rsr1SDOxzMM60OOE2Y4lDo0lX9tPWAxMfltvuouxfZ8Y2II4oNEYQui+AqaZ6CfhjFrfZG5P6QdFcGcjiYhDC+s+R9m+tCtzCcKh+lahxcfwOEo1O9pAsg77XVy5gf9guM++9uhYc54Z7tUeyNyJQVaQHt0drmqqeQWfk8w2YBmTPiJ7mwAbhEU5gISWQBpc9eRM1PiOaWDOJHgyV1rZfOT6RxgKBu5DW4pSZ6Iar7Qc+u4ei80QRI2jVsnlPY8/5rl/z0fqSnOen/wyGQHNI18SwXiy0TbN8aMpwr9tllOBYtjrWoX4xCj8NJksl1EAYaE2Cwy768mSfO9FTMqGQpuG5S6M9BRsG5pOtZdKpxjyP8vJ1ahp8KDI9Mz8QJSfG6kOHXqCaed+MmJNxJYI0=
+    secure: "nksXOY7p8vAWDpItN9Tyx+0CmOPMj/iAgH+iT512URpgJG/i+ziUWDEYpQO4PfZMJUDUa1tnSZ31O4MIe2Sgfj6DHR1zK+LKeLaZxuxxJUSMXSAkbIXcjFlOPKQBPnMZVVcDaHMxz18jiRpElDR2k0PIEtspW2rDsrr+7mzmQn7pan60k77tU3RG3K7fYgMmNjVv64XqMBSCS3fpqiroIz7rVL1HZ3sCoTNnxDM8nXo/8gTjlVowTvUTsVyHRgtDRJdlPuI0yf4oJmvQPX74P2OkQmOVpGxeJ/gSTJ1xWxYfMgyvNaiO9NKF+fUfxvHR/V58CfBHPdJkcnThV5KIPjE5mHZfSTFf5cG6gJtnVhvhQV7vBhIRI/iCt55SPCXse1HWzTY1GxE5oXw2VzUt/kzD2pFf8rtf64JURgGolenYv3aw+ps1MGUwUjl8CF31XBSiASVwpif7kd9P3bafg6pGUytfjgpV/wJJc8OpO8IGwTSNe4r0wtcFb92stxta4NKC3L4F0w/juaK+0+Mjt4SCyh6rRzpHQu9TJKniskp7/URp5KhMFAo66sFpgSYVa23OTkYmjtB8IqlJzmpuDSs/WSAVA8InSgHDaQeBd0UEbNaWU1+avtAGBtb8+rZnbw7ikPF0j2pHImD5ZjHp7+jt/hpcwqrOkBuB5CSeBKs="
   file_glob: true
   file: $CRATE_NAME-$TRAVIS_TAG-$TARGET.*
   on:
@@ -54,6 +54,7 @@ branches:
     # release tags
     - /^v\d+\.\d+\.\d+.*$/
     - master
+    - next

 notifications:
   email: false
@@ -1,5 +1,52 @@
 # Changelog

+## 0.5.0 (2018-11-17)
+
+### Breaking
+- Gutenberg has been renamed `zola`!
+- The `pagers` variable of Paginator objects has been removed
+- `section.subsections` is now an array of paths to be used with the `get_section`
+  Tera function
+- The table of contents now strips HTML from titles to avoid various issues
+- The `gutenberg-anchor` CSS class has been renamed `zola-anchor`
+- `data` is now a reserved variable name in templates; it is unused right now but
+  that might change soon
+
+### Others
+- Many times faster (5x-40x) for most sites
+- Update dependencies, fixing a few bugs with templates
+- Load only `.html` files in themes from the templates folder
+- Background colour is set fewer times when highlighting syntaxes, resulting in smaller HTML filesize
+- Link checker will not try to validate email links anymore
+- Load table and footnote markdown extensions in the `markdown` filter
+- `get_url` now defaults to not adding a trailing slash
+- Fix `--base-url` not overriding processed image URLs
+- Add more Emacs temp files to the ignored patterns in `gutenberg serve`
+- Files starting with `.` are not considered pages anymore, even if they end with `.md`
+- The `_processed_images` folder for image processing has been renamed `processed_images` to avoid issues with GitHub Pages
+- Syntax highlighting default was mistakenly `true`; it has been set to `false`
+- Add NO_COLOR and CLICOLOR support for toggling colours in CLI output (a sketch of the convention follows this list)
+- Fix the `robots.txt` template not being used
+- The RSS feed now takes all available articles by default instead of limiting to 10000
+- The `templates` directory is now optional
+- Add Reason and F# syntax highlighting
+- Add `ancestors` to pages and sections, pointing to the relative paths of all ancestor
+  sections up to the index, to be used with the `get_section` Tera function
+- Add a `load_data` Tera function to load local CSV/TOML/JSON files
+- Add `relative_path` to pages and sections in templates
+- Do not add a trailing slash to RSS permalinks
+- `serve` will now try ports other than 1111 rather than panicking
+- Ensure the content directory exists before rendering aliases
+- Do not include drafts in pagination
+- Page filenames starting with a date will now use that date as the page date if there isn't one defined in the front-matter
+- Accept markdown files starting with a BOM
+- Add a `watch-only` flag to the `serve` command for when you don't want a webserver
+- Add `transparent` sections, for when you want to separate content by sections but group them at a higher
+  level (think a `posts` folder with year subfolders that wants to use pagination on the index)
+- Add `page_template` to section front-matter for when you want to specify the template to use for every page under it
+- Improvements to `zola serve`: directory renames are now handled
+
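As a rough illustration of the NO_COLOR/CLICOLOR changelog item above, the following is how a CLI conventionally decides whether to emit colours. This is a hedged sketch of the convention, not zola's actual implementation; it assumes only the `atty` crate that this diff adds as a dependency.

```rust
// Assumes atty = "0.2" in Cargo.toml.
extern crate atty;

use std::env;

/// Decide whether to colourise CLI output, following the informal
/// NO_COLOR / CLICOLOR conventions.
fn use_colours() -> bool {
    // NO_COLOR set to anything disables colours entirely.
    if env::var_os("NO_COLOR").is_some() {
        return false;
    }
    // CLICOLOR_FORCE set to something other than "0" forces colours on.
    if env::var("CLICOLOR_FORCE").map(|v| v != "0").unwrap_or(false) {
        return true;
    }
    // CLICOLOR=0 disables colours.
    if env::var("CLICOLOR").map(|v| v == "0").unwrap_or(false) {
        return false;
    }
    // Otherwise, colourise only when stdout is a terminal.
    atty::is(atty::Stream::Stdout)
}

fn main() {
    println!("colours enabled: {}", use_colours());
}
```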
 ## 0.4.2 (2018-09-03)
 - Add assets to section indexes
@@ -1,12 +1,12 @@
 [package]
-name = "gutenberg"
-version = "0.4.2"
+name = "zola"
+version = "0.5.0"
 authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
 license = "MIT"
 readme = "README.md"
 description = "A fast static site generator with everything built-in"
-homepage = "https://github.com/Keats/gutenberg"
-repository = "https://github.com/Keats/gutenberg"
+homepage = "https://www.getzola.org"
+repository = "https://github.com/getzola/zola"
 keywords = ["static", "site", "generator", "blog"]
 # build = "build.rs"
@@ -14,13 +14,15 @@ keywords = ["static", "site", "generator", "blog"]
 clap = "2"

 [[bin]]
-name = "gutenberg"
+name = "zola"

 [dependencies]
+atty = "0.2.11"
 clap = "2"
 chrono = "0.4"
+lazy_static = "1.1.0"
 toml = "0.4"
-term-painter = "0.2"
+termcolor = "1.0.4"
 # Used in init to ensure the url given as base_url is a valid one
 url = "1.5"
 # Below is for the serve cmd
@@ -31,7 +33,6 @@ ctrlc = "3"
 site = { path = "components/site" }
 errors = { path = "components/errors" }
-content = { path = "components/content" }
 front_matter = { path = "components/front_matter" }
 utils = { path = "components/utils" }
 rebuild = { path = "components/rebuild" }
@@ -39,18 +40,18 @@ rebuild = { path = "components/rebuild" }
 [workspace]
 members = [
     "components/config",
-    "components/content",
     "components/errors",
     "components/front_matter",
-    "components/highlighting",
-    "components/pagination",
     "components/rebuild",
     "components/rendering",
     "components/site",
-    "components/taxonomies",
     "components/templates",
     "components/utils",
     "components/search",
     "components/imageproc",
     "components/link_checker",
+    "components/library",
 ]
+
+#[profile.release]
+#debug = true
@@ -1,15 +1,15 @@
-# Gutenberg
-[![Build Status](https://travis-ci.org/Keats/gutenberg.svg?branch=master)](https://travis-ci.org/Keats/gutenberg)
-[![Build status](https://ci.appveyor.com/api/projects/status/h4t9r6h5gom839q0/branch/master?svg=true)](https://ci.appveyor.com/project/Keats/gutenberg/branch/master)
+# zola (né Gutenberg)
+[![Build Status](https://travis-ci.org/getzola/zola.svg?branch=master)](https://travis-ci.org/getzola/zola)
+[![Build status](https://ci.appveyor.com/api/projects/status/i0ufvx2sdm2cmawo/branch/master?svg=true)](https://ci.appveyor.com/project/Keats/zola/branch/master)

 A fast static site generator in a single binary with everything built-in.

-Documentation is available on [its site](https://www.getgutenberg.io/documentation/getting-started/installation/) or
+Documentation is available on [its site](https://www.getzola.org/documentation/getting-started/installation/) or
 in the `docs/content` folder of the repository.

 ## Comparisons with other static site generators

-|                                 | Gutenberg | Cobalt | Hugo | Pelican |
+|                                 | Zola      | Cobalt | Hugo | Pelican |
 |:-------------------------------:|:---------:|--------|------|---------|
 | Single binary                   | ✔         | ✔      | ✔    | ✕       |
 | Language                        | Rust      | Rust   | Go   | Python  |
@@ -29,14 +29,15 @@ in the `docs/content` folder of the repository.
 | Pagination                      | ✔         | ✕      | ✔    | ✔       |
 | Custom taxonomies               | ✔         | ✕      | ✔    | ✕       |
 | Search                          | ✔         | ✕      | ✕    | ✔       |
-| Data files                      | ✕         | ✔      | ✔    | ✕       |
+| Data files                      | ✔         | ✔      | ✔    | ✕       |
 | LiveReload                      | ✔         | ✕      | ✔    | ✔       |
-| Netlify support                 | ✔         | ✕      | ✔    | ✕       |
+| Netlify support                 | ~         | ✕      | ✔    | ✕       |
+| Breadcrumbs                     | ✔         | ✕      | ✕    | ✔       |

 ### Supported content formats

-- Gutenberg: markdown
+- Zola: markdown
 - Cobalt: markdown
 - Hugo: markdown, asciidoc, org-mode
 - Pelican: reStructuredText, markdown, asciidoc, org-mode, whatever-you-want
@@ -67,7 +68,7 @@ Syntax highlighting depends on submodules so ensure you load them first:
 $ git submodule update --init
 ```

-Gutenberg only works with syntaxes in the `.sublime-syntax` format. If your syntax
+Zola only works with syntaxes in the `.sublime-syntax` format. If your syntax
 is in `.tmLanguage` format, open it in Sublime Text and convert it to `sublime-syntax` by clicking on
 Tools > Developer > New Syntax from ... and put it at the root of `sublime_syntaxes`.
@@ -79,7 +80,7 @@ $ git submodule add https://github.com/elm-community/SublimeElmLanguageSupport
 ```

 Note that you can also just copy the updated syntax definition file manually, but this means
-Gutenberg won't be able to automatically update it.
+Zola won't be able to update it automatically.

 You can check for any updates to the current packages by running:

@@ -87,7 +88,7 @@ You can check for any updates to the current packages by running:
 $ git submodule update --remote --merge
 ```

-And finally from the root of the components/highlighting crate run the following command:
+And finally, from the root of the components/config crate, run the following command:

 ```bash
 $ cargo run --example generate_sublime synpack ../../sublime_syntaxes ../../sublime_syntaxes/newlines.packdump
@@ -95,8 +96,8 @@ $ cargo run --example generate_sublime synpack ../../sublime_syntaxes ../../subl
 #### Adding a theme

 A gallery containing lots of themes is located at https://tmtheme-editor.herokuapp.com/#!/editor/theme/Agola%20Dark.
-More themes can be easily added to gutenberg, just make a PR with the wanted theme added in the `sublime_themes` directory
-and run the following command from the root of the components/rendering:
+More themes can easily be added to Zola: open a PR with the desired theme added to the `sublime_themes` directory
+and run the following command from the root of the components/config crate:

 ```bash
 $ cargo run --example generate_sublime themepack ../../sublime_themes ../../sublime_themes/all.themedump
@@ -6,11 +6,11 @@ os: Visual Studio 2017
 environment:
   global:
     RUST_VERSION: stable
-    CRATE_NAME: gutenberg
+    CRATE_NAME: zola

   matrix:
     - target: x86_64-pc-windows-msvc
-      RUST_VERSION: 1.27.0
+      RUST_VERSION: 1.29.0
     - target: x86_64-pc-windows-msvc
       RUST_VERSION: stable
@@ -29,13 +29,13 @@ test_script:
   )

 before_deploy:
-  - cargo rustc --target %TARGET% --release --bin gutenberg -- -C lto
+  - cargo rustc --target %TARGET% --release --bin zola -- -C lto
   - ps: ci\before_deploy.ps1

 deploy:
   artifact: /.*\.zip/
   auth_token:
-    secure: YCRPSTItx+m/3jnDfai52dEZNLYUTSEExF2lZoffULDzlv/t2jOR1fzSSIEi/xyB
+    secure: i64eFOHoySQryE3M9pr2JGRukAK3LGltOsUxeFHwilS+3O6/6828A4NUmI0FW4zN
   description: ''
   on:
     RUST_VERSION: stable
@@ -51,6 +51,7 @@ branches:
     # Release tags
     - /^v\d+\.\d+\.\d+.*$/
     - master
+    - next

 # disable automatic builds
 build: false
@@ -7,9 +7,9 @@ include!("src/cli.rs");
 fn main() {
     // disabled below as it fails in CI
-    // let mut app = build_cli();
-    // app.gen_completions("gutenberg", Shell::Bash, "completions/");
-    // app.gen_completions("gutenberg", Shell::Fish, "completions/");
-    // app.gen_completions("gutenberg", Shell::Zsh, "completions/");
-    // app.gen_completions("gutenberg", Shell::PowerShell, "completions/");
+    // let mut app = build_cli();
+    // app.gen_completions("zola", Shell::Bash, "completions/");
+    // app.gen_completions("zola", Shell::Fish, "completions/");
+    // app.gen_completions("zola", Shell::Zsh, "completions/");
+    // app.gen_completions("zola", Shell::PowerShell, "completions/");
 }
@@ -10,8 +10,7 @@ Set-Location $STAGE

 $ZIP = "$SRC_DIR\$($Env:CRATE_NAME)-$($Env:APPVEYOR_REPO_TAG_NAME)-$($Env:TARGET).zip"

-# TODO Update this to package the right artifacts
-Copy-Item "$SRC_DIR\target\$($Env:TARGET)\release\gutenberg.exe" '.\'
+Copy-Item "$SRC_DIR\target\$($Env:TARGET)\release\zola.exe" '.\'

 7z a "$ZIP" *
@@ -17,11 +17,9 @@ main() {
     test -f Cargo.lock || cargo generate-lockfile

-    # TODO Update this to build the artifacts that matter to you
-    cross rustc --bin gutenberg --target $TARGET --release -- -C lto
+    cross rustc --bin zola --target $TARGET --release -- -C lto

-    # TODO Update this to package the right artifacts
-    cp target/$TARGET/release/gutenberg $stage/
+    cp target/$TARGET/release/zola $stage/

     cd $stage
     tar czf $src/$CRATE_NAME-$TRAVIS_TAG-$TARGET.tar.gz *
@@ -4,13 +4,11 @@ set -ex
 # TODO This is the "test phase", tweak it as you see fit
 main() {
-    cross build --target $TARGET --release
-
     if [ ! -z $DISABLE_TESTS ]; then
         return
     fi

-    cross test --all --target $TARGET --release
+    cross test --all --target $TARGET
 }

 # we don't run the "test phase" when doing deploys
@@ -1,8 +1,8 @@
-#compdef gutenberg
+#compdef zola

 autoload -U is-at-least

-_gutenberg() {
+_zola() {
     typeset -A opt_args
     typeset -a _arguments_options
     local ret=1
@@ -21,14 +21,14 @@ _gutenberg() {
 '--help[Prints help information]' \
 '-V[Prints version information]' \
 '--version[Prints version information]' \
-":: :_gutenberg_commands" \
-"*::: :->gutenberg" \
+":: :_zola_commands" \
+"*::: :->zola" \
 && ret=0
     case $state in
-    (gutenberg)
+    (zola)
         words=($line[1] "${words[@]}")
         (( CURRENT += 1 ))
-        curcontext="${curcontext%:*:*}:gutenberg-command-$line[1]:"
+        curcontext="${curcontext%:*:*}:zola-command-$line[1]:"
         case $line[1] in
             (init)
 _arguments "${_arguments_options[@]}" \
@@ -61,6 +61,7 @@ _arguments "${_arguments_options[@]}" \
 '--output-dir=[Outputs the generated site in the given path]' \
 '-u+[Changes the base_url]' \
 '--base-url=[Changes the base_url]' \
+'--watch-only[Do not start a server, just re-build project on changes]' \
 '-h[Prints help information]' \
 '--help[Prints help information]' \
 '-V[Prints version information]' \
@@ -80,43 +81,43 @@ _arguments "${_arguments_options[@]}" \
 esac
 }

-(( $+functions[_gutenberg_commands] )) ||
-_gutenberg_commands() {
+(( $+functions[_zola_commands] )) ||
+_zola_commands() {
     local commands; commands=(
-        "init:Create a new Gutenberg project" \
+        "init:Create a new Zola project" \
         "build:Builds the site" \
         "serve:Serve the site. Rebuild and reload on change automatically" \
         "help:Prints this message or the help of the given subcommand(s)" \
     )
-    _describe -t commands 'gutenberg commands' commands "$@"
+    _describe -t commands 'zola commands' commands "$@"
 }
-(( $+functions[_gutenberg__build_commands] )) ||
-_gutenberg__build_commands() {
+(( $+functions[_zola__build_commands] )) ||
+_zola__build_commands() {
     local commands; commands=(
     )
-    _describe -t commands 'gutenberg build commands' commands "$@"
+    _describe -t commands 'zola build commands' commands "$@"
 }
-(( $+functions[_gutenberg__help_commands] )) ||
-_gutenberg__help_commands() {
+(( $+functions[_zola__help_commands] )) ||
+_zola__help_commands() {
     local commands; commands=(
     )
-    _describe -t commands 'gutenberg help commands' commands "$@"
+    _describe -t commands 'zola help commands' commands "$@"
 }
-(( $+functions[_gutenberg__init_commands] )) ||
-_gutenberg__init_commands() {
+(( $+functions[_zola__init_commands] )) ||
+_zola__init_commands() {
     local commands; commands=(
     )
-    _describe -t commands 'gutenberg init commands' commands "$@"
+    _describe -t commands 'zola init commands' commands "$@"
 }
-(( $+functions[_gutenberg__serve_commands] )) ||
-_gutenberg__serve_commands() {
+(( $+functions[_zola__serve_commands] )) ||
+_zola__serve_commands() {
     local commands; commands=(
     )
-    _describe -t commands 'gutenberg serve commands' commands "$@"
+    _describe -t commands 'zola serve commands' commands "$@"
 }

-_gutenberg "$@"
+_zola "$@"
@@ -2,12 +2,12 @@
 using namespace System.Management.Automation
 using namespace System.Management.Automation.Language

-Register-ArgumentCompleter -Native -CommandName 'gutenberg' -ScriptBlock {
+Register-ArgumentCompleter -Native -CommandName 'zola' -ScriptBlock {
     param($wordToComplete, $commandAst, $cursorPosition)

     $commandElements = $commandAst.CommandElements
     $command = @(
-        'gutenberg'
+        'zola'
         for ($i = 1; $i -lt $commandElements.Count; $i++) {
             $element = $commandElements[$i]
             if ($element -isnot [StringConstantExpressionAst] -or
@@ -19,27 +19,27 @@ Register-ArgumentCompleter -Native -CommandName 'gutenberg' -ScriptBlock {
         }) -join ';'

     $completions = @(switch ($command) {
-        'gutenberg' {
+        'zola' {
             [CompletionResult]::new('-c', 'c', [CompletionResultType]::ParameterName, 'Path to a config file other than config.toml')
             [CompletionResult]::new('--config', 'config', [CompletionResultType]::ParameterName, 'Path to a config file other than config.toml')
             [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Prints help information')
             [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Prints help information')
             [CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Prints version information')
             [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Prints version information')
-            [CompletionResult]::new('init', 'init', [CompletionResultType]::ParameterValue, 'Create a new Gutenberg project')
+            [CompletionResult]::new('init', 'init', [CompletionResultType]::ParameterValue, 'Create a new Zola project')
             [CompletionResult]::new('build', 'build', [CompletionResultType]::ParameterValue, 'Builds the site')
             [CompletionResult]::new('serve', 'serve', [CompletionResultType]::ParameterValue, 'Serve the site. Rebuild and reload on change automatically')
             [CompletionResult]::new('help', 'help', [CompletionResultType]::ParameterValue, 'Prints this message or the help of the given subcommand(s)')
             break
         }
-        'gutenberg;init' {
+        'zola;init' {
             [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Prints help information')
             [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Prints help information')
             [CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Prints version information')
             [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Prints version information')
             break
         }
-        'gutenberg;build' {
+        'zola;build' {
             [CompletionResult]::new('-u', 'u', [CompletionResultType]::ParameterName, 'Force the base URL to be that value (default to the one in config.toml)')
             [CompletionResult]::new('--base-url', 'base-url', [CompletionResultType]::ParameterName, 'Force the base URL to be that value (default to the one in config.toml)')
             [CompletionResult]::new('-o', 'o', [CompletionResultType]::ParameterName, 'Outputs the generated site in the given path')
@@ -50,7 +50,7 @@ Register-ArgumentCompleter -Native -CommandName 'gutenberg' -ScriptBlock {
             [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Prints version information')
             break
         }
-        'gutenberg;serve' {
+        'zola;serve' {
             [CompletionResult]::new('-i', 'i', [CompletionResultType]::ParameterName, 'Interface to bind on')
             [CompletionResult]::new('--interface', 'interface', [CompletionResultType]::ParameterName, 'Interface to bind on')
             [CompletionResult]::new('-p', 'p', [CompletionResultType]::ParameterName, 'Which port to use')
@@ -59,13 +59,14 @@ Register-ArgumentCompleter -Native -CommandName 'gutenberg' -ScriptBlock {
             [CompletionResult]::new('--output-dir', 'output-dir', [CompletionResultType]::ParameterName, 'Outputs the generated site in the given path')
             [CompletionResult]::new('-u', 'u', [CompletionResultType]::ParameterName, 'Changes the base_url')
             [CompletionResult]::new('--base-url', 'base-url', [CompletionResultType]::ParameterName, 'Changes the base_url')
+            [CompletionResult]::new('--watch-only', 'watch-only', [CompletionResultType]::ParameterName, 'Do not start a server, just re-build project on changes')
             [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Prints help information')
             [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Prints help information')
             [CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Prints version information')
             [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Prints version information')
             break
         }
-        'gutenberg;help' {
+        'zola;help' {
             [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Prints help information')
             [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Prints help information')
             [CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Prints version information')
@@ -1,137 +0,0 @@
-_gutenberg() {
-    local i cur prev opts cmds
-    COMPREPLY=()
-    cur="${COMP_WORDS[COMP_CWORD]}"
-    prev="${COMP_WORDS[COMP_CWORD-1]}"
-    cmd=""
-    opts=""
-
-    for i in ${COMP_WORDS[@]}
-    do
-        case "${i}" in
-            gutenberg)
-                cmd="gutenberg"
-                ;;
-            build)
-                cmd+="__build"
-                ;;
-            help)
-                cmd+="__help"
-                ;;
-            init)
-                cmd+="__init"
-                ;;
-            serve)
-                cmd+="__serve"
-                ;;
-            *)
-                ;;
-        esac
-    done
-
-    case "${cmd}" in
-        gutenberg)
-            opts=" -c -h -V --config --help --version init build serve help"
-            if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
-                COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
-                return 0
-            fi
-            case "${prev}" in
-                *)
-                    COMPREPLY=()
-                    ;;
-            esac
-            COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
-            return 0
-            ;;
-        gutenberg__build)
-            opts=" -h -V -u --help --version --base-url "
-            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
-                COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
-                return 0
-            fi
-            case "${prev}" in
-                --base-url)
-                    COMPREPLY=("<base_url>")
-                    return 0
-                    ;;
-                -u)
-                    COMPREPLY=("<base_url>")
-                    return 0
-                    ;;
-                *)
-                    COMPREPLY=()
-                    ;;
-            esac
-            COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
-            return 0
-            ;;
-        gutenberg__help)
-            opts=" -h -V --help --version "
-            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
-                COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
-                return 0
-            fi
-            case "${prev}" in
-                *)
-                    COMPREPLY=()
-                    ;;
-            esac
-            COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
-            return 0
-            ;;
-        gutenberg__init)
-            opts=" -h -V --help --version <name> "
-            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
-                COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
-                return 0
-            fi
-            case "${prev}" in
-                *)
-                    COMPREPLY=()
-                    ;;
-            esac
-            COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
-            return 0
-            ;;
-        gutenberg__serve)
-            opts=" -h -V -i -p --help --version --interface --port "
-            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
-                COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
-                return 0
-            fi
-            case "${prev}" in
-                --interface)
-                    COMPREPLY=("<interface>")
-                    return 0
-                    ;;
-                -i)
-                    COMPREPLY=("<interface>")
-                    return 0
-                    ;;
-                --port)
-                    COMPREPLY=("<port>")
-                    return 0
-                    ;;
-                -p)
-                    COMPREPLY=("<port>")
-                    return 0
-                    ;;
-                *)
-                    COMPREPLY=()
-                    ;;
-            esac
-            COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
-            return 0
-            ;;
-    esac
-}
-
-complete -F _gutenberg -o bashdefault -o default gutenberg
@@ -1,34 +0,0 @@
-function __fish_using_command
-    set cmd (commandline -opc)
-    if [ (count $cmd) -eq (count $argv) ]
-        for i in (seq (count $argv))
-            if [ $cmd[$i] != $argv[$i] ]
-                return 1
-            end
-        end
-        return 0
-    end
-    return 1
-end
-
-complete -c gutenberg -n "__fish_using_command gutenberg" -s c -l config -d 'Path to a config file other than config.toml'
-complete -c gutenberg -n "__fish_using_command gutenberg" -s h -l help -d 'Prints help information'
-complete -c gutenberg -n "__fish_using_command gutenberg" -s V -l version -d 'Prints version information'
-complete -c gutenberg -n "__fish_using_command gutenberg" -f -a "init" -d 'Create a new Gutenberg project'
-complete -c gutenberg -n "__fish_using_command gutenberg" -f -a "build" -d 'Builds the site'
-complete -c gutenberg -n "__fish_using_command gutenberg" -f -a "serve" -d 'Serve the site. Rebuild and reload on change automatically'
-complete -c gutenberg -n "__fish_using_command gutenberg" -f -a "help" -d 'Prints this message or the help of the given subcommand(s)'
-complete -c gutenberg -n "__fish_using_command gutenberg init" -s h -l help -d 'Prints help information'
-complete -c gutenberg -n "__fish_using_command gutenberg init" -s V -l version -d 'Prints version information'
-complete -c gutenberg -n "__fish_using_command gutenberg build" -s u -l base-url -d 'Force the base URL to be that value (default to the one in config.toml)'
-complete -c gutenberg -n "__fish_using_command gutenberg build" -s o -l output-dir -d 'Outputs the generated site in the given path'
-complete -c gutenberg -n "__fish_using_command gutenberg build" -s h -l help -d 'Prints help information'
-complete -c gutenberg -n "__fish_using_command gutenberg build" -s V -l version -d 'Prints version information'
-complete -c gutenberg -n "__fish_using_command gutenberg serve" -s i -l interface -d 'Interface to bind on'
-complete -c gutenberg -n "__fish_using_command gutenberg serve" -s p -l port -d 'Which port to use'
-complete -c gutenberg -n "__fish_using_command gutenberg serve" -s o -l output-dir -d 'Outputs the generated site in the given path'
-complete -c gutenberg -n "__fish_using_command gutenberg serve" -s u -l base-url -d 'Changes the base_url'
-complete -c gutenberg -n "__fish_using_command gutenberg serve" -s h -l help -d 'Prints help information'
-complete -c gutenberg -n "__fish_using_command gutenberg serve" -s V -l version -d 'Prints version information'
-complete -c gutenberg -n "__fish_using_command gutenberg help" -s h -l help -d 'Prints help information'
-complete -c gutenberg -n "__fish_using_command gutenberg help" -s V -l version -d 'Prints version information'
@@ -1,4 +1,4 @@
-_gutenberg() {
+_zola() {
     local i cur prev opts cmds
     COMPREPLY=()
     cur="${COMP_WORDS[COMP_CWORD]}"
@@ -9,8 +9,8 @@ _gutenberg() {
     for i in ${COMP_WORDS[@]}
     do
         case "${i}" in
-            gutenberg)
-                cmd="gutenberg"
+            zola)
+                cmd="zola"
                 ;;
             build)
@@ -31,7 +31,7 @@ _gutenberg() {
     done

     case "${cmd}" in
-        gutenberg)
+        zola)
             opts=" -h -V -c --help --version --config init build serve help"
             if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
                 COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
@@ -55,7 +55,7 @@ _gutenberg() {
             return 0
             ;;
-        gutenberg__build)
+        zola__build)
             opts=" -h -V -u -o --help --version --base-url --output-dir "
             if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                 COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
@@ -86,7 +86,7 @@ _gutenberg() {
             COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
             return 0
             ;;
-        gutenberg__help)
+        zola__help)
             opts=" -h -V --help --version "
             if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                 COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
@@ -101,7 +101,7 @@ _gutenberg() {
             COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
             return 0
             ;;
-        gutenberg__init)
+        zola__init)
             opts=" -h -V --help --version <name> "
             if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                 COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
@@ -116,8 +116,8 @@ _gutenberg() {
             COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
             return 0
             ;;
-        gutenberg__serve)
-            opts=" -h -V -i -p -o -u --help --version --interface --port --output-dir --base-url "
+        zola__serve)
+            opts=" -h -V -i -p -o -u --watch-only --help --version --interface --port --output-dir --base-url "
             if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                 COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
                 return 0
@@ -166,4 +166,4 @@ _gutenberg() {
     esac
 }

-complete -F _gutenberg -o bashdefault -o default gutenberg
+complete -F _zola -o bashdefault -o default zola
@@ -0,0 +1,22 @@
+complete -c zola -n "__fish_use_subcommand" -s c -l config -d 'Path to a config file other than config.toml'
+complete -c zola -n "__fish_use_subcommand" -s h -l help -d 'Prints help information'
+complete -c zola -n "__fish_use_subcommand" -s V -l version -d 'Prints version information'
+complete -c zola -n "__fish_use_subcommand" -f -a "init" -d 'Create a new Zola project'
+complete -c zola -n "__fish_use_subcommand" -f -a "build" -d 'Builds the site'
+complete -c zola -n "__fish_use_subcommand" -f -a "serve" -d 'Serve the site. Rebuild and reload on change automatically'
+complete -c zola -n "__fish_use_subcommand" -f -a "help" -d 'Prints this message or the help of the given subcommand(s)'
+complete -c zola -n "__fish_seen_subcommand_from init" -s h -l help -d 'Prints help information'
+complete -c zola -n "__fish_seen_subcommand_from init" -s V -l version -d 'Prints version information'
+complete -c zola -n "__fish_seen_subcommand_from build" -s u -l base-url -d 'Force the base URL to be that value (default to the one in config.toml)'
+complete -c zola -n "__fish_seen_subcommand_from build" -s o -l output-dir -d 'Outputs the generated site in the given path'
+complete -c zola -n "__fish_seen_subcommand_from build" -s h -l help -d 'Prints help information'
+complete -c zola -n "__fish_seen_subcommand_from build" -s V -l version -d 'Prints version information'
+complete -c zola -n "__fish_seen_subcommand_from serve" -s i -l interface -d 'Interface to bind on'
+complete -c zola -n "__fish_seen_subcommand_from serve" -s p -l port -d 'Which port to use'
+complete -c zola -n "__fish_seen_subcommand_from serve" -s o -l output-dir -d 'Outputs the generated site in the given path'
+complete -c zola -n "__fish_seen_subcommand_from serve" -s u -l base-url -d 'Changes the base_url'
+complete -c zola -n "__fish_seen_subcommand_from serve" -l watch-only -d 'Do not start a server, just re-build project on changes'
+complete -c zola -n "__fish_seen_subcommand_from serve" -s h -l help -d 'Prints help information'
+complete -c zola -n "__fish_seen_subcommand_from serve" -s V -l version -d 'Prints version information'
+complete -c zola -n "__fish_seen_subcommand_from help" -s h -l help -d 'Prints help information'
+complete -c zola -n "__fish_seen_subcommand_from help" -s V -l version -d 'Prints version information'
@@ -9,6 +9,7 @@ serde = "1"
 serde_derive = "1"
 chrono = "0.4"
 globset = "0.4"
+lazy_static = "1"
+syntect = "3"

 errors = { path = "../errors" }
-highlighting = { path = "../highlighting"}
@@ -3,10 +3,10 @@
 //! Although it is a valid example for serializing syntaxes, you probably won't need
 //! to do this yourself unless you want to cache your own compiled grammars.
 extern crate syntect;
-use syntect::parsing::SyntaxSet;
-use syntect::highlighting::ThemeSet;
-use syntect::dumps::*;
 use std::env;
+use syntect::dumps::*;
+use syntect::highlighting::ThemeSet;
+use syntect::parsing::SyntaxSetBuilder;

 fn usage_and_exit() -> ! {
     println!("USAGE: cargo run --example generate_sublime synpack source-dir newlines.packdump nonewlines.packdump\n
@@ -14,24 +14,25 @@ fn usage_and_exit() -> ! {
     ::std::process::exit(2);
 }

-// Not an example of Gutenberg but is used to generate the theme and syntax dump
+// Not an example of zola but is used to generate the theme and syntax dump
 // used for syntax highlighting.
 // Check README for more details
 fn main() {
     let mut args = env::args().skip(1);
     match (args.next(), args.next(), args.next()) {
         (Some(ref cmd), Some(ref package_dir), Some(ref packpath_newlines)) if cmd == "synpack" => {
-            let mut ps = SyntaxSet::new();
-            ps.load_plain_text_syntax();
-            ps.load_syntaxes(package_dir, true).unwrap();
-            dump_to_file(&ps, packpath_newlines).unwrap();
+            let mut builder = SyntaxSetBuilder::new();
+            builder.add_plain_text_syntax();
+            builder.add_from_folder(package_dir, true).unwrap();
+            let ss = builder.build();
+            dump_to_file(&ss, packpath_newlines).unwrap();

-            for s in ps.syntaxes() {
+            for s in ss.syntaxes() {
                 if !s.file_extensions.is_empty() {
                     println!("- {} -> {:?}", s.name, s.file_extensions);
                 }
             }
-        },
+        }
         (Some(ref cmd), Some(ref theme_dir), Some(ref packpath)) if cmd == "themepack" => {
             let ts = ThemeSet::load_from_folder(theme_dir).unwrap();
             for path in ts.themes.keys() {
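The hunk above tracks syntect 3's move from a mutable `SyntaxSet` to a two-phase builder API: syntaxes are collected on a `SyntaxSetBuilder` and then frozen with `build()`. A minimal standalone sketch of that pattern, assuming only the `syntect` 3.x crate (the file paths are placeholders, not the repository's real ones):

```rust
extern crate syntect;

use syntect::dumps::{dump_to_file, from_dump_file};
use syntect::parsing::{SyntaxSet, SyntaxSetBuilder};

fn main() {
    // Phase 1: collect syntaxes with the mutable builder.
    let mut builder = SyntaxSetBuilder::new();
    builder.add_plain_text_syntax();
    // Hypothetical folder of .sublime-syntax files; `true` keeps newlines.
    builder.add_from_folder("sublime_syntaxes", true).unwrap();

    // Phase 2: freeze into an immutable, linked SyntaxSet and dump it.
    let ss: SyntaxSet = builder.build();
    dump_to_file(&ss, "newlines.packdump").unwrap();

    // The dump can later be loaded back without re-parsing the sources.
    let reloaded: SyntaxSet = from_dump_file("newlines.packdump").unwrap();
    assert_eq!(ss.syntaxes().len(), reloaded.syntaxes().len());
}
```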
@@ -0,0 +1,437 @@
+use std::collections::HashMap;
+use std::fs::File;
+use std::io::prelude::*;
+use std::path::{Path, PathBuf};
+
+use chrono::Utc;
+use globset::{Glob, GlobSet, GlobSetBuilder};
+use syntect::parsing::{SyntaxSet, SyntaxSetBuilder};
+use toml;
+use toml::Value as Toml;
+
+use errors::{Result, ResultExt};
+use highlighting::THEME_SET;
+use theme::Theme;
+
+// We want a default base url for tests
+static DEFAULT_BASE_URL: &'static str = "http://a-website.com";
+
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
+#[serde(default)]
+pub struct Taxonomy {
+    /// The name used in the URL, usually the plural
+    pub name: String,
+    /// If this is set, the list of individual taxonomy term pages will be paginated
+    /// by this much
+    pub paginate_by: Option<usize>,
+    pub paginate_path: Option<String>,
+    /// Whether to generate an RSS feed for each taxonomy term; defaults to false
+    pub rss: bool,
+}
+
+impl Taxonomy {
+    pub fn is_paginated(&self) -> bool {
+        if let Some(paginate_by) = self.paginate_by {
+            paginate_by > 0
+        } else {
+            false
+        }
+    }
+}
+
+impl Default for Taxonomy {
+    fn default() -> Taxonomy {
+        Taxonomy { name: String::new(), paginate_by: None, paginate_path: None, rss: false }
+    }
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+#[serde(default)]
+pub struct Config {
+    /// Base URL of the site, the only required config argument
+    pub base_url: String,
+    /// Theme to use
+    pub theme: Option<String>,
+    /// Title of the site. Defaults to None
+    pub title: Option<String>,
+    /// Description of the site
+    pub description: Option<String>,
+    /// The language used in the site. Defaults to "en"
+    pub default_language: String,
+    /// Languages list and translated strings
+    pub translations: HashMap<String, Toml>,
+    /// Whether to highlight all code blocks found in markdown files. Defaults to false
+    pub highlight_code: bool,
+    /// Which theme to use for code highlighting. See Readme for supported themes.
+    /// Defaults to "base16-ocean-dark"
+    pub highlight_theme: String,
+    /// Whether to generate RSS. Defaults to false
+    pub generate_rss: bool,
+    /// The number of articles to include in the RSS feed. Defaults to including all items.
+    pub rss_limit: Option<usize>,
+    pub taxonomies: Vec<Taxonomy>,
+    /// Whether to compile the `sass` directory and output the css files into the static folder
+    pub compile_sass: bool,
+    /// Whether to build the search index for the content
+    pub build_search_index: bool,
+    /// A list of file glob patterns to ignore when processing the content folder. Defaults to none.
+    /// Had to remove the PartialEq derive because GlobSet does not implement it. No impact
+    /// because it's unused anyway (who wants to sort Configs?).
+    pub ignored_content: Vec<String>,
+    #[serde(skip_serializing, skip_deserializing)] // not a typo, 2 are needed
+    pub ignored_content_globset: Option<GlobSet>,
+    /// Whether to check all external links for validity
+    pub check_external_links: bool,
+    /// A list of directories to search for additional `.sublime-syntax` files in.
+    pub extra_syntaxes: Vec<String>,
+    /// The extra syntaxes, compiled into a syntax set
+    #[serde(skip_serializing, skip_deserializing)] // not a typo, 2 are needed
+    pub extra_syntax_set: Option<SyntaxSet>,
+    /// All user params set in [extra] in the config
+    pub extra: HashMap<String, Toml>,
+    /// Set automatically when instantiating the config. Used for cachebusting
+    pub build_timestamp: Option<i64>,
+}
+
+impl Config {
+    /// Parses a string containing TOML to our Config struct.
+    /// Any extra parameter will end up in the extra field.
+    pub fn parse(content: &str) -> Result<Config> {
+        let mut config: Config = match toml::from_str(content) {
+            Ok(c) => c,
+            Err(e) => bail!(e),
+        };
+
+        if config.base_url.is_empty() || config.base_url == DEFAULT_BASE_URL {
+            bail!("A base URL is required in config.toml with key `base_url`");
+        }
+
+        if !THEME_SET.themes.contains_key(&config.highlight_theme) {
+            bail!("Highlight theme {} not available", config.highlight_theme)
+        }
+
+        config.build_timestamp = Some(Utc::now().timestamp());
+
+        if !config.ignored_content.is_empty() {
+            // Convert the file glob strings into a compiled glob set matcher. We want to do this once,
+            // at program initialization, rather than for every page, for example. We arrange for the
+            // globset matcher to always exist (even though it has to be inside an Option at the
+            // moment because of the TOML serializer); if the glob set is empty the `is_match` function
+            // of the globber always returns false.
+            let mut glob_set_builder = GlobSetBuilder::new();
+            for pat in &config.ignored_content {
+                let glob = match Glob::new(pat) {
+                    Ok(g) => g,
+                    Err(e) => bail!("Invalid ignored_content glob pattern: {}, error = {}", pat, e),
+                };
+                glob_set_builder.add(glob);
+            }
+            config.ignored_content_globset =
+                Some(glob_set_builder.build().expect("Bad ignored_content in config file."));
+        }
+
+        Ok(config)
+    }
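The `ignored_content_globset` built above is meant to be compiled once and then queried per candidate path by the content loader, which sits outside this diff. As a minimal sketch of how the `globset` crate's matcher behaves, assuming only `globset = "0.4"` (the patterns here are invented, not zola defaults):

```rust
extern crate globset;

use globset::{Glob, GlobSetBuilder};

fn main() {
    // Compile the patterns once, as Config::parse does above.
    let mut builder = GlobSetBuilder::new();
    builder.add(Glob::new("*.tmp").unwrap());
    builder.add(Glob::new("**/drafts/*").unwrap());
    let set = builder.build().unwrap();

    // An empty GlobSet simply matches nothing, which is why keeping it
    // behind an Option is only a serialization concern, not a logic one.
    assert!(set.is_match("notes.tmp"));
    assert!(set.is_match("content/drafts/wip.md"));
    assert!(!set.is_match("content/post.md"));
}
```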
+    /// Parses a config file from the given path
+    pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Config> {
+        let mut content = String::new();
+        let path = path.as_ref();
+        let file_name = path.file_name().unwrap();
+        File::open(path)
+            .chain_err(|| {
+                format!("No `{:?}` file found. Are you in the right directory?", file_name)
+            })?
+            .read_to_string(&mut content)?;
+
+        Config::parse(&content)
+    }
+
+    /// Attempt to load any extra syntaxes found in the `extra_syntaxes` directories of the config
+    pub fn load_extra_syntaxes(&mut self, base_path: &Path) -> Result<()> {
+        if self.extra_syntaxes.is_empty() {
+            return Ok(());
+        }
+
+        let mut ss = SyntaxSetBuilder::new();
+        for dir in &self.extra_syntaxes {
+            ss.add_from_folder(base_path.join(dir), true)?;
+        }
+        self.extra_syntax_set = Some(ss.build());
+
+        Ok(())
+    }
+
+    /// Makes a url, taking into account that the base url might have a trailing slash
+    pub fn make_permalink(&self, path: &str) -> String {
+        let trailing_bit = if path.ends_with('/') || path.ends_with("rss.xml") || path.is_empty() {
+            ""
+        } else {
+            "/"
+        };
+
+        // Index section with a base url that has a trailing slash
+        if self.base_url.ends_with('/') && path == "/" {
+            self.base_url.clone()
+        } else if path == "/" {
+            // Index section with a base url that doesn't have a trailing slash
+            format!("{}/", self.base_url)
+        } else if self.base_url.ends_with('/') && path.starts_with('/') {
+            format!("{}{}{}", self.base_url, &path[1..], trailing_bit)
+        } else if self.base_url.ends_with('/') || path.starts_with('/') {
+            format!("{}{}{}", self.base_url, path, trailing_bit)
+        } else {
+            format!("{}/{}{}", self.base_url, path, trailing_bit)
+        }
+    }
+
+    /// Merges the extra data from the theme with the config extra data
+    fn add_theme_extra(&mut self, theme: &Theme) -> Result<()> {
+        // 3-pass merging
+        // 1. save the config to preserve user values
+        let original = self.extra.clone();
+        // 2. inject theme extra values
+        for (key, val) in &theme.extra {
+            self.extra.entry(key.to_string()).or_insert_with(|| val.clone());
+        }
+        // 3. re-apply the original config values
+        for (key, val) in &original {
+            self.extra.entry(key.to_string()).or_insert_with(|| val.clone());
+        }
+
+        Ok(())
+    }
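The three-pass merge above can read backwards at first glance: both loops use `or_insert_with`, so whichever value is already present wins. Since the user's `extra` table is the map being inserted into, theme values only fill keys the user left unset, and the third pass is effectively a safety net. A tiny self-contained illustration of that `entry().or_insert_with()` behaviour using plain `HashMap`s (the keys and values are invented):

```rust
use std::collections::HashMap;

fn main() {
    // User config [extra] values.
    let mut extra = HashMap::new();
    extra.insert("author".to_string(), "me".to_string());

    // Theme-provided defaults.
    let mut theme_extra = HashMap::new();
    theme_extra.insert("author".to_string(), "theme-default".to_string());
    theme_extra.insert("accent".to_string(), "blue".to_string());

    // Inject theme values: or_insert_with only fills keys the user left unset.
    for (key, val) in &theme_extra {
        extra.entry(key.clone()).or_insert_with(|| val.clone());
    }

    assert_eq!(extra["author"], "me");   // user value preserved
    assert_eq!(extra["accent"], "blue"); // theme default filled in
}
```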
/// Parse the theme.toml file and merges the extra data from the theme | |||||
/// with the config extra data | |||||
pub fn merge_with_theme(&mut self, path: &PathBuf) -> Result<()> { | |||||
let theme = Theme::from_file(path)?; | |||||
self.add_theme_extra(&theme) | |||||
} | |||||
} | |||||
impl Default for Config { | |||||
fn default() -> Config { | |||||
Config { | |||||
base_url: DEFAULT_BASE_URL.to_string(), | |||||
title: None, | |||||
description: None, | |||||
theme: None, | |||||
highlight_code: false, | |||||
highlight_theme: "base16-ocean-dark".to_string(), | |||||
default_language: "en".to_string(), | |||||
generate_rss: false, | |||||
rss_limit: None, | |||||
taxonomies: Vec::new(), | |||||
compile_sass: false, | |||||
check_external_links: false, | |||||
build_search_index: false, | |||||
ignored_content: Vec::new(), | |||||
ignored_content_globset: None, | |||||
translations: HashMap::new(), | |||||
extra_syntaxes: Vec::new(), | |||||
extra_syntax_set: None, | |||||
extra: HashMap::new(), | |||||
build_timestamp: Some(1), | |||||
} | |||||
} | |||||
} | |||||
#[cfg(test)] | |||||
mod tests { | |||||
use super::{Config, Theme}; | |||||
#[test] | |||||
fn can_import_valid_config() { | |||||
let config = r#" | |||||
title = "My site" | |||||
base_url = "https://replace-this-with-your-url.com" | |||||
"#; | |||||
let config = Config::parse(config).unwrap(); | |||||
assert_eq!(config.title.unwrap(), "My site".to_string()); | |||||
} | |||||
#[test] | |||||
fn errors_when_invalid_type() { | |||||
let config = r#" | |||||
title = 1 | |||||
base_url = "https://replace-this-with-your-url.com" | |||||
"#; | |||||
let config = Config::parse(config); | |||||
assert!(config.is_err()); | |||||
} | |||||
#[test] | |||||
fn errors_when_missing_required_field() { | |||||
// base_url is required | |||||
let config = r#" | |||||
title = "" | |||||
"#; | |||||
let config = Config::parse(config); | |||||
assert!(config.is_err()); | |||||
} | |||||
#[test] | |||||
fn can_add_extra_values() { | |||||
let config = r#" | |||||
title = "My site" | |||||
base_url = "https://replace-this-with-your-url.com" | |||||
[extra] | |||||
hello = "world" | |||||
"#; | |||||
let config = Config::parse(config); | |||||
assert!(config.is_ok()); | |||||
assert_eq!(config.unwrap().extra.get("hello").unwrap().as_str().unwrap(), "world"); | |||||
} | |||||
#[test] | |||||
fn can_make_url_index_page_with_non_trailing_slash_url() { | |||||
let mut config = Config::default(); | |||||
config.base_url = "http://vincent.is".to_string(); | |||||
assert_eq!(config.make_permalink(""), "http://vincent.is/"); | |||||
} | |||||
#[test] | |||||
fn can_make_url_index_page_with_trailing_slash_url() {
let mut config = Config::default(); | |||||
config.base_url = "http://vincent.is/".to_string(); | |||||
assert_eq!(config.make_permalink(""), "http://vincent.is/"); | |||||
} | |||||
#[test] | |||||
fn can_make_url_with_non_trailing_slash_base_url() { | |||||
let mut config = Config::default(); | |||||
config.base_url = "http://vincent.is".to_string(); | |||||
assert_eq!(config.make_permalink("hello"), "http://vincent.is/hello/"); | |||||
} | |||||
#[test] | |||||
fn can_make_url_with_trailing_slash_path() { | |||||
let mut config = Config::default(); | |||||
config.base_url = "http://vincent.is/".to_string(); | |||||
assert_eq!(config.make_permalink("/hello"), "http://vincent.is/hello/"); | |||||
} | |||||
#[test] | |||||
fn can_make_url_with_localhost() { | |||||
let mut config = Config::default(); | |||||
config.base_url = "http://127.0.0.1:1111".to_string(); | |||||
assert_eq!(config.make_permalink("/tags/rust"), "http://127.0.0.1:1111/tags/rust/"); | |||||
} | |||||
// https://github.com/Keats/gutenberg/issues/486 | |||||
#[test] | |||||
fn doesnt_add_trailing_slash_to_rss() { | |||||
let mut config = Config::default(); | |||||
config.base_url = "http://vincent.is/".to_string(); | |||||
assert_eq!(config.make_permalink("rss.xml"), "http://vincent.is/rss.xml"); | |||||
} | |||||
#[test] | |||||
fn can_merge_with_theme_data_and_preserve_config_value() { | |||||
let config_str = r#" | |||||
title = "My site" | |||||
base_url = "https://replace-this-with-your-url.com" | |||||
[extra] | |||||
hello = "world" | |||||
"#; | |||||
let mut config = Config::parse(config_str).unwrap(); | |||||
let theme_str = r#" | |||||
[extra] | |||||
hello = "foo" | |||||
a_value = 10 | |||||
"#; | |||||
let theme = Theme::parse(theme_str).unwrap(); | |||||
assert!(config.add_theme_extra(&theme).is_ok()); | |||||
let extra = config.extra; | |||||
assert_eq!(extra["hello"].as_str().unwrap(), "world".to_string()); | |||||
assert_eq!(extra["a_value"].as_integer().unwrap(), 10); | |||||
} | |||||
#[test] | |||||
fn can_use_language_configuration() { | |||||
let config = r#" | |||||
base_url = "https://remplace-par-ton-url.fr" | |||||
default_language = "fr" | |||||
[translations] | |||||
[translations.fr] | |||||
title = "Un titre" | |||||
[translations.en] | |||||
title = "A title" | |||||
"#; | |||||
let config = Config::parse(config); | |||||
assert!(config.is_ok()); | |||||
let translations = config.unwrap().translations; | |||||
assert_eq!(translations["fr"]["title"].as_str().unwrap(), "Un titre"); | |||||
assert_eq!(translations["en"]["title"].as_str().unwrap(), "A title"); | |||||
} | |||||
#[test] | |||||
fn missing_ignored_content_results_in_empty_vector_and_empty_globset() { | |||||
let config_str = r#" | |||||
title = "My site" | |||||
base_url = "example.com" | |||||
"#; | |||||
let config = Config::parse(config_str).unwrap(); | |||||
let v = config.ignored_content; | |||||
assert_eq!(v.len(), 0); | |||||
assert!(config.ignored_content_globset.is_none()); | |||||
} | |||||
#[test] | |||||
fn empty_ignored_content_results_in_empty_vector_and_empty_globset() { | |||||
let config_str = r#" | |||||
title = "My site" | |||||
base_url = "example.com" | |||||
ignored_content = [] | |||||
"#; | |||||
let config = Config::parse(config_str).unwrap(); | |||||
assert_eq!(config.ignored_content.len(), 0); | |||||
assert!(config.ignored_content_globset.is_none()); | |||||
} | |||||
#[test] | |||||
fn non_empty_ignored_content_results_in_vector_of_patterns_and_configured_globset() { | |||||
let config_str = r#" | |||||
title = "My site" | |||||
base_url = "example.com" | |||||
ignored_content = ["*.{graphml,iso}", "*.py?"] | |||||
"#; | |||||
let config = Config::parse(config_str).unwrap(); | |||||
let v = config.ignored_content; | |||||
assert_eq!(v, vec!["*.{graphml,iso}", "*.py?"]); | |||||
let g = config.ignored_content_globset.unwrap(); | |||||
assert_eq!(g.len(), 2); | |||||
assert!(g.is_match("foo.graphml")); | |||||
assert!(g.is_match("foo.iso")); | |||||
assert!(!g.is_match("foo.png")); | |||||
assert!(g.is_match("foo.py2")); | |||||
assert!(g.is_match("foo.py3")); | |||||
assert!(!g.is_match("foo.py")); | |||||
} | |||||
} |
@@ -0,0 +1,42 @@
use syntect::dumps::from_binary; | |||||
use syntect::easy::HighlightLines; | |||||
use syntect::highlighting::ThemeSet; | |||||
use syntect::parsing::SyntaxSet; | |||||
use Config; | |||||
lazy_static! { | |||||
pub static ref SYNTAX_SET: SyntaxSet = { | |||||
let ss: SyntaxSet = | |||||
from_binary(include_bytes!("../../../sublime_syntaxes/newlines.packdump")); | |||||
ss | |||||
}; | |||||
pub static ref THEME_SET: ThemeSet = | |||||
from_binary(include_bytes!("../../../sublime_themes/all.themedump")); | |||||
} | |||||
/// Returns the highlighter and whether it was found in the extra syntaxes or not
pub fn get_highlighter<'a>(info: &str, config: &Config) -> (HighlightLines<'a>, bool) { | |||||
let theme = &THEME_SET.themes[&config.highlight_theme]; | |||||
let mut in_extra = false; | |||||
if let Some(ref lang) = info.split(' ').next() { | |||||
let syntax = SYNTAX_SET | |||||
.find_syntax_by_token(lang) | |||||
.or_else(|| { | |||||
if let Some(ref extra) = config.extra_syntax_set { | |||||
let s = extra.find_syntax_by_token(lang); | |||||
if s.is_some() { | |||||
in_extra = true; | |||||
} | |||||
s | |||||
} else { | |||||
None | |||||
} | |||||
}) | |||||
.unwrap_or_else(|| SYNTAX_SET.find_syntax_plain_text()); | |||||
(HighlightLines::new(syntax, theme), in_extra) | |||||
} else { | |||||
(HighlightLines::new(SYNTAX_SET.find_syntax_plain_text(), theme), false) | |||||
} | |||||
} |
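// Illustrative: for a fence such as ```rs, `info` is "rs", so the lookup tries the
// bundled SYNTAX_SET first, then any configured extra syntaxes (setting `in_extra`),
// and finally falls back to plain text when the token is unknown.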
@@ -5,260 +5,16 @@ extern crate toml;
extern crate errors;
extern crate chrono;
extern crate globset;
extern crate highlighting; | |||||
use std::collections::HashMap; | |||||
use std::fs::File; | |||||
use std::io::prelude::*; | |||||
use std::path::{Path, PathBuf}; | |||||
use chrono::Utc; | |||||
use globset::{Glob, GlobSet, GlobSetBuilder}; | |||||
use toml::Value as Toml; | |||||
use errors::{Result, ResultExt}; | |||||
use highlighting::THEME_SET; | |||||
#[macro_use] | |||||
extern crate lazy_static; | |||||
extern crate syntect; | |||||
mod config; | |||||
pub mod highlighting; | |||||
mod theme;
pub use config::{Config, Taxonomy}; | |||||
use theme::Theme; | |||||
// We want a default base url for tests | |||||
static DEFAULT_BASE_URL: &'static str = "http://a-website.com"; | |||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] | |||||
#[serde(default)] | |||||
pub struct Taxonomy { | |||||
/// The name used in the URL, usually the plural | |||||
pub name: String, | |||||
/// If this is set, the list of individual taxonomy term page will be paginated | |||||
/// by this much | |||||
pub paginate_by: Option<usize>, | |||||
pub paginate_path: Option<String>, | |||||
/// Whether to generate a RSS feed only for each taxonomy term, defaults to false | |||||
pub rss: bool, | |||||
} | |||||
impl Taxonomy { | |||||
pub fn is_paginated(&self) -> bool { | |||||
if let Some(paginate_by) = self.paginate_by { | |||||
paginate_by > 0 | |||||
} else { | |||||
false | |||||
} | |||||
} | |||||
} | |||||
impl Default for Taxonomy { | |||||
fn default() -> Taxonomy { | |||||
Taxonomy { | |||||
name: String::new(), | |||||
paginate_by: None, | |||||
paginate_path: None, | |||||
rss: false, | |||||
} | |||||
} | |||||
} | |||||
#[derive(Clone, Debug, Serialize, Deserialize)] | |||||
#[serde(default)] | |||||
pub struct Config { | |||||
/// Base URL of the site, the only required config argument | |||||
pub base_url: String, | |||||
/// Theme to use | |||||
pub theme: Option<String>, | |||||
/// Title of the site. Defaults to None | |||||
pub title: Option<String>, | |||||
/// Description of the site | |||||
pub description: Option<String>, | |||||
/// The language used in the site. Defaults to "en" | |||||
pub default_language: String, | |||||
/// Languages list and translated strings | |||||
pub translations: HashMap<String, Toml>, | |||||
/// Whether to highlight all code blocks found in markdown files. Defaults to false | |||||
pub highlight_code: bool, | |||||
/// Which themes to use for code highlighting. See Readme for supported themes | |||||
/// Defaults to "base16-ocean-dark" | |||||
pub highlight_theme: String, | |||||
/// Whether to generate RSS. Defaults to false | |||||
pub generate_rss: bool, | |||||
/// The number of articles to include in the RSS feed. Defaults to 10_000 | |||||
pub rss_limit: usize, | |||||
pub taxonomies: Vec<Taxonomy>, | |||||
/// Whether to compile the `sass` directory and output the css files into the static folder | |||||
pub compile_sass: bool, | |||||
/// Whether to build the search index for the content | |||||
pub build_search_index: bool, | |||||
/// A list of file glob patterns to ignore when processing the content folder. Defaults to none. | |||||
/// Had to remove the PartialEq derive because GlobSet does not implement it. No impact | |||||
/// because it's unused anyway (who wants to sort Configs?). | |||||
pub ignored_content: Vec<String>, | |||||
#[serde(skip_serializing, skip_deserializing)] // not a typo, 2 are needed | |||||
pub ignored_content_globset: Option<GlobSet>, | |||||
/// Whether to check all external links for validity | |||||
pub check_external_links: bool, | |||||
/// A list of directories to search for additional `.sublime-syntax` files in. | |||||
pub extra_syntaxes: Vec<String>, | |||||
/// All user params set in [extra] in the config | |||||
pub extra: HashMap<String, Toml>, | |||||
/// Set automatically when instantiating the config. Used for cachebusting | |||||
pub build_timestamp: Option<i64>, | |||||
} | |||||
impl Config { | |||||
/// Parses a string containing TOML to our Config struct | |||||
/// Any extra parameter will end up in the extra field | |||||
pub fn parse(content: &str) -> Result<Config> { | |||||
let mut config: Config = match toml::from_str(content) { | |||||
Ok(c) => c, | |||||
Err(e) => bail!(e), | |||||
}; | |||||
if config.base_url.is_empty() || config.base_url == DEFAULT_BASE_URL { | |||||
bail!("A base URL is required in config.toml with key `base_url`"); | |||||
} | |||||
if !THEME_SET.themes.contains_key(&config.highlight_theme) { | |||||
bail!("Highlight theme {} not available", config.highlight_theme) | |||||
} | |||||
config.build_timestamp = Some(Utc::now().timestamp()); | |||||
if !config.ignored_content.is_empty() { | |||||
// Convert the file glob strings into a compiled glob set matcher. We want to do this once, | |||||
// at program initialization, rather than for every page, for example. We arrange for the | |||||
// globset matcher to always exist (even though it has to be an inside an Option at the | |||||
// moment because of the TOML serializer); if the glob set is empty the `is_match` function | |||||
// of the globber always returns false. | |||||
let mut glob_set_builder = GlobSetBuilder::new(); | |||||
for pat in &config.ignored_content { | |||||
let glob = match Glob::new(pat) { | |||||
Ok(g) => g, | |||||
Err(e) => bail!( | |||||
"Invalid ignored_content glob pattern: {}, error = {}", | |||||
pat, | |||||
e | |||||
), | |||||
}; | |||||
glob_set_builder.add(glob); | |||||
} | |||||
config.ignored_content_globset = Some( | |||||
glob_set_builder | |||||
.build() | |||||
.expect("Bad ignored_content in config file."), | |||||
); | |||||
} | |||||
Ok(config) | |||||
} | |||||
/// Parses a config file from the given path | |||||
pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Config> { | |||||
let mut content = String::new(); | |||||
let path = path.as_ref(); | |||||
let file_name = path.file_name().unwrap(); | |||||
File::open(path) | |||||
.chain_err(|| { | |||||
format!( | |||||
"No `{:?}` file found. Are you in the right directory?", | |||||
file_name | |||||
) | |||||
})? | |||||
.read_to_string(&mut content)?; | |||||
Config::parse(&content) | |||||
} | |||||
/// Makes a url, taking into account that the base url might have a trailing slash | |||||
pub fn make_permalink(&self, path: &str) -> String { | |||||
let trailing_bit = if path.ends_with('/') || path.is_empty() { | |||||
"" | |||||
} else { | |||||
"/" | |||||
}; | |||||
// Index section with a base url that has a trailing slash | |||||
if self.base_url.ends_with('/') && path == "/" { | |||||
self.base_url.clone() | |||||
} else if path == "/" { | |||||
// index section with a base url that doesn't have a trailing slash | |||||
format!("{}/", self.base_url) | |||||
} else if self.base_url.ends_with('/') && path.starts_with('/') { | |||||
format!("{}{}{}", self.base_url, &path[1..], trailing_bit) | |||||
} else if self.base_url.ends_with('/') { | |||||
format!("{}{}{}", self.base_url, path, trailing_bit) | |||||
} else if path.starts_with('/') { | |||||
format!("{}{}{}", self.base_url, path, trailing_bit) | |||||
} else { | |||||
format!("{}/{}{}", self.base_url, path, trailing_bit) | |||||
} | |||||
} | |||||
/// Merges the extra data from the theme with the config extra data | |||||
fn add_theme_extra(&mut self, theme: &Theme) -> Result<()> { | |||||
// 3-pass merging
// 1. save the config's extra values so the user's settings can be preserved
let original = self.extra.clone(); | |||||
// 2. inject theme extra values | |||||
for (key, val) in &theme.extra { | |||||
self.extra | |||||
.entry(key.to_string()) | |||||
.or_insert_with(|| val.clone()); | |||||
} | |||||
// 3. overwrite with original config | |||||
for (key, val) in &original { | |||||
self.extra | |||||
.entry(key.to_string()) | |||||
.or_insert_with(|| val.clone()); | |||||
} | |||||
Ok(()) | |||||
} | |||||
/// Parse the theme.toml file and merges the extra data from the theme | |||||
/// with the config extra data | |||||
pub fn merge_with_theme(&mut self, path: &PathBuf) -> Result<()> { | |||||
let theme = Theme::from_file(path)?; | |||||
self.add_theme_extra(&theme) | |||||
} | |||||
} | |||||
impl Default for Config { | |||||
fn default() -> Config { | |||||
Config { | |||||
base_url: DEFAULT_BASE_URL.to_string(), | |||||
title: None, | |||||
description: None, | |||||
theme: None, | |||||
highlight_code: true, | |||||
highlight_theme: "base16-ocean-dark".to_string(), | |||||
default_language: "en".to_string(), | |||||
generate_rss: false, | |||||
rss_limit: 10_000, | |||||
taxonomies: Vec::new(), | |||||
compile_sass: false, | |||||
check_external_links: false, | |||||
build_search_index: false, | |||||
ignored_content: Vec::new(), | |||||
ignored_content_globset: None, | |||||
translations: HashMap::new(), | |||||
extra_syntaxes: Vec::new(), | |||||
extra: HashMap::new(), | |||||
build_timestamp: Some(1), | |||||
} | |||||
} | |||||
} | |||||
use std::path::Path; | |||||
/// Get and parse the config.
/// If it doesn't succeed, exit
@@ -272,195 +28,3 @@ pub fn get_config(path: &Path, filename: &str) -> Config {
}
}
}
#[cfg(test)] | |||||
mod tests { | |||||
use super::{Config, Theme}; | |||||
#[test] | |||||
fn can_import_valid_config() { | |||||
let config = r#" | |||||
title = "My site" | |||||
base_url = "https://replace-this-with-your-url.com" | |||||
"#; | |||||
let config = Config::parse(config).unwrap(); | |||||
assert_eq!(config.title.unwrap(), "My site".to_string()); | |||||
} | |||||
#[test] | |||||
fn errors_when_invalid_type() { | |||||
let config = r#" | |||||
title = 1 | |||||
base_url = "https://replace-this-with-your-url.com" | |||||
"#; | |||||
let config = Config::parse(config); | |||||
assert!(config.is_err()); | |||||
} | |||||
#[test] | |||||
fn errors_when_missing_required_field() { | |||||
// base_url is required | |||||
let config = r#" | |||||
title = "" | |||||
"#; | |||||
let config = Config::parse(config); | |||||
assert!(config.is_err()); | |||||
} | |||||
#[test] | |||||
fn can_add_extra_values() { | |||||
let config = r#" | |||||
title = "My site" | |||||
base_url = "https://replace-this-with-your-url.com" | |||||
[extra] | |||||
hello = "world" | |||||
"#; | |||||
let config = Config::parse(config); | |||||
assert!(config.is_ok()); | |||||
assert_eq!( | |||||
config | |||||
.unwrap() | |||||
.extra | |||||
.get("hello") | |||||
.unwrap() | |||||
.as_str() | |||||
.unwrap(), | |||||
"world" | |||||
); | |||||
} | |||||
#[test] | |||||
fn can_make_url_index_page_with_non_trailing_slash_url() { | |||||
let mut config = Config::default(); | |||||
config.base_url = "http://vincent.is".to_string(); | |||||
assert_eq!(config.make_permalink(""), "http://vincent.is/"); | |||||
} | |||||
#[test] | |||||
fn can_make_url_index_page_with_trailing_slash_url() {
let mut config = Config::default(); | |||||
config.base_url = "http://vincent.is/".to_string(); | |||||
assert_eq!(config.make_permalink(""), "http://vincent.is/"); | |||||
} | |||||
#[test] | |||||
fn can_make_url_with_non_trailing_slash_base_url() { | |||||
let mut config = Config::default(); | |||||
config.base_url = "http://vincent.is".to_string(); | |||||
assert_eq!(config.make_permalink("hello"), "http://vincent.is/hello/"); | |||||
} | |||||
#[test] | |||||
fn can_make_url_with_trailing_slash_path() { | |||||
let mut config = Config::default(); | |||||
config.base_url = "http://vincent.is/".to_string(); | |||||
assert_eq!(config.make_permalink("/hello"), "http://vincent.is/hello/"); | |||||
} | |||||
#[test] | |||||
fn can_make_url_with_localhost() { | |||||
let mut config = Config::default(); | |||||
config.base_url = "http://127.0.0.1:1111".to_string(); | |||||
assert_eq!( | |||||
config.make_permalink("/tags/rust"), | |||||
"http://127.0.0.1:1111/tags/rust/" | |||||
); | |||||
} | |||||
#[test] | |||||
fn can_merge_with_theme_data_and_preserve_config_value() { | |||||
let config_str = r#" | |||||
title = "My site" | |||||
base_url = "https://replace-this-with-your-url.com" | |||||
[extra] | |||||
hello = "world" | |||||
"#; | |||||
let mut config = Config::parse(config_str).unwrap(); | |||||
let theme_str = r#" | |||||
[extra] | |||||
hello = "foo" | |||||
a_value = 10 | |||||
"#; | |||||
let theme = Theme::parse(theme_str).unwrap(); | |||||
assert!(config.add_theme_extra(&theme).is_ok()); | |||||
let extra = config.extra; | |||||
assert_eq!(extra["hello"].as_str().unwrap(), "world".to_string()); | |||||
assert_eq!(extra["a_value"].as_integer().unwrap(), 10); | |||||
} | |||||
#[test] | |||||
fn can_use_language_configuration() { | |||||
let config = r#" | |||||
base_url = "https://remplace-par-ton-url.fr" | |||||
default_language = "fr" | |||||
[translations] | |||||
[translations.fr] | |||||
title = "Un titre" | |||||
[translations.en] | |||||
title = "A title" | |||||
"#; | |||||
let config = Config::parse(config); | |||||
assert!(config.is_ok()); | |||||
let translations = config.unwrap().translations; | |||||
assert_eq!(translations["fr"]["title"].as_str().unwrap(), "Un titre"); | |||||
assert_eq!(translations["en"]["title"].as_str().unwrap(), "A title"); | |||||
} | |||||
#[test] | |||||
fn missing_ignored_content_results_in_empty_vector_and_empty_globset() { | |||||
let config_str = r#" | |||||
title = "My site" | |||||
base_url = "example.com" | |||||
"#; | |||||
let config = Config::parse(config_str).unwrap(); | |||||
let v = config.ignored_content; | |||||
assert_eq!(v.len(), 0); | |||||
assert!(config.ignored_content_globset.is_none()); | |||||
} | |||||
#[test] | |||||
fn empty_ignored_content_results_in_empty_vector_and_empty_globset() { | |||||
let config_str = r#" | |||||
title = "My site" | |||||
base_url = "example.com" | |||||
ignored_content = [] | |||||
"#; | |||||
let config = Config::parse(config_str).unwrap(); | |||||
assert_eq!(config.ignored_content.len(), 0); | |||||
assert!(config.ignored_content_globset.is_none()); | |||||
} | |||||
#[test] | |||||
fn non_empty_ignored_content_results_in_vector_of_patterns_and_configured_globset() { | |||||
let config_str = r#" | |||||
title = "My site" | |||||
base_url = "example.com" | |||||
ignored_content = ["*.{graphml,iso}", "*.py?"] | |||||
"#; | |||||
let config = Config::parse(config_str).unwrap(); | |||||
let v = config.ignored_content; | |||||
assert_eq!(v, vec!["*.{graphml,iso}", "*.py?"]); | |||||
let g = config.ignored_content_globset.unwrap(); | |||||
assert_eq!(g.len(), 2); | |||||
assert!(g.is_match("foo.graphml")); | |||||
assert!(g.is_match("foo.iso")); | |||||
assert!(!g.is_match("foo.png")); | |||||
assert!(g.is_match("foo.py2")); | |||||
assert!(g.is_match("foo.py3")); | |||||
assert!(!g.is_match("foo.py")); | |||||
} | |||||
} |
@@ -7,9 +7,8 @@ use toml::Value as Toml;
use errors::{Result, ResultExt};
/// Holds the data from a `theme.toml` file.
/// There are other fields than `extra` in it but Gutenberg
/// There are other fields than `extra` in it but Zola
/// itself doesn't care about them.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Theme {
@@ -36,7 +35,6 @@ impl Theme {
bail!("Expected the `theme.toml` to be a TOML table")
}
Ok(Theme { extra })
}
@@ -44,7 +42,11 @@ impl Theme {
pub fn from_file(path: &PathBuf) -> Result<Theme> {
let mut content = String::new();
File::open(path)
.chain_err(|| "No `theme.toml` file found. Are you in the right directory?")?
.chain_err(|| {
"No `theme.toml` file found. \
Is the `theme` defined in your `config.toml` present in the `themes` directory \
and does it have a `theme.toml` inside?"
})?
.read_to_string(&mut content)?;
Theme::parse(&content)
@@ -1,146 +0,0 @@
#![feature(test)] | |||||
extern crate test; | |||||
extern crate tera; | |||||
extern crate content; | |||||
extern crate front_matter; | |||||
extern crate config; | |||||
use std::collections::HashMap; | |||||
use std::path::Path; | |||||
use config::Config; | |||||
use tera::Tera; | |||||
use front_matter::{SortBy, InsertAnchor}; | |||||
use content::{Page, sort_pages, populate_siblings}; | |||||
fn create_pages(number: usize) -> Vec<Page> { | |||||
let mut pages = vec![]; | |||||
let config = Config::default(); | |||||
let mut tera = Tera::default(); | |||||
tera.add_raw_template("shortcodes/youtube.html", "hello").unwrap();
let permalinks = HashMap::new(); | |||||
for i in 0..number { | |||||
let mut page = Page::default(); | |||||
page.meta.weight = Some(i); | |||||
page.raw_content = r#" | |||||
# Modus cognitius profanam ne duae virtutis mundi | |||||
## Ut vita | |||||
Lorem markdownum litora, care ponto nomina, et ut aspicit gelidas sui et | |||||
purpureo genuit. Tamen colla venientis [delphina](http://nil-sol.com/ecquis) | |||||
Tusci et temptata citaeque curam isto ubi vult vulnere reppulit. | |||||
- Seque vidit flendoque de quodam | |||||
- Dabit minimos deiecto caputque noctis pluma | |||||
- Leti coniunx est Helicen | |||||
- Illius pulvereumque Icare inpositos | |||||
- Vivunt pereo pluvio tot ramos Olenios gelidis | |||||
- Quater teretes natura inde | |||||
### A subsection | |||||
Protinus dicunt, breve per, et vivacis genus Orphei munere. Me terram [dimittere | |||||
casside](http://corpus.org/) pervenit saxo primoque frequentat genuum sorori | |||||
praeferre causas Libys. Illud in serpit adsuetam utrimque nunc haberent, | |||||
**terrae si** veni! Hectoreis potes sumite [Mavortis retusa](http://tua.org/) | |||||
granum captantur potuisse Minervae, frugum. | |||||
> Clivo sub inprovisoque nostrum minus fama est, discordia patrem petebat precatur | |||||
absumitur, poena per sit. Foramina *tamen cupidine* memor supplex tollentes | |||||
dictum unam orbem, Anubis caecae. Viderat formosior tegebat satis, Aethiopasque | |||||
sit submisso coniuge tristis ubi! | |||||
## Praeceps Corinthus totidem quem crus vultum cape | |||||
```rs | |||||
#[derive(Debug)] | |||||
pub struct Site { | |||||
/// The base path of the gutenberg site | |||||
pub base_path: PathBuf, | |||||
/// The parsed config for the site | |||||
pub config: Config, | |||||
pub pages: HashMap<PathBuf, Page>, | |||||
pub sections: HashMap<PathBuf, Section>, | |||||
pub tera: Tera, | |||||
live_reload: bool, | |||||
output_path: PathBuf, | |||||
static_path: PathBuf, | |||||
pub tags: Option<Taxonomy>, | |||||
pub categories: Option<Taxonomy>, | |||||
/// A map of all .md files (section and pages) and their permalink | |||||
/// We need that if there are relative links in the content that need to be resolved | |||||
pub permalinks: HashMap<String, String>, | |||||
} | |||||
``` | |||||
## More stuff | |||||
And a shortcode: | |||||
{{ youtube(id="my_youtube_id") }} | |||||
### Another subsection | |||||
Gotta make the toc do a little bit of work | |||||
# A big title | |||||
- hello | |||||
- world | |||||
- ! | |||||
```py | |||||
if __name__ == "__main__": | |||||
gen_site("basic-blog", [""], 250, paginate=True) | |||||
``` | |||||
"#.to_string(); | |||||
page.render_markdown(&permalinks, &tera, &config, &Path::new(""), InsertAnchor::None).unwrap(); | |||||
pages.push(page); | |||||
} | |||||
pages | |||||
} | |||||
// Most of the time spent in those benches is due to the .clone()...
// but I don't know how to remove them, so there are some baseline benches with
// just the cloning, and with a bit of math we can figure out the rest
#[bench] | |||||
fn bench_baseline_cloning(b: &mut test::Bencher) { | |||||
let pages = create_pages(250); | |||||
b.iter(|| pages.clone()); | |||||
} | |||||
#[bench] | |||||
fn bench_sorting_none(b: &mut test::Bencher) { | |||||
let pages = create_pages(250); | |||||
b.iter(|| sort_pages(pages.clone(), SortBy::None));
} | |||||
#[bench] | |||||
fn bench_sorting_order(b: &mut test::Bencher) { | |||||
let pages = create_pages(250); | |||||
b.iter(|| sort_pages(pages.clone(), SortBy::Weight)); | |||||
} | |||||
#[bench] | |||||
fn bench_populate_siblings(b: &mut test::Bencher) { | |||||
let pages = create_pages(250); | |||||
let (sorted_pages, _) = sort_pages(pages, SortBy::Weight); | |||||
b.iter(|| populate_siblings(&sorted_pages.clone(), SortBy::Weight)); | |||||
} | |||||
#[bench] | |||||
fn bench_page_render_html(b: &mut test::Bencher) { | |||||
let pages = create_pages(10); | |||||
let (mut sorted_pages, _) = sort_pages(pages, SortBy::Weight); | |||||
sorted_pages = populate_siblings(&sorted_pages, SortBy::Weight); | |||||
let config = Config::default(); | |||||
let mut tera = Tera::default(); | |||||
tera.add_raw_template("page.html", "{{ page.content }}").unwrap(); | |||||
let page = &sorted_pages[5]; | |||||
b.iter(|| page.render_html(&tera, &config).unwrap()); | |||||
} |
@@ -1,29 +0,0 @@
extern crate tera; | |||||
extern crate slug; | |||||
extern crate serde; | |||||
extern crate rayon; | |||||
extern crate chrono; | |||||
extern crate errors; | |||||
extern crate config; | |||||
extern crate front_matter; | |||||
extern crate rendering; | |||||
extern crate utils; | |||||
#[cfg(test)] | |||||
extern crate tempfile; | |||||
#[cfg(test)] | |||||
extern crate toml; | |||||
#[cfg(test)] | |||||
extern crate globset; | |||||
mod file_info; | |||||
mod page; | |||||
mod section; | |||||
mod sorting; | |||||
pub use file_info::FileInfo; | |||||
pub use page::Page; | |||||
pub use section::Section; | |||||
pub use sorting::{sort_pages, populate_siblings}; |
@@ -1,231 +0,0 @@
use std::cmp::Ordering; | |||||
use rayon::prelude::*; | |||||
use page::Page; | |||||
use front_matter::SortBy; | |||||
/// Sort pages by the given criteria | |||||
/// | |||||
/// Any page that doesn't have the field required by the sorting method (when it is
/// not `None`) will be ignored.
pub fn sort_pages(pages: Vec<Page>, sort_by: SortBy) -> (Vec<Page>, Vec<Page>) { | |||||
if sort_by == SortBy::None { | |||||
return (pages, vec![]); | |||||
} | |||||
let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages | |||||
.into_par_iter() | |||||
.partition(|page| { | |||||
match sort_by { | |||||
SortBy::Date => page.meta.date.is_some(), | |||||
SortBy::Weight => page.meta.weight.is_some(), | |||||
_ => unreachable!() | |||||
} | |||||
}); | |||||
match sort_by { | |||||
SortBy::Date => { | |||||
can_be_sorted.par_sort_unstable_by(|a, b| { | |||||
let ord = b.meta.date().unwrap().cmp(&a.meta.date().unwrap()); | |||||
if ord == Ordering::Equal { | |||||
a.permalink.cmp(&b.permalink) | |||||
} else { | |||||
ord | |||||
} | |||||
}) | |||||
} | |||||
SortBy::Weight => { | |||||
can_be_sorted.par_sort_unstable_by(|a, b| { | |||||
let ord = a.meta.weight().cmp(&b.meta.weight()); | |||||
if ord == Ordering::Equal { | |||||
a.permalink.cmp(&b.permalink) | |||||
} else { | |||||
ord | |||||
} | |||||
}) | |||||
} | |||||
_ => unreachable!() | |||||
}; | |||||
(can_be_sorted, cannot_be_sorted) | |||||
} | |||||
/// Horribly inefficient way to set previous and next on each page while skipping drafts
/// So many clones
pub fn populate_siblings(input: &[Page], sort_by: SortBy) -> Vec<Page> { | |||||
let mut res = Vec::with_capacity(input.len()); | |||||
// The input is already sorted | |||||
for (i, _) in input.iter().enumerate() { | |||||
let mut new_page = input[i].clone(); | |||||
if new_page.is_draft() { | |||||
res.push(new_page); | |||||
continue; | |||||
} | |||||
if i > 0 { | |||||
let mut j = i; | |||||
loop { | |||||
if j == 0 { | |||||
break; | |||||
} | |||||
j -= 1; | |||||
if input[j].is_draft() { | |||||
continue; | |||||
} | |||||
// Remove prev/next otherwise we serialise the whole thing... | |||||
let mut next_page = input[j].clone(); | |||||
match sort_by { | |||||
SortBy::Weight => { | |||||
next_page.lighter = None; | |||||
next_page.heavier = None; | |||||
new_page.lighter = Some(Box::new(next_page)); | |||||
} | |||||
SortBy::Date => { | |||||
next_page.earlier = None; | |||||
next_page.later = None; | |||||
new_page.later = Some(Box::new(next_page)); | |||||
} | |||||
SortBy::None => () | |||||
} | |||||
break; | |||||
} | |||||
} | |||||
if i < input.len() - 1 { | |||||
let mut j = i; | |||||
loop { | |||||
if j == input.len() - 1 { | |||||
break; | |||||
} | |||||
j += 1; | |||||
if input[j].is_draft() { | |||||
continue; | |||||
} | |||||
// Remove prev/next otherwise we serialise the whole thing... | |||||
let mut previous_page = input[j].clone(); | |||||
match sort_by { | |||||
SortBy::Weight => { | |||||
previous_page.lighter = None; | |||||
previous_page.heavier = None; | |||||
new_page.heavier = Some(Box::new(previous_page)); | |||||
} | |||||
SortBy::Date => { | |||||
previous_page.earlier = None; | |||||
previous_page.later = None; | |||||
new_page.earlier = Some(Box::new(previous_page)); | |||||
} | |||||
SortBy::None => {} | |||||
} | |||||
break; | |||||
} | |||||
} | |||||
res.push(new_page); | |||||
} | |||||
res | |||||
} | |||||
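// Illustrative, matching the test below: for weights [1, 2, 3], the middle page
// gets lighter = the weight-1 page and heavier = the weight-3 page, while the two
// ends get None on their outward side; drafts are skipped when picking neighbours.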
#[cfg(test)] | |||||
mod tests { | |||||
use front_matter::{PageFrontMatter, SortBy}; | |||||
use page::Page; | |||||
use super::{sort_pages, populate_siblings}; | |||||
fn create_page_with_date(date: &str) -> Page { | |||||
let mut front_matter = PageFrontMatter::default(); | |||||
front_matter.date = Some(date.to_string()); | |||||
Page::new("content/hello.md", front_matter) | |||||
} | |||||
fn create_page_with_weight(weight: usize) -> Page { | |||||
let mut front_matter = PageFrontMatter::default(); | |||||
front_matter.weight = Some(weight); | |||||
Page::new("content/hello.md", front_matter) | |||||
} | |||||
#[test] | |||||
fn can_sort_by_dates() { | |||||
let input = vec![ | |||||
create_page_with_date("2018-01-01"), | |||||
create_page_with_date("2017-01-01"), | |||||
create_page_with_date("2019-01-01"), | |||||
]; | |||||
let (pages, _) = sort_pages(input, SortBy::Date); | |||||
// Should be sorted by date | |||||
assert_eq!(pages[0].clone().meta.date.unwrap().to_string(), "2019-01-01"); | |||||
assert_eq!(pages[1].clone().meta.date.unwrap().to_string(), "2018-01-01"); | |||||
assert_eq!(pages[2].clone().meta.date.unwrap().to_string(), "2017-01-01"); | |||||
} | |||||
#[test] | |||||
fn can_sort_by_weight() { | |||||
let input = vec![ | |||||
create_page_with_weight(2), | |||||
create_page_with_weight(3), | |||||
create_page_with_weight(1), | |||||
]; | |||||
let (pages, _) = sort_pages(input, SortBy::Weight); | |||||
// Should be sorted by weight | |||||
assert_eq!(pages[0].clone().meta.weight.unwrap(), 1); | |||||
assert_eq!(pages[1].clone().meta.weight.unwrap(), 2); | |||||
assert_eq!(pages[2].clone().meta.weight.unwrap(), 3); | |||||
} | |||||
#[test] | |||||
fn can_sort_by_none() { | |||||
let input = vec![ | |||||
create_page_with_weight(2), | |||||
create_page_with_weight(3), | |||||
create_page_with_weight(1), | |||||
]; | |||||
let (pages, _) = sort_pages(input, SortBy::None); | |||||
assert_eq!(pages[0].clone().meta.weight.unwrap(), 2); | |||||
assert_eq!(pages[1].clone().meta.weight.unwrap(), 3); | |||||
assert_eq!(pages[2].clone().meta.weight.unwrap(), 1); | |||||
} | |||||
#[test] | |||||
fn ignore_page_with_missing_field() { | |||||
let input = vec![ | |||||
create_page_with_weight(2), | |||||
create_page_with_weight(3), | |||||
create_page_with_date("2019-01-01"), | |||||
]; | |||||
let (pages, unsorted) = sort_pages(input, SortBy::Weight); | |||||
assert_eq!(pages.len(), 2); | |||||
assert_eq!(unsorted.len(), 1); | |||||
} | |||||
#[test] | |||||
fn can_populate_siblings() { | |||||
let input = vec![ | |||||
create_page_with_weight(1), | |||||
create_page_with_weight(2), | |||||
create_page_with_weight(3), | |||||
]; | |||||
let pages = populate_siblings(&input, SortBy::Weight); | |||||
assert!(pages[0].clone().lighter.is_none()); | |||||
assert!(pages[0].clone().heavier.is_some()); | |||||
assert_eq!(pages[0].clone().heavier.unwrap().meta.weight.unwrap(), 2); | |||||
assert!(pages[1].clone().heavier.is_some()); | |||||
assert!(pages[1].clone().lighter.is_some()); | |||||
assert_eq!(pages[1].clone().lighter.unwrap().meta.weight.unwrap(), 1); | |||||
assert_eq!(pages[1].clone().heavier.unwrap().meta.weight.unwrap(), 3); | |||||
assert!(pages[2].clone().lighter.is_some()); | |||||
assert!(pages[2].clone().heavier.is_none()); | |||||
assert_eq!(pages[2].clone().lighter.unwrap().meta.weight.unwrap(), 2); | |||||
} | |||||
} |
@@ -7,4 +7,5 @@ authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
error-chain = "0.12"
tera = "0.11"
toml = "0.4"
image = "0.19.0"
image = "0.20"
syntect = "3"
@@ -2,9 +2,10 @@
#[macro_use]
extern crate error_chain;
extern crate image;
extern crate syntect;
extern crate tera;
extern crate toml;
extern crate image;
error_chain! {
errors {}
@@ -17,6 +18,7 @@ error_chain! {
Io(::std::io::Error);
Toml(toml::de::Error);
Image(image::ImageError);
Syntect(syntect::LoadingError);
}
}
@@ -13,3 +13,4 @@ regex = "1"
lazy_static = "1"
errors = { path = "../errors" }
utils = { path = "../utils" }
@@ -2,18 +2,19 @@
extern crate lazy_static;
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate toml;
extern crate chrono;
extern crate regex;
extern crate serde;
extern crate tera;
extern crate chrono;
extern crate toml;
#[macro_use]
extern crate errors;
extern crate utils;
use std::path::Path;
use regex::Regex;
use errors::{Result, ResultExt};
use regex::Regex;
use std::path::Path;
mod page;
mod section;
@@ -22,7 +23,8 @@ pub use page::PageFrontMatter;
pub use section::SectionFrontMatter;
lazy_static! {
static ref PAGE_RE: Regex = Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
static ref PAGE_RE: Regex =
Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
}
#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
@@ -44,12 +46,14 @@ pub enum InsertAnchor {
None,
}
/// Split a file between the front matter and its content
/// Will return an error if the front matter wasn't found
fn split_content(file_path: &Path, content: &str) -> Result<(String, String)> {
if !PAGE_RE.is_match(content) {
bail!("Couldn't find front matter in `{}`. Did you forget to add `+++`?", file_path.to_string_lossy());
bail!(
"Couldn't find front matter in `{}`. Did you forget to add `+++`?",
file_path.to_string_lossy()
);
}
// 2. extract the front matter and the content
@@ -62,10 +66,14 @@ fn split_content(file_path: &Path, content: &str) -> Result<(String, String)> {
/// Split a file between the front matter and its content.
/// Returns a parsed `SectionFrontMatter` and the rest of the content
pub fn split_section_content(file_path: &Path, content: &str) -> Result<(SectionFrontMatter, String)> {
pub fn split_section_content(
file_path: &Path,
content: &str,
) -> Result<(SectionFrontMatter, String)> {
let (front_matter, content) = split_content(file_path, content)?;
let meta = SectionFrontMatter::parse(&front_matter)
.chain_err(|| format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()))?;
let meta = SectionFrontMatter::parse(&front_matter).chain_err(|| {
format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy())
})?;
Ok((meta, content))
}
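// Illustrative call with hypothetical content:
// split_section_content(path, "+++\ntitle = \"About\"\n+++\nSection body")
// returns the parsed SectionFrontMatter plus "Section body" as the remaining content.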
@@ -73,8 +81,9 @@ pub fn split_section_content(file_path: &Path, content: &str) -> Result<(Section
/// Returns a parsed `PageFrontMatter` and the rest of the content
pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontMatter, String)> {
let (front_matter, content) = split_content(file_path, content)?;
let meta = PageFrontMatter::parse(&front_matter)
.chain_err(|| format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()))?;
let meta = PageFrontMatter::parse(&front_matter).chain_err(|| {
format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy())
})?;
Ok((meta, content))
}
@@ -82,7 +91,7 @@ pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontM
mod tests {
use std::path::Path;
use super::{split_section_content, split_page_content};
use super::{split_page_content, split_section_content};
#[test]
fn can_split_page_content_valid() {
@@ -1,68 +1,14 @@
use std::collections::HashMap;
use std::result::Result as StdResult;
use chrono::prelude::*;
use tera::{Map, Value};
use serde::{Deserialize, Deserializer};
use toml;
use errors::Result;
fn from_toml_datetime<'de, D>(deserializer: D) -> StdResult<Option<String>, D::Error> | |||||
where | |||||
D: Deserializer<'de>, | |||||
{ | |||||
toml::value::Datetime::deserialize(deserializer) | |||||
.map(|s| Some(s.to_string())) | |||||
} | |||||
/// Returns key/value for a converted date from TOML. | |||||
/// If the table itself is the TOML struct, only return its value without the key | |||||
fn convert_toml_date(table: Map<String, Value>) -> Value { | |||||
let mut new = Map::new(); | |||||
for (k, v) in table { | |||||
if k == "$__toml_private_datetime" { | |||||
return v; | |||||
} | |||||
match v { | |||||
Value::Object(mut o) => { | |||||
// that was a toml datetime object, just return the date | |||||
if let Some(toml_date) = o.remove("$__toml_private_datetime") { | |||||
new.insert(k, toml_date); | |||||
return Value::Object(new); | |||||
} | |||||
new.insert(k, convert_toml_date(o)); | |||||
} | |||||
_ => { new.insert(k, v); } | |||||
} | |||||
} | |||||
Value::Object(new) | |||||
} | |||||
/// TOML datetimes will be serialized as a struct but we want the | |||||
/// stringified version for json, otherwise they are going to be weird | |||||
fn fix_toml_dates(table: Map<String, Value>) -> Value { | |||||
let mut new = Map::new(); | |||||
for (key, value) in table { | |||||
match value { | |||||
Value::Object(mut o) => { | |||||
new.insert(key, convert_toml_date(o)); | |||||
} | |||||
_ => { new.insert(key, value); } | |||||
} | |||||
} | |||||
Value::Object(new) | |||||
} | |||||
use utils::de::{fix_toml_dates, from_toml_datetime}; | |||||
/// The front matter of every page
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(default)]
pub struct PageFrontMatter {
/// <title> of the page
@@ -72,6 +18,12 @@ pub struct PageFrontMatter {
/// Date if we want to order pages (ie blog post)
#[serde(default, deserialize_with = "from_toml_datetime")]
pub date: Option<String>,
/// Chrono converted datetime | |||||
#[serde(default, skip_deserializing)] | |||||
pub datetime: Option<NaiveDateTime>, | |||||
/// The converted date into a (year, month, day) tuple | |||||
#[serde(default, skip_deserializing)] | |||||
pub datetime_tuple: Option<(i32, u32, u32)>, | |||||
/// Whether this page is a draft and should be ignored for pagination etc
pub draft: bool,
/// The page slug. Will be used instead of the filename if present
@@ -86,7 +38,7 @@ pub struct PageFrontMatter {
pub order: Option<usize>,
/// Integer to use to order content. Highest is at the bottom, lowest first
pub weight: Option<usize>,
/// All aliases for that page. Gutenberg will create HTML templates that will
/// All aliases for that page. Zola will create HTML templates that will
/// redirect to this
#[serde(skip_serializing)]
pub aliases: Vec<String>,
@@ -124,20 +76,32 @@ impl PageFrontMatter {
Value::Object(o) => o,
_ => unreachable!("Got something other than a table in page extra"),
};
f.date_to_datetime(); | |||||
Ok(f)
}
/// Converts the TOML datetime to a Chrono naive datetime
pub fn date(&self) -> Option<NaiveDateTime> {
if let Some(ref d) = self.date {
/// Also grabs the year/month/day tuple that will be used in serialization | |||||
pub fn date_to_datetime(&mut self) { | |||||
self.datetime = if let Some(ref d) = self.date { | |||||
if d.contains('T') {
DateTime::parse_from_rfc3339(&d).ok().and_then(|s| Some(s.naive_local()))
} else {
NaiveDate::parse_from_str(&d, "%Y-%m-%d").ok().and_then(|s| Some(s.and_hms(0, 0, 0)))
NaiveDate::parse_from_str(&d, "%Y-%m-%d") | |||||
.ok() | |||||
.and_then(|s| Some(s.and_hms(0, 0, 0))) | |||||
}
} else {
None
}
}; | |||||
self.datetime_tuple = if let Some(ref dt) = self.datetime { | |||||
Some((dt.year(), dt.month(), dt.day())) | |||||
} else { | |||||
None | |||||
}; | |||||
}
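// Illustrative: date = "2018-10-08" takes the NaiveDate branch and yields a midnight
// datetime with datetime_tuple = Some((2018, 10, 8)); a value containing 'T', such as
// "2018-10-08T08:00:00Z", goes through parse_from_rfc3339 instead.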
pub fn order(&self) -> usize {
@@ -155,6 +119,8 @@ impl Default for PageFrontMatter {
title: None,
description: None,
date: None,
datetime: None, | |||||
datetime_tuple: None, | |||||
draft: false,
slug: None,
path: None,
@@ -169,11 +135,10 @@ impl Default for PageFrontMatter {
}
}
#[cfg(test)]
mod tests {
use tera::to_value;
use super::PageFrontMatter;
use tera::to_value;
#[test]
fn can_have_empty_front_matter() {
@@ -195,7 +160,6 @@ mod tests {
assert_eq!(res.description.unwrap(), "hey there".to_string())
}
#[test]
fn errors_with_invalid_front_matter() {
let content = r#"title = 1\n"#;
@@ -5,11 +5,10 @@ use toml;
use errors::Result;
use super::{SortBy, InsertAnchor};
use super::{InsertAnchor, SortBy};
static DEFAULT_PAGINATE_PATH: &'static str = "page";
/// The front matter of every section
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(default)]
@@ -51,6 +50,14 @@ pub struct SectionFrontMatter {
/// Defaults to `true` but is only used if search is explicitly enabled in the config.
#[serde(skip_serializing)]
pub in_search_index: bool,
/// Whether the section should pass its pages on to the parent section. Defaults to `false`. | |||||
/// Useful when the section shouldn't split up the parent section, like | |||||
/// sections for each year under a posts section. | |||||
#[serde(skip_serializing)] | |||||
pub transparent: bool, | |||||
/// Optional template for all pages in this section (including the pages of children section) | |||||
#[serde(skip_serializing)] | |||||
pub page_template: Option<String>, | |||||
/// Any extra parameter present in the front matter
pub extra: HashMap<String, Value>,
}
@@ -69,7 +76,7 @@ impl SectionFrontMatter {
pub fn is_paginated(&self) -> bool {
match self.paginate_by {
Some(v) => v > 0,
None => false
None => false,
}
}
}
@@ -88,6 +95,8 @@ impl Default for SectionFrontMatter {
redirect_to: None,
insert_anchor_links: InsertAnchor::None,
in_search_index: true,
transparent: false, | |||||
page_template: None, | |||||
extra: HashMap::new(),
}
}
@@ -1,8 +0,0 @@
[package] | |||||
name = "highlighting" | |||||
version = "0.1.0" | |||||
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] | |||||
[dependencies] | |||||
lazy_static = "1" | |||||
syntect = "2" |
@@ -1,44 +0,0 @@
#[macro_use] | |||||
extern crate lazy_static; | |||||
extern crate syntect; | |||||
use std::cell::RefCell; | |||||
use std::path::Path; | |||||
use syntect::LoadingError; | |||||
use syntect::dumps::from_binary; | |||||
use syntect::parsing::SyntaxSet; | |||||
use syntect::highlighting::{ThemeSet, Theme}; | |||||
use syntect::easy::HighlightLines; | |||||
thread_local! { | |||||
/// A pair of the set and whether extras have been added to it. | |||||
pub static SYNTAX_SET: RefCell<(SyntaxSet, bool)> = { | |||||
let ss: SyntaxSet = from_binary(include_bytes!("../../../sublime_syntaxes/newlines.packdump")); | |||||
RefCell::new((ss, false)) | |||||
}; | |||||
} | |||||
lazy_static! { | |||||
pub static ref THEME_SET: ThemeSet = from_binary(include_bytes!("../../../sublime_themes/all.themedump")); | |||||
} | |||||
pub fn get_highlighter<'a>(theme: &'a Theme, info: &str, base_path: &Path, extra_syntaxes: &[String]) -> Result<HighlightLines<'a>, LoadingError> { | |||||
SYNTAX_SET.with(|rc| { | |||||
let (ss, extras_added) = &mut *rc.borrow_mut(); | |||||
if !*extras_added { | |||||
for dir in extra_syntaxes { | |||||
ss.load_syntaxes(base_path.join(dir), true)?; | |||||
} | |||||
ss.link_syntaxes(); | |||||
*extras_added = true; | |||||
} | |||||
let syntax = info | |||||
.split(' ') | |||||
.next() | |||||
.and_then(|lang| ss.find_syntax_by_token(lang)) | |||||
.unwrap_or_else(|| ss.find_syntax_plain_text()); | |||||
Ok(HighlightLines::new(syntax, theme)) | |||||
}) | |||||
} |
@@ -7,7 +7,7 @@ authors = ["Vojtěch Král <vojtech@kral.hk>"]
lazy_static = "1"
regex = "1.0"
tera = "0.11"
image = "0.19"
image = "0.20"
rayon = "1"
errors = { path = "../errors" }
@@ -1,32 +1,32 @@
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate image;
extern crate rayon;
extern crate regex;
extern crate utils;
extern crate errors;
extern crate utils;
use std::path::{Path, PathBuf};
use std::hash::{Hash, Hasher};
use std::collections::HashMap;
use std::collections::hash_map::Entry as HEntry;
use std::collections::hash_map::DefaultHasher;
use std::collections::hash_map::Entry as HEntry;
use std::collections::HashMap;
use std::fs::{self, File};
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use regex::Regex;
use image::{GenericImage, FilterType};
use image::jpeg::JPEGEncoder;
use image::{FilterType, GenericImageView};
use rayon::prelude::*;
use regex::Regex;
use utils::fs as ufs;
use errors::{Result, ResultExt};
use utils::fs as ufs;
static RESIZED_SUBDIR: &'static str = "_processed_images";
static RESIZED_SUBDIR: &'static str = "processed_images";
lazy_static! {
pub static ref RESIZED_FILENAME: Regex = Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.]jpg"#).unwrap();
pub static ref RESIZED_FILENAME: Regex =
Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.]jpg"#).unwrap();
}
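// Illustrative: a cached file named "0123456789abcdef01.jpg" matches RESIZED_FILENAME
// with capture 1 = "0123456789abcdef" (the 16-hex-digit operation hash) and
// capture 2 = "01" (the 2-hex-digit collision id used below during pruning).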
/// Describes the precise kind of a resize operation
@@ -57,16 +57,22 @@ impl ResizeOp {
// Validate args:
match op {
"fit_width" => if width.is_none() {
return Err("op=\"fit_width\" requires a `width` argument".to_string().into());
},
"fit_height" => if height.is_none() {
return Err("op=\"fit_height\" requires a `height` argument".to_string().into());
},
"scale" | "fit" | "fill" => if width.is_none() || height.is_none() {
return Err(format!("op={} requires a `width` and `height` argument", op).into());
},
_ => return Err(format!("Invalid image resize operation: {}", op).into())
"fit_width" => {
if width.is_none() {
return Err("op=\"fit_width\" requires a `width` argument".to_string().into());
}
}
"fit_height" => {
if height.is_none() {
return Err("op=\"fit_height\" requires a `height` argument".to_string().into());
}
}
"scale" | "fit" | "fill" => {
if width.is_none() || height.is_none() {
return Err(format!("op={} requires a `width` and `height` argument", op).into());
}
}
_ => return Err(format!("Invalid image resize operation: {}", op).into()),
};
Ok(match op {
@@ -121,8 +127,12 @@ impl From<ResizeOp> for u8 { | |||||
impl Hash for ResizeOp { | impl Hash for ResizeOp { | ||||
fn hash<H: Hasher>(&self, hasher: &mut H) { | fn hash<H: Hasher>(&self, hasher: &mut H) { | ||||
hasher.write_u8(u8::from(*self)); | hasher.write_u8(u8::from(*self)); | ||||
if let Some(w) = self.width() { hasher.write_u32(w); } | |||||
if let Some(h) = self.height() { hasher.write_u32(h); } | |||||
if let Some(w) = self.width() { | |||||
hasher.write_u32(w); | |||||
} | |||||
if let Some(h) = self.height() { | |||||
hasher.write_u32(h); | |||||
} | |||||
} | } | ||||
} | } | ||||
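// For context: this hash is what ends up in the cached filename. Together
// with the collision id it has to round-trip through the RESIZED_FILENAME
// regex above: 16 hex digits of u64 hash followed by 2 hex digits of
// collision id, e.g. "0123456789abcdef00.jpg".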
@@ -207,8 +217,7 @@ impl ImageOp { | |||||
((img_w - crop_w) / 2, 0) | ((img_w - crop_w) / 2, 0) | ||||
}; | }; | ||||
img.crop(offset_w, offset_h, crop_w, crop_h) | |||||
.resize_exact(w, h, RESIZE_FILTER) | |||||
img.crop(offset_w, offset_h, crop_w, crop_h).resize_exact(w, h, RESIZE_FILTER) | |||||
} | } | ||||
} | } | ||||
}; | }; | ||||
@@ -221,7 +230,6 @@ impl ImageOp { | |||||
} | } | ||||
} | } | ||||
/// A structure into which image operations can be enqueued and then performed. | /// A structure into which image operations can be enqueued and then performed. | ||||
/// All output is written in a subdirectory in `static_path`, | /// All output is written in a subdirectory in `static_path`, | ||||
/// tracking file staleness via timestamps and handling possible hash collisions. | /// tracking file staleness via timestamps and handling possible hash collisions. | ||||
@@ -271,7 +279,11 @@ impl Processor { | |||||
fn insert_with_collisions(&mut self, mut img_op: ImageOp) -> u32 { | fn insert_with_collisions(&mut self, mut img_op: ImageOp) -> u32 { | ||||
match self.img_ops.entry(img_op.hash) { | match self.img_ops.entry(img_op.hash) { | ||||
HEntry::Occupied(entry) => if *entry.get() == img_op { return 0; }, | |||||
HEntry::Occupied(entry) => { | |||||
if *entry.get() == img_op { | |||||
return 0; | |||||
} | |||||
} | |||||
HEntry::Vacant(entry) => { | HEntry::Vacant(entry) => { | ||||
entry.insert(img_op); | entry.insert(img_op); | ||||
return 0; | return 0; | ||||
@@ -341,9 +353,8 @@ impl Processor { | |||||
let filename = entry_path.file_name().unwrap().to_string_lossy(); | let filename = entry_path.file_name().unwrap().to_string_lossy(); | ||||
if let Some(capts) = RESIZED_FILENAME.captures(filename.as_ref()) { | if let Some(capts) = RESIZED_FILENAME.captures(filename.as_ref()) { | ||||
let hash = u64::from_str_radix(capts.get(1).unwrap().as_str(), 16).unwrap(); | let hash = u64::from_str_radix(capts.get(1).unwrap().as_str(), 16).unwrap(); | ||||
let collision_id = u32::from_str_radix( | |||||
capts.get(2).unwrap().as_str(), 16, | |||||
).unwrap(); | |||||
let collision_id = | |||||
u32::from_str_radix(capts.get(2).unwrap().as_str(), 16).unwrap(); | |||||
if collision_id > 0 || !self.img_ops.contains_key(&hash) { | if collision_id > 0 || !self.img_ops.contains_key(&hash) { | ||||
fs::remove_file(&entry_path)?; | fs::remove_file(&entry_path)?; | ||||
@@ -359,26 +370,28 @@ impl Processor { | |||||
ufs::ensure_directory_exists(&self.resized_path)?; | ufs::ensure_directory_exists(&self.resized_path)?; | ||||
} | } | ||||
self.img_ops.par_iter().map(|(hash, op)| { | |||||
let target = self.resized_path.join(Self::op_filename(*hash, op.collision_id)); | |||||
op.perform(&self.content_path, &target) | |||||
.chain_err(|| format!("Failed to process image: {}", op.source)) | |||||
}) | |||||
.fold(|| Ok(()), Result::and) | |||||
.reduce(|| Ok(()), Result::and) | |||||
self.img_ops | |||||
.par_iter() | |||||
.map(|(hash, op)| { | |||||
let target = self.resized_path.join(Self::op_filename(*hash, op.collision_id)); | |||||
op.perform(&self.content_path, &target) | |||||
.chain_err(|| format!("Failed to process image: {}", op.source)) | |||||
}) | |||||
.collect::<Result<()>>() | |||||
} | } | ||||
} | } | ||||
/// Looks at file's extension and returns whether it's a supported image format | /// Looks at file's extension and returns whether it's a supported image format | ||||
pub fn file_is_img<P: AsRef<Path>>(p: P) -> bool { | pub fn file_is_img<P: AsRef<Path>>(p: P) -> bool { | ||||
p.as_ref().extension().and_then(|s| s.to_str()).map(|ext| { | |||||
match ext.to_lowercase().as_str() { | |||||
p.as_ref() | |||||
.extension() | |||||
.and_then(|s| s.to_str()) | |||||
.map(|ext| match ext.to_lowercase().as_str() { | |||||
"jpg" | "jpeg" => true, | "jpg" | "jpeg" => true, | ||||
"png" => true, | "png" => true, | ||||
"gif" => true, | "gif" => true, | ||||
"bmp" => true, | "bmp" => true, | ||||
_ => false, | _ => false, | ||||
} | |||||
}).unwrap_or(false) | |||||
}) | |||||
.unwrap_or(false) | |||||
} | } |
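// A quick sketch of the intended behaviour (a pure, case-insensitive
// extension check; the file content is never inspected):
//
//     assert!(file_is_img("photo.JPG"));
//     assert!(file_is_img("img/diagram.png"));
//     assert!(!file_is_img("notes.md"));
//     assert!(!file_is_img("no_extension"));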
@@ -1,20 +1,24 @@ | |||||
[package] | [package] | ||||
name = "content" | |||||
name = "library" | |||||
version = "0.1.0" | version = "0.1.0" | ||||
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] | authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] | ||||
[dependencies] | [dependencies] | ||||
slotmap = "0.2" | |||||
rayon = "1" | |||||
chrono = { version = "0.4", features = ["serde"] } | |||||
tera = "0.11" | tera = "0.11" | ||||
serde = "1" | serde = "1" | ||||
serde_derive = "1" | |||||
slug = "0.1" | slug = "0.1" | ||||
rayon = "1" | |||||
chrono = "0.4" | |||||
regex = "1" | |||||
lazy_static = "1" | |||||
errors = { path = "../errors" } | |||||
front_matter = { path = "../front_matter" } | |||||
config = { path = "../config" } | config = { path = "../config" } | ||||
utils = { path = "../utils" } | utils = { path = "../utils" } | ||||
rendering = { path = "../rendering" } | rendering = { path = "../rendering" } | ||||
front_matter = { path = "../front_matter" } | |||||
errors = { path = "../errors" } | |||||
[dev-dependencies] | [dev-dependencies] | ||||
tempfile = "3" | tempfile = "3" |
@@ -114,7 +114,8 @@ mod tests { | |||||
#[test] | #[test] | ||||
fn can_find_content_components() { | fn can_find_content_components() { | ||||
let res = find_content_components("/home/vincent/code/site/content/posts/tutorials/python.md"); | |||||
let res = | |||||
find_content_components("/home/vincent/code/site/content/posts/tutorials/python.md"); | |||||
assert_eq!(res, ["posts".to_string(), "tutorials".to_string()]); | assert_eq!(res, ["posts".to_string(), "tutorials".to_string()]); | ||||
} | } | ||||
} | } |
@@ -0,0 +1,9 @@ | |||||
mod file_info; | |||||
mod page; | |||||
mod section; | |||||
mod ser; | |||||
pub use self::file_info::FileInfo; | |||||
pub use self::page::Page; | |||||
pub use self::section::Section; | |||||
pub use self::ser::{SerializingPage, SerializingSection}; |
@@ -1,23 +1,28 @@ | |||||
/// A page can be a blog post or a basic page | /// A page can be a blog post or a basic page | ||||
use std::collections::HashMap; | use std::collections::HashMap; | ||||
use std::path::{Path, PathBuf}; | use std::path::{Path, PathBuf}; | ||||
use std::result::Result as StdResult; | |||||
use chrono::Datelike; | |||||
use tera::{Tera, Context as TeraContext}; | |||||
use serde::ser::{SerializeStruct, self}; | |||||
use regex::Regex; | |||||
use slotmap::Key; | |||||
use slug::slugify; | use slug::slugify; | ||||
use tera::{Context as TeraContext, Tera}; | |||||
use errors::{Result, ResultExt}; | |||||
use config::Config; | use config::Config; | ||||
use utils::fs::{read_file, find_related_assets}; | |||||
use errors::{Result, ResultExt}; | |||||
use front_matter::{split_page_content, InsertAnchor, PageFrontMatter}; | |||||
use library::Library; | |||||
use rendering::{render_content, Header, RenderContext}; | |||||
use utils::fs::{find_related_assets, read_file}; | |||||
use utils::site::get_reading_analytics; | use utils::site::get_reading_analytics; | ||||
use utils::templates::render_template; | use utils::templates::render_template; | ||||
use front_matter::{PageFrontMatter, InsertAnchor, split_page_content}; | |||||
use rendering::{RenderContext, Header, render_content}; | |||||
use file_info::FileInfo; | |||||
use content::file_info::FileInfo; | |||||
use content::ser::SerializingPage; | |||||
lazy_static! { | |||||
// Check whether a string starts with yyyy-mm-dd{-,_} | |||||
    static ref DATE_IN_FILENAME: Regex = Regex::new(r"^([12]\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01]))(_|-)").unwrap();
} | |||||
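// For illustration: "2018-10-08_hello.md" and "2018-10-08-hello.md" both
// match; the first 10 characters become the date and the slug is built
// from the remainder after the separator (see `parse` below).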
#[derive(Clone, Debug, PartialEq)] | #[derive(Clone, Debug, PartialEq)] | ||||
pub struct Page { | pub struct Page { | ||||
@@ -25,10 +30,14 @@ pub struct Page { | |||||
pub file: FileInfo, | pub file: FileInfo, | ||||
/// The front matter meta-data | /// The front matter meta-data | ||||
pub meta: PageFrontMatter, | pub meta: PageFrontMatter, | ||||
/// The list of parent sections | |||||
pub ancestors: Vec<Key>, | |||||
/// The actual content of the page, in markdown | /// The actual content of the page, in markdown | ||||
pub raw_content: String, | pub raw_content: String, | ||||
/// All the non-md files we found next to the .md file | /// All the non-md files we found next to the .md file | ||||
pub assets: Vec<PathBuf>, | pub assets: Vec<PathBuf>, | ||||
/// All the non-md files we found next to the .md file as strings for use in templates | |||||
pub serialized_assets: Vec<String>, | |||||
/// The rendered HTML of the page | /// The rendered HTML of the page | ||||
pub content: String, | pub content: String, | ||||
/// The slug of that page. | /// The slug of that page. | ||||
@@ -45,18 +54,22 @@ pub struct Page { | |||||
/// as summary | /// as summary | ||||
pub summary: Option<String>, | pub summary: Option<String>, | ||||
/// The earlier page, for pages sorted by date | /// The earlier page, for pages sorted by date | ||||
pub earlier: Option<Box<Page>>, | |||||
pub earlier: Option<Key>, | |||||
/// The later page, for pages sorted by date | /// The later page, for pages sorted by date | ||||
pub later: Option<Box<Page>>, | |||||
pub later: Option<Key>, | |||||
/// The lighter page, for pages sorted by weight | /// The lighter page, for pages sorted by weight | ||||
pub lighter: Option<Box<Page>>, | |||||
pub lighter: Option<Key>, | |||||
/// The heavier page, for pages sorted by weight | /// The heavier page, for pages sorted by weight | ||||
pub heavier: Option<Box<Page>>, | |||||
pub heavier: Option<Key>, | |||||
/// Toc made from the headers of the markdown file | /// Toc made from the headers of the markdown file | ||||
pub toc: Vec<Header>, | pub toc: Vec<Header>, | ||||
/// How many words in the raw content | |||||
pub word_count: Option<usize>, | |||||
/// How long it would take to read the raw content. | |||||
/// See `get_reading_analytics` for how it is calculated | |||||
pub reading_time: Option<usize>, | |||||
} | } | ||||
impl Page { | impl Page { | ||||
pub fn new<P: AsRef<Path>>(file_path: P, meta: PageFrontMatter) -> Page { | pub fn new<P: AsRef<Path>>(file_path: P, meta: PageFrontMatter) -> Page { | ||||
let file_path = file_path.as_ref(); | let file_path = file_path.as_ref(); | ||||
@@ -64,8 +77,10 @@ impl Page { | |||||
Page { | Page { | ||||
file: FileInfo::new_page(file_path), | file: FileInfo::new_page(file_path), | ||||
meta, | meta, | ||||
ancestors: vec![], | |||||
raw_content: "".to_string(), | raw_content: "".to_string(), | ||||
assets: vec![], | assets: vec![], | ||||
serialized_assets: vec![], | |||||
content: "".to_string(), | content: "".to_string(), | ||||
slug: "".to_string(), | slug: "".to_string(), | ||||
path: "".to_string(), | path: "".to_string(), | ||||
@@ -77,6 +92,8 @@ impl Page { | |||||
lighter: None, | lighter: None, | ||||
heavier: None, | heavier: None, | ||||
toc: vec![], | toc: vec![], | ||||
word_count: None, | |||||
reading_time: None, | |||||
} | } | ||||
} | } | ||||
@@ -90,19 +107,36 @@ impl Page { | |||||
pub fn parse(file_path: &Path, content: &str, config: &Config) -> Result<Page> { | pub fn parse(file_path: &Path, content: &str, config: &Config) -> Result<Page> { | ||||
let (meta, content) = split_page_content(file_path, content)?; | let (meta, content) = split_page_content(file_path, content)?; | ||||
let mut page = Page::new(file_path, meta); | let mut page = Page::new(file_path, meta); | ||||
page.raw_content = content; | page.raw_content = content; | ||||
let (word_count, reading_time) = get_reading_analytics(&page.raw_content); | |||||
page.word_count = Some(word_count); | |||||
page.reading_time = Some(reading_time); | |||||
let mut has_date_in_name = false; | |||||
if DATE_IN_FILENAME.is_match(&page.file.name) { | |||||
has_date_in_name = true; | |||||
if page.meta.date.is_none() { | |||||
page.meta.date = Some(page.file.name[..10].to_string()); | |||||
page.meta.date_to_datetime(); | |||||
} | |||||
} | |||||
page.slug = { | page.slug = { | ||||
if let Some(ref slug) = page.meta.slug { | if let Some(ref slug) = page.meta.slug { | ||||
slug.trim().to_string() | slug.trim().to_string() | ||||
} else if page.file.name == "index" { | |||||
if let Some(parent) = page.file.path.parent() { | |||||
slugify(parent.file_name().unwrap().to_str().unwrap()) | |||||
} else { | |||||
slugify(&page.file.name) | |||||
} | |||||
} else { | } else { | ||||
if page.file.name == "index" { | |||||
if let Some(parent) = page.file.path.parent() { | |||||
slugify(parent.file_name().unwrap().to_str().unwrap()) | |||||
} else { | |||||
slugify(page.file.name.clone()) | |||||
} | |||||
if has_date_in_name { | |||||
// skip the date + the {_,-} | |||||
slugify(&page.file.name[11..]) | |||||
} else { | } else { | ||||
slugify(page.file.name.clone()) | |||||
slugify(&page.file.name) | |||||
} | } | ||||
} | } | ||||
}; | }; | ||||
@@ -120,7 +154,9 @@ impl Page { | |||||
page.path = format!("{}/", page.path); | page.path = format!("{}/", page.path); | ||||
} | } | ||||
page.components = page.path.split('/') | |||||
page.components = page | |||||
.path | |||||
.split('/') | |||||
.map(|p| p.to_string()) | .map(|p| p.to_string()) | ||||
.filter(|p| !p.is_empty()) | .filter(|p| !p.is_empty()) | ||||
.collect::<Vec<_>>(); | .collect::<Vec<_>>(); | ||||
@@ -147,16 +183,18 @@ impl Page { | |||||
// against the remaining path. Note that the current behaviour effectively means that | // against the remaining path. Note that the current behaviour effectively means that | ||||
// the `ignored_content` setting in the config file is limited to single-file glob | // the `ignored_content` setting in the config file is limited to single-file glob | ||||
// patterns (no "**" patterns). | // patterns (no "**" patterns). | ||||
page.assets = assets.into_iter() | |||||
.filter(|path| | |||||
match path.file_name() { | |||||
None => true, | |||||
Some(file) => !globset.is_match(file) | |||||
} | |||||
).collect(); | |||||
page.assets = assets | |||||
.into_iter() | |||||
.filter(|path| match path.file_name() { | |||||
None => true, | |||||
Some(file) => !globset.is_match(file), | |||||
}) | |||||
.collect(); | |||||
} else { | } else { | ||||
page.assets = assets; | page.assets = assets; | ||||
} | } | ||||
page.serialized_assets = page.serialize_assets(); | |||||
} else { | } else { | ||||
page.assets = vec![]; | page.assets = vec![]; | ||||
} | } | ||||
@@ -171,19 +209,12 @@ impl Page { | |||||
permalinks: &HashMap<String, String>, | permalinks: &HashMap<String, String>, | ||||
tera: &Tera, | tera: &Tera, | ||||
config: &Config, | config: &Config, | ||||
base_path: &Path, | |||||
anchor_insert: InsertAnchor, | anchor_insert: InsertAnchor, | ||||
) -> Result<()> { | ) -> Result<()> { | ||||
let mut context = RenderContext::new( | |||||
tera, | |||||
config, | |||||
&self.permalink, | |||||
permalinks, | |||||
base_path, | |||||
anchor_insert, | |||||
); | |||||
let mut context = | |||||
RenderContext::new(tera, config, &self.permalink, permalinks, anchor_insert); | |||||
context.tera_context.add("page", self); | |||||
context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None)); | |||||
let res = render_content(&self.raw_content, &context) | let res = render_content(&self.raw_content, &context) | ||||
.chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?; | .chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?; | ||||
@@ -196,17 +227,17 @@ impl Page { | |||||
} | } | ||||
/// Renders the page using the default layout, unless specified in front-matter | /// Renders the page using the default layout, unless specified in front-matter | ||||
pub fn render_html(&self, tera: &Tera, config: &Config) -> Result<String> { | |||||
pub fn render_html(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> { | |||||
let tpl_name = match self.meta.template { | let tpl_name = match self.meta.template { | ||||
Some(ref l) => l.to_string(), | |||||
None => "page.html".to_string() | |||||
Some(ref l) => l, | |||||
None => "page.html", | |||||
}; | }; | ||||
let mut context = TeraContext::new(); | let mut context = TeraContext::new(); | ||||
context.add("config", config); | |||||
context.add("page", self); | |||||
context.add("current_url", &self.permalink); | |||||
context.add("current_path", &self.path); | |||||
context.insert("config", config); | |||||
context.insert("current_url", &self.permalink); | |||||
context.insert("current_path", &self.path); | |||||
context.insert("page", &self.to_serialized(library)); | |||||
render_template(&tpl_name, tera, &context, &config.theme) | render_template(&tpl_name, tera, &context, &config.theme) | ||||
.chain_err(|| format!("Failed to render page '{}'", self.file.path.display())) | .chain_err(|| format!("Failed to render page '{}'", self.file.path.display())) | ||||
@@ -214,12 +245,21 @@ impl Page { | |||||
/// Creates a vector of asset URLs. | /// Creates a vector of asset URLs. | ||||
fn serialize_assets(&self) -> Vec<String> { | fn serialize_assets(&self) -> Vec<String> { | ||||
self.assets.iter() | |||||
self.assets | |||||
.iter() | |||||
.filter_map(|asset| asset.file_name()) | .filter_map(|asset| asset.file_name()) | ||||
.filter_map(|filename| filename.to_str()) | .filter_map(|filename| filename.to_str()) | ||||
.map(|filename| self.path.clone() + filename) | .map(|filename| self.path.clone() + filename) | ||||
.collect() | .collect() | ||||
} | } | ||||
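// e.g. with `self.path == "posts/hello/"` and an asset file "graph.jpg",
// this yields the URL path "posts/hello/graph.jpg".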
pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> { | |||||
SerializingPage::from_page(self, library) | |||||
} | |||||
pub fn to_serialized_basic<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> { | |||||
SerializingPage::from_page_basic(self, Some(library)) | |||||
} | |||||
} | } | ||||
impl Default for Page { | impl Default for Page { | ||||
@@ -227,8 +267,10 @@ impl Default for Page { | |||||
Page { | Page { | ||||
file: FileInfo::default(), | file: FileInfo::default(), | ||||
meta: PageFrontMatter::default(), | meta: PageFrontMatter::default(), | ||||
ancestors: vec![], | |||||
raw_content: "".to_string(), | raw_content: "".to_string(), | ||||
assets: vec![], | assets: vec![], | ||||
serialized_assets: vec![], | |||||
content: "".to_string(), | content: "".to_string(), | ||||
slug: "".to_string(), | slug: "".to_string(), | ||||
path: "".to_string(), | path: "".to_string(), | ||||
@@ -240,65 +282,27 @@ impl Default for Page { | |||||
lighter: None, | lighter: None, | ||||
heavier: None, | heavier: None, | ||||
toc: vec![], | toc: vec![], | ||||
word_count: None, | |||||
reading_time: None, | |||||
} | } | ||||
} | } | ||||
} | } | ||||
impl ser::Serialize for Page { | |||||
fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: ser::Serializer { | |||||
let mut state = serializer.serialize_struct("page", 20)?; | |||||
state.serialize_field("content", &self.content)?; | |||||
state.serialize_field("title", &self.meta.title)?; | |||||
state.serialize_field("description", &self.meta.description)?; | |||||
state.serialize_field("date", &self.meta.date)?; | |||||
if let Some(chrono_datetime) = self.meta.date() { | |||||
let d = chrono_datetime.date(); | |||||
state.serialize_field("year", &d.year())?; | |||||
state.serialize_field("month", &d.month())?; | |||||
state.serialize_field("day", &d.day())?; | |||||
} else { | |||||
state.serialize_field::<Option<usize>>("year", &None)?; | |||||
state.serialize_field::<Option<usize>>("month", &None)?; | |||||
state.serialize_field::<Option<usize>>("day", &None)?; | |||||
} | |||||
state.serialize_field("slug", &self.slug)?; | |||||
state.serialize_field("path", &self.path)?; | |||||
state.serialize_field("components", &self.components)?; | |||||
state.serialize_field("permalink", &self.permalink)?; | |||||
state.serialize_field("summary", &self.summary)?; | |||||
state.serialize_field("taxonomies", &self.meta.taxonomies)?; | |||||
state.serialize_field("extra", &self.meta.extra)?; | |||||
let (word_count, reading_time) = get_reading_analytics(&self.raw_content); | |||||
state.serialize_field("word_count", &word_count)?; | |||||
state.serialize_field("reading_time", &reading_time)?; | |||||
state.serialize_field("earlier", &self.earlier)?; | |||||
state.serialize_field("later", &self.later)?; | |||||
state.serialize_field("lighter", &self.lighter)?; | |||||
state.serialize_field("heavier", &self.heavier)?; | |||||
state.serialize_field("toc", &self.toc)?; | |||||
state.serialize_field("draft", &self.is_draft())?; | |||||
let assets = self.serialize_assets(); | |||||
state.serialize_field("assets", &assets)?; | |||||
state.end() | |||||
} | |||||
} | |||||
#[cfg(test)] | #[cfg(test)] | ||||
mod tests { | mod tests { | ||||
use std::collections::HashMap; | use std::collections::HashMap; | ||||
use std::fs::{create_dir, File}; | |||||
use std::io::Write; | use std::io::Write; | ||||
use std::fs::{File, create_dir}; | |||||
use std::path::Path; | use std::path::Path; | ||||
use tera::Tera; | |||||
use tempfile::tempdir; | |||||
use globset::{Glob, GlobSetBuilder}; | use globset::{Glob, GlobSetBuilder}; | ||||
use tempfile::tempdir; | |||||
use tera::Tera; | |||||
use config::Config; | |||||
use super::Page; | use super::Page; | ||||
use config::Config; | |||||
use front_matter::InsertAnchor; | use front_matter::InsertAnchor; | ||||
#[test] | #[test] | ||||
fn test_can_parse_a_valid_page() { | fn test_can_parse_a_valid_page() { | ||||
let content = r#" | let content = r#" | ||||
@@ -315,9 +319,9 @@ Hello world"#; | |||||
&HashMap::default(), | &HashMap::default(), | ||||
&Tera::default(), | &Tera::default(), | ||||
&Config::default(), | &Config::default(), | ||||
Path::new("something"), | |||||
InsertAnchor::None, | InsertAnchor::None, | ||||
).unwrap(); | |||||
) | |||||
.unwrap(); | |||||
assert_eq!(page.meta.title.unwrap(), "Hello".to_string()); | assert_eq!(page.meta.title.unwrap(), "Hello".to_string()); | ||||
assert_eq!(page.meta.slug.unwrap(), "hello-world".to_string()); | assert_eq!(page.meta.slug.unwrap(), "hello-world".to_string()); | ||||
@@ -419,17 +423,13 @@ Hello world"#; | |||||
+++ | +++ | ||||
+++ | +++ | ||||
Hello world | Hello world | ||||
<!-- more -->"#.to_string(); | |||||
<!-- more -->"# | |||||
.to_string(); | |||||
let res = Page::parse(Path::new("hello.md"), &content, &config); | let res = Page::parse(Path::new("hello.md"), &content, &config); | ||||
assert!(res.is_ok()); | assert!(res.is_ok()); | ||||
let mut page = res.unwrap(); | let mut page = res.unwrap(); | ||||
page.render_markdown( | |||||
&HashMap::default(), | |||||
&Tera::default(), | |||||
&config, | |||||
Path::new("something"), | |||||
InsertAnchor::None, | |||||
).unwrap(); | |||||
page.render_markdown(&HashMap::default(), &Tera::default(), &config, InsertAnchor::None) | |||||
.unwrap(); | |||||
assert_eq!(page.summary, Some("<p>Hello world</p>\n".to_string())); | assert_eq!(page.summary, Some("<p>Hello world</p>\n".to_string())); | ||||
} | } | ||||
@@ -447,10 +447,7 @@ Hello world | |||||
File::create(nested_path.join("graph.jpg")).unwrap(); | File::create(nested_path.join("graph.jpg")).unwrap(); | ||||
File::create(nested_path.join("fail.png")).unwrap(); | File::create(nested_path.join("fail.png")).unwrap(); | ||||
let res = Page::from_file( | |||||
nested_path.join("index.md").as_path(), | |||||
&Config::default(), | |||||
); | |||||
let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default()); | |||||
assert!(res.is_ok()); | assert!(res.is_ok()); | ||||
let page = res.unwrap(); | let page = res.unwrap(); | ||||
assert_eq!(page.file.parent, path.join("content").join("posts")); | assert_eq!(page.file.parent, path.join("content").join("posts")); | ||||
@@ -473,10 +470,7 @@ Hello world | |||||
File::create(nested_path.join("graph.jpg")).unwrap(); | File::create(nested_path.join("graph.jpg")).unwrap(); | ||||
File::create(nested_path.join("fail.png")).unwrap(); | File::create(nested_path.join("fail.png")).unwrap(); | ||||
let res = Page::from_file( | |||||
nested_path.join("index.md").as_path(), | |||||
&Config::default(), | |||||
); | |||||
let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default()); | |||||
assert!(res.is_ok()); | assert!(res.is_ok()); | ||||
let page = res.unwrap(); | let page = res.unwrap(); | ||||
assert_eq!(page.file.parent, path.join("content").join("posts")); | assert_eq!(page.file.parent, path.join("content").join("posts")); | ||||
@@ -504,14 +498,46 @@ Hello world | |||||
let mut config = Config::default(); | let mut config = Config::default(); | ||||
config.ignored_content_globset = Some(gsb.build().unwrap()); | config.ignored_content_globset = Some(gsb.build().unwrap()); | ||||
let res = Page::from_file( | |||||
nested_path.join("index.md").as_path(), | |||||
&config, | |||||
); | |||||
let res = Page::from_file(nested_path.join("index.md").as_path(), &config); | |||||
assert!(res.is_ok()); | assert!(res.is_ok()); | ||||
let page = res.unwrap(); | let page = res.unwrap(); | ||||
assert_eq!(page.assets.len(), 1); | assert_eq!(page.assets.len(), 1); | ||||
assert_eq!(page.assets[0].file_name().unwrap().to_str(), Some("graph.jpg")); | assert_eq!(page.assets[0].file_name().unwrap().to_str(), Some("graph.jpg")); | ||||
} | } | ||||
#[test] | |||||
fn can_get_date_from_filename() { | |||||
let config = Config::default(); | |||||
let content = r#" | |||||
+++ | |||||
+++ | |||||
Hello world | |||||
<!-- more -->"# | |||||
.to_string(); | |||||
let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config); | |||||
assert!(res.is_ok()); | |||||
let page = res.unwrap(); | |||||
assert_eq!(page.meta.date, Some("2018-10-08".to_string())); | |||||
assert_eq!(page.slug, "hello"); | |||||
} | |||||
#[test] | |||||
fn frontmatter_date_override_filename_date() { | |||||
let config = Config::default(); | |||||
let content = r#" | |||||
+++ | |||||
date = 2018-09-09 | |||||
+++ | |||||
Hello world | |||||
<!-- more -->"# | |||||
.to_string(); | |||||
let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config); | |||||
assert!(res.is_ok()); | |||||
let page = res.unwrap(); | |||||
assert_eq!(page.meta.date, Some("2018-09-09".to_string())); | |||||
assert_eq!(page.slug, "hello"); | |||||
} | |||||
} | } |
@@ -1,21 +1,20 @@ | |||||
use std::collections::HashMap; | use std::collections::HashMap; | ||||
use std::path::{Path, PathBuf}; | use std::path::{Path, PathBuf}; | ||||
use std::result::Result as StdResult; | |||||
use tera::{Tera, Context as TeraContext}; | |||||
use serde::ser::{SerializeStruct, self}; | |||||
use slotmap::Key; | |||||
use tera::{Context as TeraContext, Tera}; | |||||
use config::Config; | use config::Config; | ||||
use front_matter::{SectionFrontMatter, split_section_content}; | |||||
use errors::{Result, ResultExt}; | use errors::{Result, ResultExt}; | ||||
use utils::fs::{read_file, find_related_assets}; | |||||
use utils::templates::render_template; | |||||
use front_matter::{split_section_content, SectionFrontMatter}; | |||||
use rendering::{render_content, Header, RenderContext}; | |||||
use utils::fs::{find_related_assets, read_file}; | |||||
use utils::site::get_reading_analytics; | use utils::site::get_reading_analytics; | ||||
use rendering::{RenderContext, Header, render_content}; | |||||
use page::Page; | |||||
use file_info::FileInfo; | |||||
use utils::templates::render_template; | |||||
use content::file_info::FileInfo; | |||||
use content::ser::SerializingSection; | |||||
use library::Library; | |||||
#[derive(Clone, Debug, PartialEq)] | #[derive(Clone, Debug, PartialEq)] | ||||
pub struct Section { | pub struct Section { | ||||
@@ -35,14 +34,23 @@ pub struct Section { | |||||
pub content: String, | pub content: String, | ||||
/// All the non-md files we found next to the .md file | /// All the non-md files we found next to the .md file | ||||
pub assets: Vec<PathBuf>, | pub assets: Vec<PathBuf>, | ||||
/// All the non-md files we found next to the .md file as strings for use in templates | |||||
pub serialized_assets: Vec<String>, | |||||
/// All direct pages of that section | /// All direct pages of that section | ||||
pub pages: Vec<Page>, | |||||
pub pages: Vec<Key>, | |||||
/// All pages that cannot be sorted in this section | /// All pages that cannot be sorted in this section | ||||
pub ignored_pages: Vec<Page>, | |||||
pub ignored_pages: Vec<Key>, | |||||
/// The list of parent sections | |||||
pub ancestors: Vec<Key>, | |||||
/// All direct subsections | /// All direct subsections | ||||
pub subsections: Vec<Section>, | |||||
pub subsections: Vec<Key>, | |||||
/// Toc made from the headers of the markdown file | /// Toc made from the headers of the markdown file | ||||
pub toc: Vec<Header>, | pub toc: Vec<Header>, | ||||
/// How many words in the raw content | |||||
pub word_count: Option<usize>, | |||||
/// How long it would take to read the raw content. | |||||
/// See `get_reading_analytics` for how it is calculated | |||||
pub reading_time: Option<usize>, | |||||
} | } | ||||
impl Section { | impl Section { | ||||
@@ -52,16 +60,20 @@ impl Section { | |||||
Section { | Section { | ||||
file: FileInfo::new_section(file_path), | file: FileInfo::new_section(file_path), | ||||
meta, | meta, | ||||
ancestors: vec![], | |||||
path: "".to_string(), | path: "".to_string(), | ||||
components: vec![], | components: vec![], | ||||
permalink: "".to_string(), | permalink: "".to_string(), | ||||
raw_content: "".to_string(), | raw_content: "".to_string(), | ||||
assets: vec![], | assets: vec![], | ||||
serialized_assets: vec![], | |||||
content: "".to_string(), | content: "".to_string(), | ||||
pages: vec![], | pages: vec![], | ||||
ignored_pages: vec![], | ignored_pages: vec![], | ||||
subsections: vec![], | subsections: vec![], | ||||
toc: vec![], | toc: vec![], | ||||
word_count: None, | |||||
reading_time: None, | |||||
} | } | ||||
} | } | ||||
@@ -69,8 +81,13 @@ impl Section { | |||||
let (meta, content) = split_section_content(file_path, content)?; | let (meta, content) = split_section_content(file_path, content)?; | ||||
let mut section = Section::new(file_path, meta); | let mut section = Section::new(file_path, meta); | ||||
section.raw_content = content.clone(); | section.raw_content = content.clone(); | ||||
let (word_count, reading_time) = get_reading_analytics(§ion.raw_content); | |||||
section.word_count = Some(word_count); | |||||
section.reading_time = Some(reading_time); | |||||
section.path = format!("{}/", section.file.components.join("/")); | section.path = format!("{}/", section.file.components.join("/")); | ||||
section.components = section.path.split('/') | |||||
section.components = section | |||||
.path | |||||
.split('/') | |||||
.map(|p| p.to_string()) | .map(|p| p.to_string()) | ||||
.filter(|p| !p.is_empty()) | .filter(|p| !p.is_empty()) | ||||
.collect::<Vec<_>>(); | .collect::<Vec<_>>(); | ||||
@@ -95,45 +112,51 @@ impl Section { | |||||
// against the remaining path. Note that the current behaviour effectively means that | // against the remaining path. Note that the current behaviour effectively means that | ||||
// the `ignored_content` setting in the config file is limited to single-file glob | // the `ignored_content` setting in the config file is limited to single-file glob | ||||
// patterns (no "**" patterns). | // patterns (no "**" patterns). | ||||
section.assets = assets.into_iter() | |||||
.filter(|path| | |||||
match path.file_name() { | |||||
None => true, | |||||
Some(file) => !globset.is_match(file) | |||||
} | |||||
).collect(); | |||||
section.assets = assets | |||||
.into_iter() | |||||
.filter(|path| match path.file_name() { | |||||
None => true, | |||||
Some(file) => !globset.is_match(file), | |||||
}) | |||||
.collect(); | |||||
} else { | } else { | ||||
section.assets = assets; | section.assets = assets; | ||||
} | } | ||||
section.serialized_assets = section.serialize_assets(); | |||||
Ok(section) | Ok(section) | ||||
} | } | ||||
pub fn get_template_name(&self) -> String { | |||||
pub fn get_template_name(&self) -> &str { | |||||
match self.meta.template { | match self.meta.template { | ||||
Some(ref l) => l.to_string(), | |||||
Some(ref l) => l, | |||||
None => { | None => { | ||||
if self.is_index() { | if self.is_index() { | ||||
return "index.html".to_string(); | |||||
return "index.html"; | |||||
} | } | ||||
"section.html".to_string() | |||||
"section.html" | |||||
} | } | ||||
} | } | ||||
} | } | ||||
/// We need access to all pages' URLs to render links relative to content, | /// We need access to all pages' URLs to render links relative to content, | ||||
/// so this can't happen at the same time as parsing | /// so this can't happen at the same time as parsing | ||||
pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config, base_path: &Path) -> Result<()> { | |||||
pub fn render_markdown( | |||||
&mut self, | |||||
permalinks: &HashMap<String, String>, | |||||
tera: &Tera, | |||||
config: &Config, | |||||
) -> Result<()> { | |||||
let mut context = RenderContext::new( | let mut context = RenderContext::new( | ||||
tera, | tera, | ||||
config, | config, | ||||
&self.permalink, | &self.permalink, | ||||
permalinks, | permalinks, | ||||
base_path, | |||||
self.meta.insert_anchor_links, | self.meta.insert_anchor_links, | ||||
); | ); | ||||
context.tera_context.add("section", self); | |||||
context.tera_context.insert("section", &SerializingSection::from_section_basic(self, None)); | |||||
let res = render_content(&self.raw_content, &context) | let res = render_content(&self.raw_content, &context) | ||||
.chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?; | .chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?; | ||||
@@ -143,16 +166,16 @@ impl Section { | |||||
} | } | ||||
/// Renders the page using the default layout, unless specified in front-matter | /// Renders the page using the default layout, unless specified in front-matter | ||||
pub fn render_html(&self, tera: &Tera, config: &Config) -> Result<String> { | |||||
pub fn render_html(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> { | |||||
let tpl_name = self.get_template_name(); | let tpl_name = self.get_template_name(); | ||||
let mut context = TeraContext::new(); | let mut context = TeraContext::new(); | ||||
context.add("config", config); | |||||
context.add("section", self); | |||||
context.add("current_url", &self.permalink); | |||||
context.add("current_path", &self.path); | |||||
context.insert("config", config); | |||||
context.insert("current_url", &self.permalink); | |||||
context.insert("current_path", &self.path); | |||||
context.insert("section", &self.to_serialized(library)); | |||||
render_template(&tpl_name, tera, &context, &config.theme) | |||||
render_template(tpl_name, tera, &context, &config.theme) | |||||
.chain_err(|| format!("Failed to render section '{}'", self.file.path.display())) | .chain_err(|| format!("Failed to render section '{}'", self.file.path.display())) | ||||
} | } | ||||
@@ -161,49 +184,22 @@ impl Section { | |||||
self.file.components.is_empty() | self.file.components.is_empty() | ||||
} | } | ||||
/// Returns all the paths of the pages belonging to that section | |||||
pub fn all_pages_path(&self) -> Vec<PathBuf> { | |||||
let mut paths = vec![]; | |||||
paths.extend(self.pages.iter().map(|p| p.file.path.clone())); | |||||
paths.extend(self.ignored_pages.iter().map(|p| p.file.path.clone())); | |||||
paths | |||||
} | |||||
/// Whether the page given belongs to that section | |||||
pub fn is_child_page(&self, path: &PathBuf) -> bool { | |||||
self.all_pages_path().contains(path) | |||||
} | |||||
/// Creates a vector of asset URLs. | /// Creates a vector of asset URLs. | ||||
fn serialize_assets(&self) -> Vec<String> { | fn serialize_assets(&self) -> Vec<String> { | ||||
self.assets.iter() | |||||
self.assets | |||||
.iter() | |||||
.filter_map(|asset| asset.file_name()) | .filter_map(|asset| asset.file_name()) | ||||
.filter_map(|filename| filename.to_str()) | .filter_map(|filename| filename.to_str()) | ||||
.map(|filename| self.path.clone() + filename) | .map(|filename| self.path.clone() + filename) | ||||
.collect() | .collect() | ||||
} | } | ||||
} | |||||
impl ser::Serialize for Section { | |||||
fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: ser::Serializer { | |||||
let mut state = serializer.serialize_struct("section", 13)?; | |||||
state.serialize_field("content", &self.content)?; | |||||
state.serialize_field("permalink", &self.permalink)?; | |||||
state.serialize_field("title", &self.meta.title)?; | |||||
state.serialize_field("description", &self.meta.description)?; | |||||
state.serialize_field("extra", &self.meta.extra)?; | |||||
state.serialize_field("path", &self.path)?; | |||||
state.serialize_field("components", &self.components)?; | |||||
state.serialize_field("permalink", &self.permalink)?; | |||||
state.serialize_field("pages", &self.pages)?; | |||||
state.serialize_field("subsections", &self.subsections)?; | |||||
let (word_count, reading_time) = get_reading_analytics(&self.raw_content); | |||||
state.serialize_field("word_count", &word_count)?; | |||||
state.serialize_field("reading_time", &reading_time)?; | |||||
state.serialize_field("toc", &self.toc)?; | |||||
let assets = self.serialize_assets(); | |||||
state.serialize_field("assets", &assets)?; | |||||
state.end() | |||||
pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> { | |||||
SerializingSection::from_section(self, library) | |||||
} | |||||
pub fn to_serialized_basic<'a>(&'a self, library: &'a Library) -> SerializingSection<'a> { | |||||
SerializingSection::from_section_basic(self, Some(library)) | |||||
} | } | ||||
} | } | ||||
@@ -213,30 +209,34 @@ impl Default for Section { | |||||
Section { | Section { | ||||
file: FileInfo::default(), | file: FileInfo::default(), | ||||
meta: SectionFrontMatter::default(), | meta: SectionFrontMatter::default(), | ||||
ancestors: vec![], | |||||
path: "".to_string(), | path: "".to_string(), | ||||
components: vec![], | components: vec![], | ||||
permalink: "".to_string(), | permalink: "".to_string(), | ||||
raw_content: "".to_string(), | raw_content: "".to_string(), | ||||
assets: vec![], | assets: vec![], | ||||
serialized_assets: vec![], | |||||
content: "".to_string(), | content: "".to_string(), | ||||
pages: vec![], | pages: vec![], | ||||
ignored_pages: vec![], | ignored_pages: vec![], | ||||
subsections: vec![], | subsections: vec![], | ||||
toc: vec![], | toc: vec![], | ||||
reading_time: None, | |||||
word_count: None, | |||||
} | } | ||||
} | } | ||||
} | } | ||||
#[cfg(test)] | #[cfg(test)] | ||||
mod tests { | mod tests { | ||||
use std::fs::{create_dir, File}; | |||||
use std::io::Write; | use std::io::Write; | ||||
use std::fs::{File, create_dir}; | |||||
use tempfile::tempdir; | |||||
use globset::{Glob, GlobSetBuilder}; | use globset::{Glob, GlobSetBuilder}; | ||||
use tempfile::tempdir; | |||||
use config::Config; | |||||
use super::Section; | use super::Section; | ||||
use config::Config; | |||||
#[test] | #[test] | ||||
fn section_with_assets_gets_right_info() { | fn section_with_assets_gets_right_info() { | ||||
@@ -252,10 +252,7 @@ mod tests { | |||||
File::create(nested_path.join("graph.jpg")).unwrap(); | File::create(nested_path.join("graph.jpg")).unwrap(); | ||||
File::create(nested_path.join("fail.png")).unwrap(); | File::create(nested_path.join("fail.png")).unwrap(); | ||||
let res = Section::from_file( | |||||
nested_path.join("_index.md").as_path(), | |||||
&Config::default(), | |||||
); | |||||
let res = Section::from_file(nested_path.join("_index.md").as_path(), &Config::default()); | |||||
assert!(res.is_ok()); | assert!(res.is_ok()); | ||||
let section = res.unwrap(); | let section = res.unwrap(); | ||||
assert_eq!(section.assets.len(), 3); | assert_eq!(section.assets.len(), 3); | ||||
@@ -281,10 +278,7 @@ mod tests { | |||||
let mut config = Config::default(); | let mut config = Config::default(); | ||||
config.ignored_content_globset = Some(gsb.build().unwrap()); | config.ignored_content_globset = Some(gsb.build().unwrap()); | ||||
let res = Section::from_file( | |||||
nested_path.join("_index.md").as_path(), | |||||
&config, | |||||
); | |||||
let res = Section::from_file(nested_path.join("_index.md").as_path(), &config); | |||||
assert!(res.is_ok()); | assert!(res.is_ok()); | ||||
let page = res.unwrap(); | let page = res.unwrap(); |
@@ -0,0 +1,234 @@ | |||||
//! What we are sending to the templates when rendering them | |||||
use std::collections::HashMap; | |||||
use tera::{Map, Value}; | |||||
use content::{Page, Section}; | |||||
use library::Library; | |||||
use rendering::Header; | |||||
#[derive(Clone, Debug, PartialEq, Serialize)] | |||||
pub struct SerializingPage<'a> { | |||||
relative_path: &'a str, | |||||
content: &'a str, | |||||
permalink: &'a str, | |||||
slug: &'a str, | |||||
ancestors: Vec<String>, | |||||
title: &'a Option<String>, | |||||
description: &'a Option<String>, | |||||
date: &'a Option<String>, | |||||
year: Option<i32>, | |||||
month: Option<u32>, | |||||
day: Option<u32>, | |||||
taxonomies: &'a HashMap<String, Vec<String>>, | |||||
extra: &'a Map<String, Value>, | |||||
path: &'a str, | |||||
components: &'a [String], | |||||
summary: &'a Option<String>, | |||||
word_count: Option<usize>, | |||||
reading_time: Option<usize>, | |||||
toc: &'a [Header], | |||||
assets: &'a [String], | |||||
draft: bool, | |||||
lighter: Option<Box<SerializingPage<'a>>>, | |||||
heavier: Option<Box<SerializingPage<'a>>>, | |||||
earlier: Option<Box<SerializingPage<'a>>>, | |||||
later: Option<Box<SerializingPage<'a>>>, | |||||
} | |||||
impl<'a> SerializingPage<'a> { | |||||
/// Grabs all the data from a page, including sibling pages | |||||
pub fn from_page(page: &'a Page, library: &'a Library) -> Self { | |||||
let mut year = None; | |||||
let mut month = None; | |||||
let mut day = None; | |||||
if let Some(d) = page.meta.datetime_tuple { | |||||
year = Some(d.0); | |||||
month = Some(d.1); | |||||
day = Some(d.2); | |||||
} | |||||
let pages = library.pages(); | |||||
let lighter = page | |||||
.lighter | |||||
.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); | |||||
let heavier = page | |||||
.heavier | |||||
.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); | |||||
let earlier = page | |||||
.earlier | |||||
.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); | |||||
let later = page | |||||
.later | |||||
.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); | |||||
let ancestors = page | |||||
.ancestors | |||||
.iter() | |||||
.map(|k| library.get_section_by_key(*k).file.relative.clone()) | |||||
.collect(); | |||||
SerializingPage { | |||||
relative_path: &page.file.relative, | |||||
ancestors, | |||||
content: &page.content, | |||||
permalink: &page.permalink, | |||||
slug: &page.slug, | |||||
title: &page.meta.title, | |||||
description: &page.meta.description, | |||||
extra: &page.meta.extra, | |||||
date: &page.meta.date, | |||||
year, | |||||
month, | |||||
day, | |||||
taxonomies: &page.meta.taxonomies, | |||||
path: &page.path, | |||||
components: &page.components, | |||||
summary: &page.summary, | |||||
word_count: page.word_count, | |||||
reading_time: page.reading_time, | |||||
toc: &page.toc, | |||||
assets: &page.serialized_assets, | |||||
draft: page.is_draft(), | |||||
lighter, | |||||
heavier, | |||||
earlier, | |||||
later, | |||||
} | |||||
} | |||||
/// Same as from_page but does not fill sibling pages | |||||
pub fn from_page_basic(page: &'a Page, library: Option<&'a Library>) -> Self { | |||||
let mut year = None; | |||||
let mut month = None; | |||||
let mut day = None; | |||||
if let Some(d) = page.meta.datetime_tuple { | |||||
year = Some(d.0); | |||||
month = Some(d.1); | |||||
day = Some(d.2); | |||||
} | |||||
let ancestors = if let Some(ref lib) = library { | |||||
page.ancestors | |||||
.iter() | |||||
.map(|k| lib.get_section_by_key(*k).file.relative.clone()) | |||||
.collect() | |||||
} else { | |||||
vec![] | |||||
}; | |||||
SerializingPage { | |||||
relative_path: &page.file.relative, | |||||
ancestors, | |||||
content: &page.content, | |||||
permalink: &page.permalink, | |||||
slug: &page.slug, | |||||
title: &page.meta.title, | |||||
description: &page.meta.description, | |||||
extra: &page.meta.extra, | |||||
date: &page.meta.date, | |||||
year, | |||||
month, | |||||
day, | |||||
taxonomies: &page.meta.taxonomies, | |||||
path: &page.path, | |||||
components: &page.components, | |||||
summary: &page.summary, | |||||
word_count: page.word_count, | |||||
reading_time: page.reading_time, | |||||
toc: &page.toc, | |||||
assets: &page.serialized_assets, | |||||
draft: page.is_draft(), | |||||
lighter: None, | |||||
heavier: None, | |||||
earlier: None, | |||||
later: None, | |||||
} | |||||
} | |||||
} | |||||
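// Design note: nearly every field above is an `&'a` borrow into the Page
// (and Library), so building a SerializingPage for a template context is
// cheap; only `ancestors` and the boxed sibling pages allocate.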
#[derive(Clone, Debug, PartialEq, Serialize)] | |||||
pub struct SerializingSection<'a> { | |||||
relative_path: &'a str, | |||||
content: &'a str, | |||||
permalink: &'a str, | |||||
ancestors: Vec<String>, | |||||
title: &'a Option<String>, | |||||
description: &'a Option<String>, | |||||
extra: &'a HashMap<String, Value>, | |||||
path: &'a str, | |||||
components: &'a [String], | |||||
word_count: Option<usize>, | |||||
reading_time: Option<usize>, | |||||
toc: &'a [Header], | |||||
assets: &'a [String], | |||||
pages: Vec<SerializingPage<'a>>, | |||||
subsections: Vec<&'a str>, | |||||
} | |||||
impl<'a> SerializingSection<'a> { | |||||
pub fn from_section(section: &'a Section, library: &'a Library) -> Self { | |||||
let mut pages = Vec::with_capacity(section.pages.len()); | |||||
let mut subsections = Vec::with_capacity(section.subsections.len()); | |||||
for k in §ion.pages { | |||||
pages.push(library.get_page_by_key(*k).to_serialized(library)); | |||||
} | |||||
for k in §ion.subsections { | |||||
subsections.push(library.get_section_path_by_key(*k)); | |||||
} | |||||
let ancestors = section | |||||
.ancestors | |||||
.iter() | |||||
.map(|k| library.get_section_by_key(*k).file.relative.clone()) | |||||
.collect(); | |||||
SerializingSection { | |||||
relative_path: §ion.file.relative, | |||||
ancestors, | |||||
content: §ion.content, | |||||
permalink: §ion.permalink, | |||||
title: §ion.meta.title, | |||||
description: §ion.meta.description, | |||||
extra: §ion.meta.extra, | |||||
path: §ion.path, | |||||
components: §ion.components, | |||||
word_count: section.word_count, | |||||
reading_time: section.reading_time, | |||||
toc: §ion.toc, | |||||
assets: §ion.serialized_assets, | |||||
pages, | |||||
subsections, | |||||
} | |||||
} | |||||
/// Same as from_section but doesn't fetch pages and sections | |||||
pub fn from_section_basic(section: &'a Section, library: Option<&'a Library>) -> Self { | |||||
let ancestors = if let Some(ref lib) = library { | |||||
section | |||||
.ancestors | |||||
.iter() | |||||
.map(|k| lib.get_section_by_key(*k).file.relative.clone()) | |||||
.collect() | |||||
} else { | |||||
vec![] | |||||
}; | |||||
SerializingSection { | |||||
relative_path: §ion.file.relative, | |||||
ancestors, | |||||
content: §ion.content, | |||||
permalink: §ion.permalink, | |||||
title: §ion.meta.title, | |||||
description: §ion.meta.description, | |||||
extra: §ion.meta.extra, | |||||
path: §ion.path, | |||||
components: §ion.components, | |||||
word_count: section.word_count, | |||||
reading_time: section.reading_time, | |||||
toc: §ion.toc, | |||||
assets: §ion.serialized_assets, | |||||
pages: vec![], | |||||
subsections: vec![], | |||||
} | |||||
} | |||||
} |
@@ -0,0 +1,39 @@ | |||||
extern crate serde; | |||||
extern crate slug; | |||||
extern crate tera; | |||||
#[macro_use] | |||||
extern crate serde_derive; | |||||
extern crate chrono; | |||||
extern crate rayon; | |||||
extern crate slotmap; | |||||
#[macro_use] | |||||
extern crate lazy_static; | |||||
extern crate regex; | |||||
#[cfg(test)] | |||||
extern crate globset; | |||||
#[cfg(test)] | |||||
extern crate tempfile; | |||||
#[cfg(test)] | |||||
extern crate toml; | |||||
extern crate config; | |||||
extern crate front_matter; | |||||
extern crate rendering; | |||||
extern crate utils; | |||||
#[macro_use] | |||||
extern crate errors; | |||||
mod content; | |||||
mod library; | |||||
mod pagination; | |||||
mod sorting; | |||||
mod taxonomies; | |||||
pub use slotmap::{DenseSlotMap, Key}; | |||||
pub use content::{Page, Section, SerializingPage, SerializingSection}; | |||||
pub use library::Library; | |||||
pub use pagination::Paginator; | |||||
pub use sorting::sort_actual_pages_by_date; | |||||
pub use taxonomies::{find_taxonomies, Taxonomy, TaxonomyItem}; |
@@ -0,0 +1,354 @@ | |||||
use std::collections::{HashMap, HashSet}; | |||||
use std::path::{Path, PathBuf}; | |||||
use slotmap::{DenseSlotMap, Key}; | |||||
use front_matter::SortBy; | |||||
use content::{Page, Section}; | |||||
use sorting::{find_siblings, sort_pages_by_date, sort_pages_by_weight}; | |||||
/// Houses everything about pages and sections | |||||
/// Think of it as a database where each page and section has an id (Key here) | |||||
/// that can be used to find the actual value | |||||
/// Sections and pages can then refer to other elements by those keys, which are very cheap to | |||||
/// copy. | |||||
/// We can assume the keys always exist, since removing a page/section deletes all references | |||||
/// to that key. | |||||
#[derive(Debug)] | |||||
pub struct Library { | |||||
/// All the pages of the site | |||||
pages: DenseSlotMap<Page>, | |||||
/// All the sections of the site | |||||
sections: DenseSlotMap<Section>, | |||||
/// A mapping path -> key for pages so we can easily get their key | |||||
paths_to_pages: HashMap<PathBuf, Key>, | |||||
/// A mapping path -> key for sections so we can easily get their key | |||||
pub paths_to_sections: HashMap<PathBuf, Key>, | |||||
} | |||||
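// A minimal usage sketch of the key-based design described above, using
// only methods defined in this file (`Page::default()` stands in for a
// real parsed page):
//
//     let mut library = Library::new(16, 4);
//     let key = library.insert_page(Page::default());
//     // Elements refer to each other through cheap-to-copy `Key`s:
//     assert!(library.pages().get(key).is_some());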
impl Library { | |||||
pub fn new(cap_pages: usize, cap_sections: usize) -> Self { | |||||
Library { | |||||
pages: DenseSlotMap::with_capacity(cap_pages), | |||||
sections: DenseSlotMap::with_capacity(cap_sections), | |||||
paths_to_pages: HashMap::with_capacity(cap_pages), | |||||
paths_to_sections: HashMap::with_capacity(cap_sections), | |||||
} | |||||
} | |||||
/// Add a section and return its Key | |||||
pub fn insert_section(&mut self, section: Section) -> Key { | |||||
let path = section.file.path.clone(); | |||||
let key = self.sections.insert(section); | |||||
self.paths_to_sections.insert(path, key); | |||||
key | |||||
} | |||||
/// Add a page and return its Key | |||||
pub fn insert_page(&mut self, page: Page) -> Key { | |||||
let path = page.file.path.clone(); | |||||
let key = self.pages.insert(page); | |||||
self.paths_to_pages.insert(path, key); | |||||
key | |||||
} | |||||
pub fn pages(&self) -> &DenseSlotMap<Page> { | |||||
&self.pages | |||||
} | |||||
pub fn pages_mut(&mut self) -> &mut DenseSlotMap<Page> { | |||||
&mut self.pages | |||||
} | |||||
pub fn pages_values(&self) -> Vec<&Page> { | |||||
self.pages.values().collect::<Vec<_>>() | |||||
} | |||||
pub fn sections(&self) -> &DenseSlotMap<Section> { | |||||
&self.sections | |||||
} | |||||
pub fn sections_mut(&mut self) -> &mut DenseSlotMap<Section> { | |||||
&mut self.sections | |||||
} | |||||
pub fn sections_values(&self) -> Vec<&Section> { | |||||
self.sections.values().collect::<Vec<_>>() | |||||
} | |||||
    /// Find the direct subsections of each section, if there are any, | |||||
    /// as well as the pages belonging to each section | |||||
pub fn populate_sections(&mut self) { | |||||
let (root_path, index_path) = self | |||||
.sections | |||||
.values() | |||||
.find(|s| s.is_index()) | |||||
.map(|s| (s.file.parent.clone(), s.file.path.clone())) | |||||
.unwrap(); | |||||
let root_key = self.paths_to_sections[&index_path]; | |||||
        // We are going to build both the ancestors and the subsections maps for each section in one go | |||||
let mut ancestors: HashMap<PathBuf, Vec<_>> = HashMap::new(); | |||||
let mut subsections: HashMap<PathBuf, Vec<_>> = HashMap::new(); | |||||
for section in self.sections.values_mut() { | |||||
            // Make sure the pages of a section are empty, since this can be called many times on `serve` | |||||
section.pages = vec![]; | |||||
section.ignored_pages = vec![]; | |||||
if let Some(ref grand_parent) = section.file.grand_parent { | |||||
subsections | |||||
.entry(grand_parent.join("_index.md")) | |||||
.or_insert_with(|| vec![]) | |||||
.push(section.file.path.clone()); | |||||
} | |||||
// Index has no ancestors, no need to go through it | |||||
if section.is_index() { | |||||
ancestors.insert(section.file.path.clone(), vec![]); | |||||
continue; | |||||
} | |||||
let mut path = root_path.clone(); | |||||
// Index section is the first ancestor of every single section | |||||
let mut parents = vec![root_key]; | |||||
for component in §ion.file.components { | |||||
path = path.join(component); | |||||
// Skip itself | |||||
if path == section.file.parent { | |||||
continue; | |||||
} | |||||
if let Some(section_key) = self.paths_to_sections.get(&path.join("_index.md")) { | |||||
parents.push(*section_key); | |||||
} | |||||
} | |||||
ancestors.insert(section.file.path.clone(), parents); | |||||
} | |||||
for (key, page) in &mut self.pages { | |||||
let mut parent_section_path = page.file.parent.join("_index.md"); | |||||
while let Some(section_key) = self.paths_to_sections.get(&parent_section_path) { | |||||
let parent_is_transparent; | |||||
// We need to get a reference to a section later so keep the scope of borrowing small | |||||
{ | |||||
let mut section = self.sections.get_mut(*section_key).unwrap(); | |||||
section.pages.push(key); | |||||
parent_is_transparent = section.meta.transparent; | |||||
} | |||||
page.ancestors = | |||||
ancestors.get(&parent_section_path).cloned().unwrap_or_else(|| vec![]); | |||||
// Don't forget to push the actual parent | |||||
page.ancestors.push(*section_key); | |||||
                // Find the page template if one of its parents has `page_template` set | |||||
                // Stops after the first one found; keep in mind page.ancestors | |||||
                // is [index, ..., parent], so we need to reverse it first | |||||
if page.meta.template.is_none() { | |||||
for ancestor in page.ancestors.iter().rev() { | |||||
let s = self.sections.get(*ancestor).unwrap(); | |||||
if s.meta.page_template.is_some() { | |||||
page.meta.template = s.meta.page_template.clone(); | |||||
break; | |||||
} | |||||
} | |||||
} | |||||
if !parent_is_transparent { | |||||
break; | |||||
} | |||||
                // We've added `_index.md`, so if we are here we need to go up twice | |||||
match parent_section_path.clone().parent().unwrap().parent() { | |||||
Some(parent) => parent_section_path = parent.join("_index.md"), | |||||
None => break, | |||||
} | |||||
} | |||||
} | |||||
        self.sort_sections_pages();

        let sections = self.paths_to_sections.clone();
        let mut sections_weight = HashMap::new();
        for (key, section) in &self.sections {
            sections_weight.insert(key, section.meta.weight);
        }

        for section in self.sections.values_mut() {
            if let Some(ref children) = subsections.get(&section.file.path) {
                let mut children: Vec<_> = children.iter().map(|p| sections[p]).collect();
                children.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b]));
                section.subsections = children;
            }
            section.ancestors =
                ancestors.get(&section.file.path).cloned().unwrap_or_else(|| vec![]);
        }
    }
    /// Sort all sections pages
    pub fn sort_sections_pages(&mut self) {
        let mut updates = HashMap::new();
        for (key, section) in &self.sections {
            let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by {
                SortBy::None => continue,
                SortBy::Date => {
                    let data = section
                        .pages
                        .iter()
                        .map(|k| {
                            if let Some(page) = self.pages.get(*k) {
                                (k, page.meta.datetime, page.permalink.as_ref())
                            } else {
                                unreachable!("Sorting got an unknown page")
                            }
                        })
                        .collect();

                    sort_pages_by_date(data)
                }
                SortBy::Weight => {
                    let data = section
                        .pages
                        .iter()
                        .map(|k| {
                            if let Some(page) = self.pages.get(*k) {
                                (k, page.meta.weight, page.permalink.as_ref())
                            } else {
                                unreachable!("Sorting got an unknown page")
                            }
                        })
                        .collect();

                    sort_pages_by_weight(data)
                }
            };
            updates.insert(key, (sorted_pages, cannot_be_sorted_pages, section.meta.sort_by));
        }
        for (key, (sorted, cannot_be_sorted, sort_by)) in updates {
            // Find siblings between sorted pages first
            let with_siblings = find_siblings(
                sorted
                    .iter()
                    .map(|k| {
                        if let Some(page) = self.pages.get(*k) {
                            (k, page.is_draft())
                        } else {
                            unreachable!("Sorting got an unknown page")
                        }
                    })
                    .collect(),
            );

            for (k2, val1, val2) in with_siblings {
                if let Some(page) = self.pages.get_mut(k2) {
                    match sort_by {
                        SortBy::Date => {
                            page.earlier = val2;
                            page.later = val1;
                        }
                        SortBy::Weight => {
                            page.lighter = val1;
                            page.heavier = val2;
                        }
                        SortBy::None => unreachable!("Impossible to find siblings in SortBy::None"),
                    }
                } else {
                    unreachable!("Sorting got an unknown page")
                }
            }

            if let Some(s) = self.sections.get_mut(key) {
                s.pages = sorted;
                s.ignored_pages = cannot_be_sorted;
            }
        }
    }
    /// Find all the orphan pages: pages that are in a folder without an `_index.md`
    pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
        let pages_in_sections =
            self.sections.values().flat_map(|s| &s.pages).collect::<HashSet<_>>();

        self.pages
            .iter()
            .filter(|(key, _)| !pages_in_sections.contains(&key))
            .map(|(_, page)| page)
            .collect()
    }

    pub fn find_parent_section<P: AsRef<Path>>(&self, path: P) -> Option<&Section> {
        let page_key = self.paths_to_pages[path.as_ref()];
        for s in self.sections.values() {
            if s.pages.contains(&page_key) {
                return Some(s);
            }
        }

        None
    }

    /// Only used in tests
    pub fn get_section_key<P: AsRef<Path>>(&self, path: P) -> Option<&Key> {
        self.paths_to_sections.get(path.as_ref())
    }

    pub fn get_section<P: AsRef<Path>>(&self, path: P) -> Option<&Section> {
        self.sections.get(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default())
    }

    pub fn get_section_mut<P: AsRef<Path>>(&mut self, path: P) -> Option<&mut Section> {
        self.sections
            .get_mut(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default())
    }

    pub fn get_section_by_key(&self, key: Key) -> &Section {
        self.sections.get(key).unwrap()
    }

    pub fn get_section_mut_by_key(&mut self, key: Key) -> &mut Section {
        self.sections.get_mut(key).unwrap()
    }

    pub fn get_section_path_by_key(&self, key: Key) -> &str {
        &self.get_section_by_key(key).file.relative
    }

    pub fn get_page<P: AsRef<Path>>(&self, path: P) -> Option<&Page> {
        self.pages.get(self.paths_to_pages.get(path.as_ref()).cloned().unwrap_or_default())
    }

    pub fn get_page_by_key(&self, key: Key) -> &Page {
        self.pages.get(key).unwrap()
    }

    pub fn get_page_mut_by_key(&mut self, key: Key) -> &mut Page {
        self.pages.get_mut(key).unwrap()
    }

    pub fn remove_section<P: AsRef<Path>>(&mut self, path: P) -> Option<Section> {
        if let Some(k) = self.paths_to_sections.remove(path.as_ref()) {
            self.sections.remove(k)
        } else {
            None
        }
    }

    pub fn remove_page<P: AsRef<Path>>(&mut self, path: P) -> Option<Page> {
        if let Some(k) = self.paths_to_pages.remove(path.as_ref()) {
            self.pages.remove(k)
        } else {
            None
        }
    }

    /// Used in rebuild, to check if we know it already
    pub fn contains_section<P: AsRef<Path>>(&self, path: P) -> bool {
        self.paths_to_sections.contains_key(path.as_ref())
    }

    /// Used in rebuild, to check if we know it already
    pub fn contains_page<P: AsRef<Path>>(&self, path: P) -> bool {
        self.paths_to_pages.contains_key(path.as_ref())
    }
}
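To make the `Library` indirection above easier to follow, here is a minimal standalone sketch of the same path -> key -> value lookup. It is an illustration only: a plain `Vec` index stands in for the real `slotmap` key, and the struct and method names are made up. Note that the real `get_page` resolves a missing path to the default (null) key, which the slotmap then fails to find; the sketch gets the same `None` with `and_then`.

use std::collections::HashMap;
use std::path::PathBuf;

struct MiniLibrary {
    pages: Vec<String>,                      // stands in for DenseSlotMap<Key, Page>
    paths_to_pages: HashMap<PathBuf, usize>, // path -> key
}

impl MiniLibrary {
    fn insert_page(&mut self, path: PathBuf, page: String) {
        let key = self.pages.len();
        self.pages.push(page);
        self.paths_to_pages.insert(path, key);
    }

    // Mirrors the shape of `Library::get_page`: path to key, then key to page
    fn get_page(&self, path: &PathBuf) -> Option<&String> {
        self.paths_to_pages.get(path).and_then(|k| self.pages.get(*k))
    }
}

fn main() {
    let mut lib = MiniLibrary { pages: vec![], paths_to_pages: HashMap::new() };
    lib.insert_page(PathBuf::from("content/hello.md"), "Hello".to_string());
    assert!(lib.get_page(&PathBuf::from("content/hello.md")).is_some());
    assert!(lib.get_page(&PathBuf::from("content/missing.md")).is_none());
}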
@@ -1,26 +1,15 @@
#[macro_use]
extern crate serde_derive;
extern crate tera;

extern crate errors;
extern crate config;
extern crate content;
extern crate utils;
extern crate taxonomies;
#[cfg(test)]
extern crate front_matter;

use std::collections::HashMap;

use tera::{Tera, Context, to_value, Value};
use slotmap::Key;
use tera::{to_value, Context, Tera, Value};

use errors::{Result, ResultExt};
use config::Config;
use content::{Page, Section};
use errors::{Result, ResultExt};
use utils::templates::render_template;
use taxonomies::{Taxonomy, TaxonomyItem};
use content::{Section, SerializingPage, SerializingSection};
use library::Library;
use taxonomies::{Taxonomy, TaxonomyItem};

#[derive(Clone, Debug, PartialEq)]
enum PaginationRoot<'a> {
@@ -28,46 +17,34 @@ enum PaginationRoot<'a> {
    Taxonomy(&'a Taxonomy),
}

/// A list of all the pages in the paginator with their index and links
#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct Pager<'a> {
    /// The page number in the paginator (1-indexed)
    index: usize,
    pub index: usize,
    /// Permalink to that page
    permalink: String,
    /// Path to that page
    path: String,
    /// All pages for the pager
    pages: Vec<&'a Page>,
    pages: Vec<SerializingPage<'a>>,
}

impl<'a> Pager<'a> {
    fn new(index: usize, pages: Vec<&'a Page>, permalink: String, path: String) -> Pager<'a> {
        Pager {
            index,
            permalink,
            path,
            pages,
        }
    }

    /// Returns a manually cloned Pager with the pages removed
    /// for use as template context
    fn clone_without_pages(&self) -> Pager<'a> {
        Pager {
            index: self.index,
            permalink: self.permalink.clone(),
            path: self.path.clone(),
            pages: vec![],
        }
    fn new(
        index: usize,
        pages: Vec<SerializingPage<'a>>,
        permalink: String,
        path: String,
    ) -> Pager<'a> {
        Pager { index, permalink, path, pages }
    }
}

#[derive(Clone, Debug, PartialEq)]
pub struct Paginator<'a> {
    /// All pages in the section
    all_pages: &'a [Page],
    /// All pages in the section/taxonomy
    all_pages: &'a [Key],
    /// Pages split in chunks of `paginate_by`
    pub pagers: Vec<Pager<'a>>,
    /// How many content pages on a paginated page at max
@@ -78,69 +55,88 @@ pub struct Paginator<'a> {
    pub permalink: String,
    path: String,
    pub paginate_path: String,
    template: String,
    /// Whether this is the index section, we need it for the template name
    is_index: bool,
}

impl<'a> Paginator<'a> {
    /// Create a new paginator from a section
    /// It will always at least create one pager (the first) even if there are no pages to paginate
    pub fn from_section(all_pages: &'a [Page], section: &'a Section) -> Paginator<'a> {
    /// It will always at least create one pager (the first) even if there are not enough pages to paginate
    pub fn from_section(section: &'a Section, library: &'a Library) -> Paginator<'a> {
        let paginate_by = section.meta.paginate_by.unwrap();
        let mut paginator = Paginator {
            all_pages,
            pagers: vec![],
            all_pages: &section.pages,
            pagers: Vec::with_capacity(section.pages.len() / paginate_by),
            paginate_by,
            root: PaginationRoot::Section(section),
            permalink: section.permalink.clone(),
            path: section.path.clone(),
            paginate_path: section.meta.paginate_path.clone(),
            is_index: section.is_index(),
            template: section.get_template_name().to_string(),
        };

        paginator.fill_pagers();
        paginator.fill_pagers(library);
        paginator
    }

    /// Create a new paginator from a taxonomy
    /// It will always at least create one pager (the first) even if there are no pages to paginate
    pub fn from_taxonomy(taxonomy: &'a Taxonomy, item: &'a TaxonomyItem) -> Paginator<'a> {
    /// It will always at least create one pager (the first) even if there are not enough pages to paginate
    pub fn from_taxonomy(
        taxonomy: &'a Taxonomy,
        item: &'a TaxonomyItem,
        library: &'a Library,
    ) -> Paginator<'a> {
        let paginate_by = taxonomy.kind.paginate_by.unwrap();
        let mut paginator = Paginator {
            all_pages: &item.pages,
            pagers: vec![],
            pagers: Vec::with_capacity(item.pages.len() / paginate_by),
            paginate_by,
            root: PaginationRoot::Taxonomy(taxonomy),
            permalink: item.permalink.clone(),
            path: format!("{}/{}", taxonomy.kind.name, item.slug),
            paginate_path: taxonomy.kind.paginate_path.clone().unwrap_or_else(|| "pages".to_string()),
            paginate_path: taxonomy
                .kind
                .paginate_path
                .clone()
                .unwrap_or_else(|| "pages".to_string()),
            is_index: false,
            template: format!("{}/single.html", taxonomy.kind.name),
        };

        paginator.fill_pagers();
        paginator.fill_pagers(library);
        paginator
    }
    fn fill_pagers(&mut self) {
    fn fill_pagers(&mut self, library: &'a Library) {
        // the list of pagers
        let mut pages = vec![];
        // the pages in the current pager
        let mut current_page = vec![];

        for page in self.all_pages {
            current_page.push(page);
        for key in self.all_pages {
            let page = library.get_page_by_key(*key);
            if page.is_draft() {
                continue;
            }
            current_page.push(page.to_serialized_basic(library));

            if current_page.len() == self.paginate_by {
                pages.push(current_page);
                current_page = vec![];
            }
        }

        if !current_page.is_empty() {
            pages.push(current_page);
        }

        let mut pagers = vec![];
        for (index, page) in pages.iter().enumerate() {
        for (index, page) in pages.into_iter().enumerate() {
            // First page has no pagination path
            if index == 0 {
                pagers.push(Pager::new(1, page.clone(), self.permalink.clone(), self.path.clone()));
                pagers.push(Pager::new(1, page, self.permalink.clone(), self.path.clone()));
                continue;
            }
@@ -149,20 +145,13 @@ impl<'a> Paginator<'a> {
            let pager_path = if self.is_index {
                page_path
            } else if self.path.ends_with('/') {
                format!("{}{}", self.path, page_path)
            } else {
                if self.path.ends_with("/") {
                    format!("{}{}", self.path, page_path)
                } else {
                    format!("{}/{}", self.path, page_path)
                }
                format!("{}/{}", self.path, page_path)
            };

            pagers.push(Pager::new(
                index + 1,
                page.clone(),
                permalink,
                pager_path,
            ));
            pagers.push(Pager::new(index + 1, page, permalink, pager_path));
        }

        // We always have the index one at least
@@ -183,51 +172,55 @@ impl<'a> Paginator<'a> {
        paginator.insert("first", to_value(&self.permalink).unwrap());
        let last_pager = &self.pagers[self.pagers.len() - 1];
        paginator.insert("last", to_value(&last_pager.permalink).unwrap());
        paginator.insert(
            "pagers",
            to_value(
                &self.pagers.iter().map(|p| p.clone_without_pages()).collect::<Vec<_>>()
            ).unwrap(),
        );

        // Variables for this specific page
        if pager_index > 0 {
            let prev_pager = &self.pagers[pager_index - 1];
            paginator.insert("previous", to_value(&prev_pager.permalink).unwrap());
        } else {
            paginator.insert("previous", to_value::<Option<()>>(None).unwrap());
            paginator.insert("previous", Value::Null);
        }

        if pager_index < self.pagers.len() - 1 {
            let next_pager = &self.pagers[pager_index + 1];
            paginator.insert("next", to_value(&next_pager.permalink).unwrap());
        } else {
            paginator.insert("next", to_value::<Option<()>>(None).unwrap());
            paginator.insert("next", Value::Null);
        }
        paginator.insert("number_pagers", to_value(&self.pagers.len()).unwrap());
        paginator.insert(
            "base_url",
            to_value(&format!("{}{}/", self.permalink, self.paginate_path)).unwrap(),
        );
        paginator.insert("pages", to_value(&current_pager.pages).unwrap());
        paginator.insert("current_index", to_value(current_pager.index).unwrap());

        paginator
    }
    pub fn render_pager(&self, pager: &Pager, config: &Config, tera: &Tera) -> Result<String> {
    pub fn render_pager(
        &self,
        pager: &Pager,
        config: &Config,
        tera: &Tera,
        library: &Library,
    ) -> Result<String> {
        let mut context = Context::new();
        context.add("config", &config);
        let template_name = match self.root {
        context.insert("config", &config);
        match self.root {
            PaginationRoot::Section(s) => {
                context.add("section", &s);
                s.get_template_name()
                context
                    .insert("section", &SerializingSection::from_section_basic(s, Some(library)));
            }
            PaginationRoot::Taxonomy(t) => {
                context.add("taxonomy", &t.kind);
                format!("{}/single.html", t.kind.name)
                context.insert("taxonomy", &t.kind);
            }
        };
        context.add("current_url", &pager.permalink);
        context.add("current_path", &pager.path);
        context.add("paginator", &self.build_paginator_context(pager));
        context.insert("current_url", &pager.permalink);
        context.insert("current_path", &pager.path);
        context.insert("paginator", &self.build_paginator_context(pager));

        render_template(&template_name, tera, &context, &config.theme)
        render_template(&self.template, tera, &context, &config.theme)
            .chain_err(|| format!("Failed to render pager {}", pager.index))
    }
}

@@ -236,9 +229,10 @@ impl<'a> Paginator<'a> {
mod tests {
    use tera::to_value;

    use front_matter::SectionFrontMatter;
    use content::{Page, Section};
    use config::Taxonomy as TaxonomyConfig;
    use content::{Page, Section};
    use front_matter::SectionFrontMatter;
    use library::Library;
    use taxonomies::{Taxonomy, TaxonomyItem};

    use super::Paginator;
@@ -258,15 +252,25 @@ mod tests {
        s
    }

    fn create_library(is_index: bool) -> (Section, Library) {
        let mut library = Library::new(3, 0);
        library.insert_page(Page::default());
        library.insert_page(Page::default());
        library.insert_page(Page::default());
        let mut draft = Page::default();
        draft.meta.draft = true;
        library.insert_page(draft);
        let mut section = create_section(is_index);
        section.pages = library.pages().keys().collect();
        library.insert_section(section.clone());

        (section, library)
    }

    #[test]
    fn test_can_create_paginator() {
        let pages = vec![
            Page::default(),
            Page::default(),
            Page::default(),
        ];
        let section = create_section(false);
        let paginator = Paginator::from_section(pages.as_slice(), &section);
        let (section, library) = create_library(false);
        let paginator = Paginator::from_section(&section, &library);
        assert_eq!(paginator.pagers.len(), 2);

        assert_eq!(paginator.pagers[0].index, 1);
@@ -282,13 +286,8 @@
    #[test]
    fn test_can_create_paginator_for_index() {
        let pages = vec![
            Page::default(),
            Page::default(),
            Page::default(),
        ];
        let section = create_section(true);
        let paginator = Paginator::from_section(pages.as_slice(), &section);
        let (section, library) = create_library(true);
        let paginator = Paginator::from_section(&section, &library);
        assert_eq!(paginator.pagers.len(), 2);

        assert_eq!(paginator.pagers[0].index, 1);
@@ -304,13 +303,8 @@
    #[test]
    fn test_can_build_paginator_context() {
        let pages = vec![
            Page::default(),
            Page::default(),
            Page::default(),
        ];
        let section = create_section(false);
        let paginator = Paginator::from_section(pages.as_slice(), &section);
        let (section, library) = create_library(false);
        let paginator = Paginator::from_section(&section, &library);
        assert_eq!(paginator.pagers.len(), 2);

        let context = paginator.build_paginator_context(&paginator.pagers[0]);
@@ -332,11 +326,7 @@
    #[test]
    fn test_can_create_paginator_for_taxonomy() {
        let pages = vec![
            Page::default(),
            Page::default(),
            Page::default(),
        ];
        let (_, library) = create_library(false);
        let taxonomy_def = TaxonomyConfig {
            name: "tags".to_string(),
            paginate_by: Some(2),
@@ -346,10 +336,10 @@
            name: "Something".to_string(),
            slug: "something".to_string(),
            permalink: "https://vincent.is/tags/something/".to_string(),
            pages,
            pages: library.pages().keys().collect(),
        };
        let taxonomy = Taxonomy { kind: taxonomy_def, items: vec![taxonomy_item.clone()] };
        let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item);
        let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item, &library);
        assert_eq!(paginator.pagers.len(), 2);

        assert_eq!(paginator.pagers[0].index, 1);
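As a rough illustration of what `fill_pagers` does with the page keys, here is a self-contained sketch of the chunking logic. It is an assumption-laden reduction: plain `usize` keys stand in for `slotmap` keys, and the draft check is simplified to a lookup list instead of `page.is_draft()`.

// Chunk page keys into pagers of `paginate_by` items, skipping drafts,
// the way `fill_pagers` does.
fn chunk_pages(keys: &[usize], drafts: &[usize], paginate_by: usize) -> Vec<Vec<usize>> {
    let mut pagers = vec![];
    let mut current = vec![];
    for key in keys {
        if drafts.contains(key) {
            continue; // drafts never show up in a paginated list
        }
        current.push(*key);
        if current.len() == paginate_by {
            pagers.push(current);
            current = vec![];
        }
    }
    // A final, partially filled pager
    if !current.is_empty() {
        pagers.push(current);
    }
    pagers
}

fn main() {
    // 5 pages, one draft, 2 per pager -> pagers [0, 2] and [3, 4]
    let pagers = chunk_pages(&[0, 1, 2, 3, 4], &[1], 2);
    assert_eq!(pagers, vec![vec![0, 2], vec![3, 4]]);
}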
@@ -0,0 +1,223 @@
use std::cmp::Ordering;

use chrono::NaiveDateTime;
use rayon::prelude::*;
use slotmap::Key;

use content::Page;

/// Used by the RSS feed.
/// Here so we don't have to import sorting stuff in the `site` crate.
pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
    let ord = b.meta.datetime.unwrap().cmp(&a.meta.datetime.unwrap());
    if ord == Ordering::Equal {
        a.permalink.cmp(&b.permalink)
    } else {
        ord
    }
}

/// Takes a list of (page key, date, permalink) and sorts them by date if possible.
/// Pages without a date are put in the unsortable bucket.
/// The permalink is used to break ties.
pub fn sort_pages_by_date(pages: Vec<(&Key, Option<NaiveDateTime>, &str)>) -> (Vec<Key>, Vec<Key>) {
    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =
        pages.into_par_iter().partition(|page| page.1.is_some());

    can_be_sorted.par_sort_unstable_by(|a, b| {
        let ord = b.1.unwrap().cmp(&a.1.unwrap());
        if ord == Ordering::Equal {
            a.2.cmp(&b.2)
        } else {
            ord
        }
    });

    (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
}
/// Takes a list of (page key, weight, permalink) and sorts them by weight if possible.
/// Pages without a weight are put in the unsortable bucket.
/// The permalink is used to break ties.
pub fn sort_pages_by_weight(pages: Vec<(&Key, Option<usize>, &str)>) -> (Vec<Key>, Vec<Key>) {
    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =
        pages.into_par_iter().partition(|page| page.1.is_some());

    can_be_sorted.par_sort_unstable_by(|a, b| {
        let ord = a.1.unwrap().cmp(&b.1.unwrap());
        if ord == Ordering::Equal {
            a.2.cmp(&b.2)
        } else {
            ord
        }
    });

    (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
}

/// Find the lighter/heavier and earlier/later pages for all pages having a date/weight
/// and that are not drafts.
pub fn find_siblings(sorted: Vec<(&Key, bool)>) -> Vec<(Key, Option<Key>, Option<Key>)> {
    let mut res = Vec::with_capacity(sorted.len());
    let length = sorted.len();

    for (i, (key, is_draft)) in sorted.iter().enumerate() {
        if *is_draft {
            res.push((**key, None, None));
            continue;
        }
        let mut with_siblings = (**key, None, None);

        if i > 0 {
            let mut j = i;
            loop {
                if j == 0 {
                    break;
                }
                j -= 1;
                if sorted[j].1 {
                    continue;
                }
                // lighter / later
                with_siblings.1 = Some(*sorted[j].0);
                break;
            }
        }

        if i < length - 1 {
            let mut j = i;
            loop {
                if j == length - 1 {
                    break;
                }
                j += 1;
                if sorted[j].1 {
                    continue;
                }
                // heavier / earlier
                with_siblings.2 = Some(*sorted[j].0);
                break;
            }
        }

        res.push(with_siblings);
    }

    res
}
#[cfg(test)]
mod tests {
    use slotmap::DenseSlotMap;

    use super::{find_siblings, sort_pages_by_date, sort_pages_by_weight};
    use content::Page;
    use front_matter::PageFrontMatter;

    fn create_page_with_date(date: &str) -> Page {
        let mut front_matter = PageFrontMatter::default();
        front_matter.date = Some(date.to_string());
        front_matter.date_to_datetime();
        Page::new("content/hello.md", front_matter)
    }

    fn create_page_with_weight(weight: usize) -> Page {
        let mut front_matter = PageFrontMatter::default();
        front_matter.weight = Some(weight);
        Page::new("content/hello.md", front_matter)
    }

    #[test]
    fn can_sort_by_dates() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_date("2018-01-01");
        let key1 = dense.insert(page1.clone());
        let page2 = create_page_with_date("2017-01-01");
        let key2 = dense.insert(page2.clone());
        let page3 = create_page_with_date("2019-01-01");
        let key3 = dense.insert(page3.clone());

        let input = vec![
            (&key1, page1.meta.datetime, page1.permalink.as_ref()),
            (&key2, page2.meta.datetime, page2.permalink.as_ref()),
            (&key3, page3.meta.datetime, page3.permalink.as_ref()),
        ];
        let (pages, _) = sort_pages_by_date(input);
        // Should be sorted by date
        assert_eq!(pages[0], key3);
        assert_eq!(pages[1], key1);
        assert_eq!(pages[2], key2);
    }

    #[test]
    fn can_sort_by_weight() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_weight(2);
        let key1 = dense.insert(page1.clone());
        let page2 = create_page_with_weight(3);
        let key2 = dense.insert(page2.clone());
        let page3 = create_page_with_weight(1);
        let key3 = dense.insert(page3.clone());

        let input = vec![
            (&key1, page1.meta.weight, page1.permalink.as_ref()),
            (&key2, page2.meta.weight, page2.permalink.as_ref()),
            (&key3, page3.meta.weight, page3.permalink.as_ref()),
        ];
        let (pages, _) = sort_pages_by_weight(input);
        // Should be sorted by weight
        assert_eq!(pages[0], key3);
        assert_eq!(pages[1], key1);
        assert_eq!(pages[2], key2);
    }

    #[test]
    fn ignore_page_with_missing_field() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_weight(2);
        let key1 = dense.insert(page1.clone());
        let page2 = create_page_with_weight(3);
        let key2 = dense.insert(page2.clone());
        let page3 = create_page_with_date("2019-01-01");
        let key3 = dense.insert(page3.clone());

        let input = vec![
            (&key1, page1.meta.weight, page1.permalink.as_ref()),
            (&key2, page2.meta.weight, page2.permalink.as_ref()),
            (&key3, page3.meta.weight, page3.permalink.as_ref()),
        ];

        let (pages, unsorted) = sort_pages_by_weight(input);
        assert_eq!(pages.len(), 2);
        assert_eq!(unsorted.len(), 1);
    }

    #[test]
    fn can_find_siblings() {
        let mut dense = DenseSlotMap::new();
        let page1 = create_page_with_weight(1);
        let key1 = dense.insert(page1.clone());
        let page2 = create_page_with_weight(2);
        let key2 = dense.insert(page2.clone());
        let page3 = create_page_with_weight(3);
        let key3 = dense.insert(page3.clone());

        let input =
            vec![(&key1, page1.is_draft()), (&key2, page2.is_draft()), (&key3, page3.is_draft())];

        let pages = find_siblings(input);

        assert_eq!(pages[0].1, None);
        assert_eq!(pages[0].2, Some(key2));

        assert_eq!(pages[1].1, Some(key1));
        assert_eq!(pages[1].2, Some(key3));

        assert_eq!(pages[2].1, Some(key2));
        assert_eq!(pages[2].2, None);
    }
}
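For context, a hypothetical caller of `sort_actual_pages_by_date` (the RSS path) would pass the function straight to `sort_by`. This standalone sketch fakes `Page` with just the two fields the comparator reads, using an `i64` where the real code has `Option<NaiveDateTime>`; only the comparator's shape is taken from the source.

use std::cmp::Ordering;

struct Page { datetime: i64, permalink: String }

// Same shape as the real comparator: newest first, permalink as tie-breaker
fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering {
    let ord = b.datetime.cmp(&a.datetime);
    if ord == Ordering::Equal { a.permalink.cmp(&b.permalink) } else { ord }
}

fn main() {
    let p1 = Page { datetime: 1, permalink: "/a".into() };
    let p2 = Page { datetime: 2, permalink: "/b".into() };
    let mut pages: Vec<&Page> = vec![&p1, &p2];
    pages.sort_by(sort_actual_pages_by_date);
    assert_eq!(pages[0].permalink, "/b"); // most recent first
}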
@@ -1,61 +1,94 @@
#[macro_use]
extern crate serde_derive;
extern crate tera;
extern crate slug;
#[macro_use]
extern crate errors;
extern crate config;
extern crate content;
extern crate front_matter;
extern crate utils;

use std::collections::HashMap;

use slotmap::Key;
use slug::slugify;
use tera::{Context, Tera};

use config::{Config, Taxonomy as TaxonomyConfig};
use errors::{Result, ResultExt};
use content::{Page, sort_pages};
use front_matter::SortBy;
use utils::templates::render_template;
use content::SerializingPage;
use library::Library;
use sorting::sort_pages_by_date;

#[derive(Debug, Clone, PartialEq, Serialize)]
struct SerializedTaxonomyItem<'a> {
    name: &'a str,
    slug: &'a str,
    permalink: &'a str,
    pages: Vec<SerializingPage<'a>>,
}

impl<'a> SerializedTaxonomyItem<'a> {
    pub fn from_item(item: &'a TaxonomyItem, library: &'a Library) -> Self {
        let mut pages = vec![];

        for key in &item.pages {
            let page = library.get_page_by_key(*key);
            pages.push(page.to_serialized_basic(library));
        }

        SerializedTaxonomyItem {
            name: &item.name,
            slug: &item.slug,
            permalink: &item.permalink,
            pages,
        }
    }
}

/// A tag or category
#[derive(Debug, Clone, Serialize, PartialEq)]
/// A taxonomy with all its pages
#[derive(Debug, Clone, PartialEq)]
pub struct TaxonomyItem {
    pub name: String,
    pub slug: String,
    pub permalink: String,
    pub pages: Vec<Page>,
    pub pages: Vec<Key>,
}

impl TaxonomyItem {
    pub fn new(name: &str, path: &str, config: &Config, pages: Vec<Page>) -> TaxonomyItem {
    pub fn new(name: &str, path: &str, config: &Config, keys: Vec<Key>, library: &Library) -> Self {
        // Taxonomies are almost always used for blogs so we filter by dates,
        // and it's not like we can sort things across sections by anything other
        // than dates
        let (mut pages, ignored_pages) = sort_pages(pages, SortBy::Date);
        let data = keys
            .iter()
            .map(|k| {
                if let Some(page) = library.pages().get(*k) {
                    (k, page.meta.datetime, page.permalink.as_ref())
                } else {
                    unreachable!("Sorting got an unknown page")
                }
            })
            .collect();
        let (mut pages, ignored_pages) = sort_pages_by_date(data);
        let slug = slugify(name);
        let permalink = {
            config.make_permalink(&format!("/{}/{}", path, slug))
        };
        let permalink = config.make_permalink(&format!("/{}/{}", path, slug));

        // We still append pages without dates at the end
        pages.extend(ignored_pages);

        TaxonomyItem {
            name: name.to_string(),
            permalink,
            slug,
            pages,
        }
        TaxonomyItem { name: name.to_string(), permalink, slug, pages }
    }
}

/// All the tags or categories
#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct SerializedTaxonomy<'a> {
    kind: &'a TaxonomyConfig,
    items: Vec<SerializedTaxonomyItem<'a>>,
}

impl<'a> SerializedTaxonomy<'a> {
    pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self {
        let items: Vec<SerializedTaxonomyItem> =
            taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
        SerializedTaxonomy { kind: &taxonomy.kind, items }
    }
}

/// All different taxonomies we have and their content
#[derive(Debug, Clone, PartialEq)]
pub struct Taxonomy {
    pub kind: TaxonomyConfig,
    // this vec is sorted by the count of items
@@ -63,19 +96,19 @@ pub struct Taxonomy {
}

impl Taxonomy {
    fn new(kind: TaxonomyConfig, config: &Config, items: HashMap<String, Vec<Page>>) -> Taxonomy {
    fn new(
        kind: TaxonomyConfig,
        config: &Config,
        items: HashMap<String, Vec<Key>>,
        library: &Library,
    ) -> Taxonomy {
        let mut sorted_items = vec![];
        for (name, pages) in items {
            sorted_items.push(
                TaxonomyItem::new(&name, &kind.name, config, pages)
            );
            sorted_items.push(TaxonomyItem::new(&name, &kind.name, config, pages, library));
        }
        sorted_items.sort_by(|a, b| a.name.cmp(&b.name));
        Taxonomy {
            kind,
            items: sorted_items,
        }
        Taxonomy { kind, items: sorted_items }
    }

    pub fn len(&self) -> usize {
@@ -86,32 +119,52 @@ impl Taxonomy {
        self.len() == 0
    }

    pub fn render_term(&self, item: &TaxonomyItem, tera: &Tera, config: &Config) -> Result<String> {
    pub fn render_term(
        &self,
        item: &TaxonomyItem,
        tera: &Tera,
        config: &Config,
        library: &Library,
    ) -> Result<String> {
        let mut context = Context::new();
        context.add("config", config);
        context.add("term", item);
        context.add("taxonomy", &self.kind);
        context.add("current_url", &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)));
        context.add("current_path", &format!("/{}/{}", self.kind.name, item.slug));
        context.insert("config", config);
        context.insert("term", &SerializedTaxonomyItem::from_item(item, library));
        context.insert("taxonomy", &self.kind);
        context.insert(
            "current_url",
            &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)),
        );
        context.insert("current_path", &format!("/{}/{}", self.kind.name, item.slug));

        render_template(&format!("{}/single.html", self.kind.name), tera, &context, &config.theme)
            .chain_err(|| format!("Failed to render single term {} page.", self.kind.name))
    }

    pub fn render_all_terms(&self, tera: &Tera, config: &Config) -> Result<String> {
    pub fn render_all_terms(
        &self,
        tera: &Tera,
        config: &Config,
        library: &Library,
    ) -> Result<String> {
        let mut context = Context::new();
        context.add("config", config);
        context.add("terms", &self.items);
        context.add("taxonomy", &self.kind);
        context.add("current_url", &config.make_permalink(&self.kind.name));
        context.add("current_path", &self.kind.name);
        context.insert("config", config);
        let terms: Vec<SerializedTaxonomyItem> =
            self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
        context.insert("terms", &terms);
        context.insert("taxonomy", &self.kind);
        context.insert("current_url", &config.make_permalink(&self.kind.name));
        context.insert("current_path", &self.kind.name);

        render_template(&format!("{}/list.html", self.kind.name), tera, &context, &config.theme)
            .chain_err(|| format!("Failed to render a list of {} page.", self.kind.name))
    }

    pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> {
        SerializedTaxonomy::from_taxonomy(self, library)
    }
}

pub fn find_taxonomies(config: &Config, all_pages: &[Page]) -> Result<Vec<Taxonomy>> {
pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonomy>> {
    let taxonomies_def = {
        let mut m = HashMap::new();
        for t in &config.taxonomies {
@@ -121,23 +174,30 @@ pub fn find_taxonomies(config: &Config, all_pages: &[Page]) -> Result<Vec<Taxono
    };

    let mut all_taxonomies = HashMap::new();
    // Find all the taxonomies first
    for page in all_pages {
    for (key, page) in library.pages() {
        // Drafts are not part of taxonomies
        if page.is_draft() {
            continue;
        }

        for (name, val) in &page.meta.taxonomies {
            if taxonomies_def.contains_key(name) {
                all_taxonomies
                    .entry(name)
                    .or_insert_with(|| HashMap::new());
                all_taxonomies.entry(name).or_insert_with(HashMap::new);

                for v in val {
                    all_taxonomies.get_mut(name)
                    all_taxonomies
                        .get_mut(name)
                        .unwrap()
                        .entry(v.to_string())
                        .or_insert_with(|| vec![])
                        .push(page.clone());
                        .push(key);
                }
            } else {
                bail!("Page `{}` has taxonomy `{}` which is not defined in config.toml", page.file.path.display(), name);
                bail!(
                    "Page `{}` has taxonomy `{}` which is not defined in config.toml",
                    page.file.path.display(),
                    name
                );
            }
        }
    }

@@ -145,47 +205,54 @@ pub fn find_taxonomies(config: &Config, all_pages: &[Page]) -> Result<Vec<Taxono
    let mut taxonomies = vec![];

    for (name, taxo) in all_taxonomies {
        taxonomies.push(Taxonomy::new(taxonomies_def[name].clone(), config, taxo));
        taxonomies.push(Taxonomy::new(taxonomies_def[name].clone(), config, taxo, library));
    }

    Ok(taxonomies)
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::collections::HashMap;

    use config::{Config, Taxonomy};
    use config::{Config, Taxonomy as TaxonomyConfig};
    use content::Page;
    use library::Library;

    #[test]
    fn can_make_taxonomies() {
        let mut config = Config::default();
        let mut library = Library::new(2, 0);

        config.taxonomies = vec![
            Taxonomy { name: "categories".to_string(), ..Taxonomy::default() },
            Taxonomy { name: "tags".to_string(), ..Taxonomy::default() },
            Taxonomy { name: "authors".to_string(), ..Taxonomy::default() },
            TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
            TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() },
        ];

        let mut page1 = Page::default();
        let mut taxo_page1 = HashMap::new();
        taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
        taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]);
        page1.meta.taxonomies = taxo_page1;
        library.insert_page(page1);

        let mut page2 = Page::default();
        let mut taxo_page2 = HashMap::new();
        taxo_page2.insert("tags".to_string(), vec!["rust".to_string(), "js".to_string()]);
        taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]);
        page2.meta.taxonomies = taxo_page2;
        library.insert_page(page2);

        let mut page3 = Page::default();
        let mut taxo_page3 = HashMap::new();
        taxo_page3.insert("tags".to_string(), vec!["js".to_string()]);
        taxo_page3.insert("authors".to_string(), vec!["Vincent Prouillet".to_string()]);
        page3.meta.taxonomies = taxo_page3;
        let pages = vec![page1, page2, page3];
        library.insert_page(page3);

        let taxonomies = find_taxonomies(&config, &pages).unwrap();
        let taxonomies = find_taxonomies(&config, &library).unwrap();

        let (tags, categories, authors) = {
            let mut t = None;
            let mut c = None;
@@ -226,25 +293,33 @@ mod tests {
        assert_eq!(categories.items[1].name, "Programming tutorials");
        assert_eq!(categories.items[1].slug, "programming-tutorials");
        assert_eq!(categories.items[1].permalink, "http://a-website.com/categories/programming-tutorials/");
        assert_eq!(
            categories.items[1].permalink,
            "http://a-website.com/categories/programming-tutorials/"
        );
        assert_eq!(categories.items[1].pages.len(), 1);
    }

    #[test]
    fn errors_on_unknown_taxonomy() {
        let mut config = Config::default();
        config.taxonomies = vec![
            Taxonomy { name: "authors".to_string(), ..Taxonomy::default() },
        ];
        let mut library = Library::new(2, 0);
        config.taxonomies =
            vec![TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }];

        let mut page1 = Page::default();
        let mut taxo_page1 = HashMap::new();
        taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
        page1.meta.taxonomies = taxo_page1;
        library.insert_page(page1);

        let taxonomies = find_taxonomies(&config, &vec![page1]);
        let taxonomies = find_taxonomies(&config, &library);
        assert!(taxonomies.is_err());
        let err = taxonomies.unwrap_err();
        // no path as this is created by Default
        assert_eq!(
            err.description(),
            "Page `` has taxonomy `tags` which is not defined in config.toml"
        );
    }
}
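The grouping step of `find_taxonomies` can be hard to read through the nested `entry` calls; this reduced sketch shows the same bucketing on plain data. The tuple types and the `group` function name are illustrative stand-ins for `Page` metadata and `slotmap` keys.

use std::collections::HashMap;

// For each page, each (taxonomy name -> values) entry buckets the page key
// under every value, e.g. page 0 tagged "rust" and "db" lands in both buckets.
fn group(pages: &[(usize, Vec<(&str, Vec<&str>)>)]) -> HashMap<String, HashMap<String, Vec<usize>>> {
    let mut all: HashMap<String, HashMap<String, Vec<usize>>> = HashMap::new();
    for (key, taxonomies) in pages {
        for (name, values) in taxonomies {
            let buckets = all.entry(name.to_string()).or_insert_with(HashMap::new);
            for v in values {
                buckets.entry(v.to_string()).or_insert_with(Vec::new).push(*key);
            }
        }
    }
    all
}

fn main() {
    let pages = vec![
        (0, vec![("tags", vec!["rust", "db"])]),
        (1, vec![("tags", vec!["rust"])]),
    ];
    let all = group(&pages);
    assert_eq!(all["tags"]["rust"], vec![0, 1]);
    assert_eq!(all["tags"]["db"], vec![0]);
}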
@@ -4,5 +4,5 @@ version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

[dependencies]
reqwest = "0.8"
reqwest = "0.9"
lazy_static = "1"
@@ -2,8 +2,8 @@ extern crate reqwest;
#[macro_use]
extern crate lazy_static;

use reqwest::header::{qitem, Accept, Headers};
use reqwest::{mime, StatusCode};
use reqwest::header::{HeaderMap, ACCEPT};
use reqwest::StatusCode;

use std::collections::HashMap;
use std::error::Error;
use std::sync::{Arc, RwLock};
@@ -54,21 +54,16 @@ pub fn check_url(url: &str) -> LinkResult {
        }
    }

    let mut headers = Headers::new();
    headers.set(Accept(vec![qitem(mime::TEXT_HTML), qitem(mime::STAR_STAR)]));
    let mut headers = HeaderMap::new();
    headers.insert(ACCEPT, "text/html".parse().unwrap());
    headers.append(ACCEPT, "*/*".parse().unwrap());

    let client = reqwest::Client::new();

    // Need to actually do the link checking
    let res = match client.get(url).headers(headers).send() {
        Ok(response) => LinkResult {
            code: Some(response.status()),
            error: None,
        },
        Err(e) => LinkResult {
            code: None,
            error: Some(e.description().to_string()),
        },
        Ok(response) => LinkResult { code: Some(response.status()), error: None },
        Err(e) => LinkResult { code: None, error: Some(e.description().to_string()) },
    };

    LINKS.write().unwrap().insert(url.to_string(), res.clone());
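The header change above is the visible part of the reqwest 0.8 -> 0.9 migration: typed headers (`Accept(vec![qitem(...)])`) were replaced by the `http` crate's `HeaderMap`, where a repeated header is expressed with `insert` plus `append`. A minimal sketch of just that part, assuming reqwest 0.9:

extern crate reqwest;

use reqwest::header::{HeaderMap, ACCEPT};

// Build the same two-valued Accept header as the link checker:
// insert sets the first value, append adds another under the same name.
fn accept_headers() -> HeaderMap {
    let mut headers = HeaderMap::new();
    headers.insert(ACCEPT, "text/html".parse().unwrap());
    headers.append(ACCEPT, "*/*".parse().unwrap());
    headers
}

fn main() {
    // Two ACCEPT values: the inserted one plus the appended one
    assert_eq!(accept_headers().get_all(ACCEPT).iter().count(), 2);
}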
@@ -1,18 +0,0 @@
[package]
name = "pagination"
version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

[dependencies]
tera = "0.11"
serde = "1"
serde_derive = "1"

errors = { path = "../errors" }
config = { path = "../config" }
content = { path = "../content" }
utils = { path = "../utils" }
taxonomies = { path = "../taxonomies" }

[dev-dependencies]
front_matter = { path = "../front_matter" }
@@ -6,8 +6,7 @@ authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
[dependencies]
errors = { path = "../errors" }
front_matter = { path = "../front_matter" }
highlighting = { path = "../highlighting" }
content = { path = "../content" }
library = { path = "../library" }
site = { path = "../site" }

[dev-dependencies]
@@ -1,28 +1,15 @@
extern crate site;
#[macro_use]
extern crate errors;
extern crate content;
extern crate front_matter;
extern crate library;

use std::path::{Path, Component};
use std::path::{Component, Path};

use errors::Result;
use site::Site;
use content::{Page, Section};
use front_matter::{PageFrontMatter, SectionFrontMatter};

/// Finds the section that contains the page given if there is one
pub fn find_parent_section<'a>(site: &'a Site, page: &Page) -> Option<&'a Section> {
    for section in site.sections.values() {
        if section.is_child_page(&page.file.path) {
            return Some(section);
        }
    }

    None
}
use library::{Page, Section};
use site::Site;

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum PageChangesNeeded {
@@ -44,18 +31,27 @@ pub enum SectionChangesNeeded {
    RenderWithPages,
    /// Setting `render` to false
    Delete,
    /// Changing `transparent`
    Transparent,
}

/// Evaluates all the params in the front matter that changed so we can do the smallest
/// delta in the serve command
/// Order matters as the actions will be done in insertion order
fn find_section_front_matter_changes(current: &SectionFrontMatter, new: &SectionFrontMatter) -> Vec<SectionChangesNeeded> {
fn find_section_front_matter_changes(
    current: &SectionFrontMatter,
    new: &SectionFrontMatter,
) -> Vec<SectionChangesNeeded> {
    let mut changes_needed = vec![];

    if current.sort_by != new.sort_by {
        changes_needed.push(SectionChangesNeeded::Sort);
    }

    if current.transparent != new.transparent {
        changes_needed.push(SectionChangesNeeded::Transparent);
    }

    // We want to hide the section
    // TODO: what to do on redirect_path change?
    if current.render && !new.render {
@@ -66,7 +62,8 @@ fn find_section_front_matter_changes(current: &SectionFrontMatter, new: &Section
    if current.paginate_by != new.paginate_by
        || current.paginate_path != new.paginate_path
        || current.insert_anchor_links != new.insert_anchor_links {
        || current.insert_anchor_links != new.insert_anchor_links
    {
        changes_needed.push(SectionChangesNeeded::RenderWithPages);
        // Nothing else we can do
        return changes_needed;
@@ -80,14 +77,18 @@ fn find_section_front_matter_changes(current: &SectionFrontMatter, new: &Section
/// Evaluates all the params in the front matter that changed so we can do the smallest
/// delta in the serve command
/// Order matters as the actions will be done in insertion order
fn find_page_front_matter_changes(current: &PageFrontMatter, other: &PageFrontMatter) -> Vec<PageChangesNeeded> {
fn find_page_front_matter_changes(
    current: &PageFrontMatter,
    other: &PageFrontMatter,
) -> Vec<PageChangesNeeded> {
    let mut changes_needed = vec![];

    if current.taxonomies != other.taxonomies {
        changes_needed.push(PageChangesNeeded::Taxonomies);
    }

    if current.date != other.date || current.order != other.order || current.weight != other.weight {
    if current.date != other.date || current.order != other.order || current.weight != other.weight
    {
        changes_needed.push(PageChangesNeeded::Sort);
    }
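Both `find_*_front_matter_changes` functions follow the same compare-and-collect pattern: diff the old and new values field by field and emit actions in the order they must run. Here is a reduced sketch of that pattern; the enum and the two-field front matter are made up for illustration, not the crate's types.

// Compare-and-collect: each differing field pushes the cheapest action
// that covers it, in execution order (sort before render).
#[derive(Debug, PartialEq)]
enum Change { Sort, Render }

struct FrontMatter { weight: Option<usize>, title: String }

fn find_changes(current: &FrontMatter, new: &FrontMatter) -> Vec<Change> {
    let mut changes = vec![];
    if current.weight != new.weight {
        changes.push(Change::Sort); // sort first so a later render sees the new order
    }
    if current.title != new.title {
        changes.push(Change::Render);
    }
    changes
}

fn main() {
    let old = FrontMatter { weight: Some(1), title: "a".into() };
    let new = FrontMatter { weight: Some(2), title: "a".into() };
    assert_eq!(find_changes(&old, &new), vec![Change::Sort]);
}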
@@ -98,145 +99,130 @@ fn find_page_front_matter_changes(current: &PageFrontMatter, other: &PageFrontMa | |||||
/// Handles a path deletion: could be a page, a section, a folder | /// Handles a path deletion: could be a page, a section, a folder | ||||
fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> { | fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> { | ||||
// Ignore the event if this path was not known | // Ignore the event if this path was not known | ||||
if !site.sections.contains_key(path) && !site.pages.contains_key(path) { | |||||
if !site.library.contains_section(&path.to_path_buf()) | |||||
&& !site.library.contains_page(&path.to_path_buf()) | |||||
{ | |||||
return Ok(()); | return Ok(()); | ||||
} | } | ||||
if is_section { | if is_section { | ||||
if let Some(s) = site.pages.remove(path) { | |||||
if let Some(s) = site.library.remove_section(&path.to_path_buf()) { | |||||
site.permalinks.remove(&s.file.relative); | site.permalinks.remove(&s.file.relative); | ||||
site.populate_sections(); | |||||
} | } | ||||
} else { | |||||
if let Some(p) = site.pages.remove(path) { | |||||
site.permalinks.remove(&p.file.relative); | |||||
if !p.meta.taxonomies.is_empty() { | |||||
site.populate_taxonomies()?; | |||||
} | |||||
} else if let Some(p) = site.library.remove_page(&path.to_path_buf()) { | |||||
site.permalinks.remove(&p.file.relative); | |||||
// if there is a parent section, we will need to re-render it | |||||
// most likely | |||||
if find_parent_section(site, &p).is_some() { | |||||
site.populate_sections(); | |||||
} | |||||
}; | |||||
if !p.meta.taxonomies.is_empty() { | |||||
site.populate_taxonomies()?; | |||||
} | |||||
} | } | ||||
site.populate_sections(); | |||||
site.populate_taxonomies()?; | |||||
// Ensure we have our fn updated so it doesn't contain the permalink(s)/section/page deleted | // Ensure we have our fn updated so it doesn't contain the permalink(s)/section/page deleted | ||||
site.register_early_global_fns(); | |||||
site.register_tera_global_fns(); | site.register_tera_global_fns(); | ||||
// Deletion is something that doesn't happen all the time so we | // Deletion is something that doesn't happen all the time so we | ||||
// don't need to optimise it too much | // don't need to optimise it too much | ||||
return site.build(); | |||||
site.build() | |||||
} | } | ||||
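// A sketch with hypothetical event plumbing, not from this diff: the serve
// command's notify-based watcher would be the caller of `delete_element`,
// along the lines of
//
//   DebouncedEvent::Remove(path) => {
//       let is_section = path.file_name() == Some("_index.md".as_ref());
//       delete_element(&mut site, &path, is_section)?;
//   }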
/// Handles a `_index.md` (a section) being edited in some ways | /// Handles a `_index.md` (a section) being edited in some ways | ||||
fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> { | fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> { | ||||
let section = Section::from_file(path, &site.config)?; | let section = Section::from_file(path, &site.config)?; | ||||
let pathbuf = path.to_path_buf(); | |||||
match site.add_section(section, true)? { | match site.add_section(section, true)? { | ||||
// Updating a section | // Updating a section | ||||
Some(prev) => { | Some(prev) => { | ||||
// Copy the section data so we don't end up with an almost empty object | |||||
site.sections.get_mut(path).unwrap().pages = prev.pages; | |||||
site.sections.get_mut(path).unwrap().ignored_pages = prev.ignored_pages; | |||||
site.sections.get_mut(path).unwrap().subsections = prev.subsections; | |||||
site.populate_sections(); | |||||
if site.sections[path].meta == prev.meta { | |||||
if site.library.get_section(&pathbuf).unwrap().meta == prev.meta { | |||||
// Front matter didn't change, only content did | // Front matter didn't change, only content did | ||||
// so we render only the section page, not its pages | // so we render only the section page, not its pages | ||||
return site.render_section(&site.sections[path], false); | |||||
return site.render_section(&site.library.get_section(&pathbuf).unwrap(), false); | |||||
} | } | ||||
// Front matter changed | // Front matter changed | ||||
for changes in find_section_front_matter_changes(&site.sections[path].meta, &prev.meta) { | |||||
for changes in find_section_front_matter_changes( | |||||
&site.library.get_section(&pathbuf).unwrap().meta, | |||||
&prev.meta, | |||||
) { | |||||
// Sort always comes first if present so the rendering will be fine | // Sort always comes first if present so the rendering will be fine | ||||
match changes { | match changes { | ||||
SectionChangesNeeded::Sort => { | SectionChangesNeeded::Sort => { | ||||
site.sort_sections_pages(Some(path)); | |||||
site.register_tera_global_fns(); | site.register_tera_global_fns(); | ||||
} | } | ||||
SectionChangesNeeded::Render => site.render_section(&site.sections[path], false)?, | |||||
SectionChangesNeeded::RenderWithPages => site.render_section(&site.sections[path], true)?, | |||||
SectionChangesNeeded::Render => { | |||||
site.render_section(&site.library.get_section(&pathbuf).unwrap(), false)? | |||||
} | |||||
SectionChangesNeeded::RenderWithPages => { | |||||
site.render_section(&site.library.get_section(&pathbuf).unwrap(), true)? | |||||
} | |||||
// not a common enough operation to make it worth optimizing | // not a common enough operation to make it worth optimizing | ||||
SectionChangesNeeded::Delete => { | |||||
site.populate_sections(); | |||||
SectionChangesNeeded::Delete | SectionChangesNeeded::Transparent => { | |||||
site.build()?; | site.build()?; | ||||
} | } | ||||
}; | }; | ||||
} | } | ||||
return Ok(()); | |||||
Ok(()) | |||||
} | } | ||||
// New section, only render that one | // New section, only render that one | ||||
None => { | None => { | ||||
site.populate_sections(); | site.populate_sections(); | ||||
site.register_tera_global_fns(); | site.register_tera_global_fns(); | ||||
return site.render_section(&site.sections[path], true); | |||||
site.render_section(&site.library.get_section(&pathbuf).unwrap(), true) | |||||
} | } | ||||
}; | |||||
} | |||||
} | } | ||||
macro_rules! render_parent_section { | macro_rules! render_parent_section { | ||||
($site: expr, $path: expr) => { | ($site: expr, $path: expr) => { | ||||
match find_parent_section($site, &$site.pages[$path]) { | |||||
Some(s) => { | |||||
$site.render_section(s, false)?; | |||||
}, | |||||
None => (), | |||||
if let Some(s) = $site.library.find_parent_section($path) { | |||||
$site.render_section(s, false)?; | |||||
}; | }; | ||||
} | |||||
}; | |||||
} | } | ||||
/// Handles a page being edited in some ways | /// Handles a page being edited in some ways | ||||
fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> { | fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> { | ||||
let page = Page::from_file(path, &site.config)?; | let page = Page::from_file(path, &site.config)?; | ||||
let pathbuf = path.to_path_buf(); | |||||
match site.add_page(page, true)? { | match site.add_page(page, true)? { | ||||
// Updating a page | // Updating a page | ||||
Some(prev) => { | Some(prev) => { | ||||
site.populate_sections(); | |||||
site.populate_taxonomies()?; | |||||
// Front matter didn't change, only content did | // Front matter didn't change, only content did | ||||
if site.pages[path].meta == prev.meta { | |||||
if site.library.get_page(&pathbuf).unwrap().meta == prev.meta { | |||||
// Other than the page itself, the summary might be seen | // Other than the page itself, the summary might be seen | ||||
// on a paginated list for a blog for example | // on a paginated list for a blog for example | ||||
if site.pages[path].summary.is_some() { | |||||
if site.library.get_page(&pathbuf).unwrap().summary.is_some() { | |||||
render_parent_section!(site, path); | render_parent_section!(site, path); | ||||
} | } | ||||
// TODO: register_tera_global_fns is expensive as it involves lots of cloning | |||||
// I can't think of a valid usecase where you would need the content | |||||
// of a page through a global fn so it's commented out for now | |||||
// site.register_tera_global_fns(); | |||||
return site.render_page(&site.pages[path]); | |||||
site.register_tera_global_fns(); | |||||
return site.render_page(&site.library.get_page(&pathbuf).unwrap()); | |||||
} | } | ||||
// Front matter changed | // Front matter changed | ||||
let mut sections_populated = false; | |||||
for changes in find_page_front_matter_changes(&site.pages[path].meta, &prev.meta) { | |||||
for changes in find_page_front_matter_changes( | |||||
&site.library.get_page(&pathbuf).unwrap().meta, | |||||
&prev.meta, | |||||
) { | |||||
site.register_tera_global_fns(); | |||||
// Sort always comes first if present so the rendering will be fine | // Sort always comes first if present so the rendering will be fine | ||||
match changes { | match changes { | ||||
PageChangesNeeded::Taxonomies => { | PageChangesNeeded::Taxonomies => { | ||||
site.populate_taxonomies()?; | site.populate_taxonomies()?; | ||||
site.register_tera_global_fns(); | |||||
site.render_taxonomies()?; | site.render_taxonomies()?; | ||||
} | } | ||||
PageChangesNeeded::Sort => { | PageChangesNeeded::Sort => { | ||||
let section_path = match find_parent_section(site, &site.pages[path]) { | |||||
Some(s) => s.file.path.clone(), | |||||
None => continue // Do nothing if it's an orphan page | |||||
}; | |||||
if !sections_populated { | |||||
site.populate_sections(); | |||||
sections_populated = true; | |||||
} | |||||
site.sort_sections_pages(Some(§ion_path)); | |||||
site.register_tera_global_fns(); | |||||
site.render_index()?; | site.render_index()?; | ||||
} | } | ||||
PageChangesNeeded::Render => { | PageChangesNeeded::Render => { | ||||
if !sections_populated { | |||||
site.populate_sections(); | |||||
sections_populated = true; | |||||
} | |||||
site.register_tera_global_fns(); | |||||
render_parent_section!(site, path); | render_parent_section!(site, path); | ||||
site.render_page(&site.pages[path])?; | |||||
site.render_page(&site.library.get_page(&path.to_path_buf()).unwrap())?; | |||||
} | } | ||||
}; | }; | ||||
} | } | ||||
@@ -246,6 +232,7 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> { | |||||
None => { | None => { | ||||
site.populate_sections(); | site.populate_sections(); | ||||
site.populate_taxonomies()?; | site.populate_taxonomies()?; | ||||
site.register_early_global_fns(); | |||||
site.register_tera_global_fns(); | site.register_tera_global_fns(); | ||||
// No need to optimise that yet, we can revisit if it becomes an issue | // No need to optimise that yet, we can revisit if it becomes an issue | ||||
site.build() | site.build() | ||||
@@ -253,8 +240,58 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> { | |||||
} | } | ||||
} | } | ||||
/// What happens when we rename a file/folder in the content directory. | |||||
/// Note that for folders this is only called when the folder isn't empty | |||||
pub fn after_content_rename(site: &mut Site, old: &Path, new: &Path) -> Result<()> { | |||||
let new_path = if new.is_dir() { | |||||
if new.join("_index.md").exists() { | |||||
// This is a section: keep the directory path to differentiate it from renaming | |||||
// _index.md, which doesn't do the same thing | |||||
new.to_path_buf() | |||||
} else if new.join("index.md").exists() { | |||||
new.join("index.md") | |||||
} else { | |||||
bail!("Got unexpected folder {:?} while handling renaming that was not expected", new); | |||||
} | |||||
} else { | |||||
new.to_path_buf() | |||||
}; | |||||
// A section folder has been renamed: just reload the whole site and rebuild it as we | |||||
// do not really know what needs to be rendered | |||||
if new_path.is_dir() { | |||||
site.load()?; | |||||
return site.build(); | |||||
} | |||||
/// What happens when a section or a page is changed | |||||
// We ignore renames on non-markdown files for now | |||||
if let Some(ext) = new_path.extension() { | |||||
if ext != "md" { | |||||
return Ok(()); | |||||
} | |||||
} | |||||
// Renaming a file to _index.md: let the section editing handler deal with it and hope for the best | |||||
if new_path.file_name().unwrap() == "_index.md" { | |||||
// We aren't entirely sure where the original file was, so just try to delete whatever was | |||||
// at the old path | |||||
site.library.remove_page(&old.to_path_buf()); | |||||
site.library.remove_section(&old.to_path_buf()); | |||||
return handle_section_editing(site, &new_path); | |||||
} | |||||
// If it is a page, just delete what was there before and | |||||
// pretend it's a new page | |||||
let old_path = if new_path.file_name().unwrap() == "index.md" { | |||||
old.join("index.md") | |||||
} else { | |||||
old.to_path_buf() | |||||
}; | |||||
site.library.remove_page(&old_path); | |||||
return handle_page_editing(site, &new_path); | |||||
} | |||||
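// A sketch, not part of the diff: the folder handling at the top of
// `after_content_rename` extracted as a standalone helper so the three cases
// are explicit (the name `resolve_renamed_path` is hypothetical).

use std::path::{Path, PathBuf};

fn resolve_renamed_path(new: &Path) -> Option<PathBuf> {
    if new.is_dir() {
        if new.join("_index.md").exists() {
            // A section folder: keep the directory itself
            Some(new.to_path_buf())
        } else if new.join("index.md").exists() {
            // A page with co-located assets: point at its index.md
            Some(new.join("index.md"))
        } else {
            // Anything else is unexpected and bails in the real code
            None
        }
    } else {
        Some(new.to_path_buf())
    }
}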
/// What happens when a section or a page is created/edited | |||||
pub fn after_content_change(site: &mut Site, path: &Path) -> Result<()> { | pub fn after_content_change(site: &mut Site, path: &Path) -> Result<()> { | ||||
let is_section = path.file_name().unwrap() == "_index.md"; | let is_section = path.file_name().unwrap() == "_index.md"; | ||||
let is_md = path.extension().unwrap() == "md"; | let is_md = path.extension().unwrap() == "md"; | ||||
@@ -293,12 +330,10 @@ pub fn after_content_change(site: &mut Site, path: &Path) -> Result<()> { | |||||
} else { | } else { | ||||
handle_page_editing(site, path) | handle_page_editing(site, path) | ||||
} | } | ||||
} else if index.exists() { | |||||
handle_page_editing(site, &index) | |||||
} else { | } else { | ||||
if index.exists() { | |||||
handle_page_editing(site, &index) | |||||
} else { | |||||
Ok(()) | |||||
} | |||||
Ok(()) | |||||
} | } | ||||
} | } | ||||
@@ -309,7 +344,7 @@ pub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> { | |||||
match filename { | match filename { | ||||
"sitemap.xml" => site.render_sitemap(), | "sitemap.xml" => site.render_sitemap(), | ||||
"rss.xml" => site.render_rss_feed(None, None), | |||||
"rss.xml" => site.render_rss_feed(site.library.pages_values(), None), | |||||
"robots.txt" => site.render_robots(), | "robots.txt" => site.render_robots(), | ||||
"single.html" | "list.html" => site.render_taxonomies(), | "single.html" | "list.html" => site.render_taxonomies(), | ||||
"page.html" => { | "page.html" => { | ||||
@@ -325,10 +360,11 @@ pub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> { | |||||
// because we have no clue which one needs rebuilding | // because we have no clue which one needs rebuilding | ||||
// TODO: check whether the shortcode is used in the markdown instead of re-rendering | // TODO: check whether the shortcode is used in the markdown instead of re-rendering | ||||
// everything | // everything | ||||
if path.components().collect::<Vec<_>>().contains(&Component::Normal("shortcodes".as_ref())) { | |||||
if path.components().any(|x| x == Component::Normal("shortcodes".as_ref())) { | |||||
site.render_markdown()?; | site.render_markdown()?; | ||||
} | } | ||||
site.populate_sections(); | site.populate_sections(); | ||||
site.populate_taxonomies()?; | |||||
site.render_sections()?; | site.render_sections()?; | ||||
site.render_orphan_pages()?; | site.render_orphan_pages()?; | ||||
site.render_taxonomies() | site.render_taxonomies() | ||||
@@ -336,16 +372,15 @@ pub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> { | |||||
} | } | ||||
} | } | ||||
#[cfg(test)] | #[cfg(test)] | ||||
mod tests { | mod tests { | ||||
use std::collections::HashMap; | use std::collections::HashMap; | ||||
use front_matter::{PageFrontMatter, SectionFrontMatter, SortBy}; | |||||
use super::{ | use super::{ | ||||
find_page_front_matter_changes, find_section_front_matter_changes, | |||||
PageChangesNeeded, SectionChangesNeeded, | |||||
find_page_front_matter_changes, find_section_front_matter_changes, PageChangesNeeded, | |||||
SectionChangesNeeded, | |||||
}; | }; | ||||
use front_matter::{PageFrontMatter, SectionFrontMatter, SortBy}; | |||||
#[test] | #[test] | ||||
fn can_find_taxonomy_changes_in_page_frontmatter() { | fn can_find_taxonomy_changes_in_page_frontmatter() { | ||||
@@ -362,7 +397,10 @@ mod tests { | |||||
taxonomies.insert("categories".to_string(), vec!["a category".to_string()]); | taxonomies.insert("categories".to_string(), vec!["a category".to_string()]); | ||||
let current = PageFrontMatter { taxonomies, order: Some(1), ..PageFrontMatter::default() }; | let current = PageFrontMatter { taxonomies, order: Some(1), ..PageFrontMatter::default() }; | ||||
let changes = find_page_front_matter_changes(¤t, &PageFrontMatter::default()); | let changes = find_page_front_matter_changes(¤t, &PageFrontMatter::default()); | ||||
assert_eq!(changes, vec![PageChangesNeeded::Taxonomies, PageChangesNeeded::Sort, PageChangesNeeded::Render]); | |||||
assert_eq!( | |||||
changes, | |||||
vec![PageChangesNeeded::Taxonomies, PageChangesNeeded::Sort, PageChangesNeeded::Render] | |||||
); | |||||
} | } | ||||
#[test] | #[test] | ||||
@@ -1,89 +1,99 @@ | |||||
extern crate fs_extra; | |||||
extern crate rebuild; | extern crate rebuild; | ||||
extern crate site; | extern crate site; | ||||
extern crate tempfile; | extern crate tempfile; | ||||
extern crate fs_extra; | |||||
use std::env; | use std::env; | ||||
use std::fs::{remove_dir_all, File}; | |||||
use std::fs::{self, File}; | |||||
use std::io::prelude::*; | use std::io::prelude::*; | ||||
use fs_extra::dir; | use fs_extra::dir; | ||||
use tempfile::tempdir; | |||||
use site::Site; | use site::Site; | ||||
use tempfile::tempdir; | |||||
use rebuild::after_content_change; | |||||
use rebuild::{after_content_change, after_content_rename}; | |||||
// Loads the test_site into a tempdir and builds it there | // Loads the test_site into a tempdir and builds it there | ||||
// Returns (site_path_in_tempdir, site) | // Returns (site_path_in_tempdir, site) | ||||
macro_rules! load_and_build_site { | macro_rules! load_and_build_site { | ||||
($tmp_dir: expr) => { | |||||
{ | |||||
let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf(); | |||||
path.push("test_site"); | |||||
let mut options = dir::CopyOptions::new(); | |||||
options.copy_inside = true; | |||||
dir::copy(&path, &$tmp_dir, &options).unwrap(); | |||||
let site_path = $tmp_dir.path().join("test_site"); | |||||
// delete useless sections for those tests | |||||
remove_dir_all(site_path.join("content").join("paginated")).unwrap(); | |||||
remove_dir_all(site_path.join("content").join("posts")).unwrap(); | |||||
let mut site = Site::new(&site_path, "config.toml").unwrap(); | |||||
site.load().unwrap(); | |||||
let public = &site_path.join("public"); | |||||
site.set_output_path(&public); | |||||
site.build().unwrap(); | |||||
(site_path, site) | |||||
} | |||||
} | |||||
($tmp_dir: expr) => {{ | |||||
let mut path = | |||||
env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf(); | |||||
path.push("test_site"); | |||||
let mut options = dir::CopyOptions::new(); | |||||
options.copy_inside = true; | |||||
dir::copy(&path, &$tmp_dir, &options).unwrap(); | |||||
let site_path = $tmp_dir.path().join("test_site"); | |||||
let mut site = Site::new(&site_path, "config.toml").unwrap(); | |||||
site.load().unwrap(); | |||||
let public = &site_path.join("public"); | |||||
site.set_output_path(&public); | |||||
site.build().unwrap(); | |||||
(site_path, site) | |||||
}}; | |||||
} | } | ||||
/// Replace the file at the path (starting from root) with the given content | /// Replace the file at the path (starting from root) with the given content | ||||
/// and return the file path that was modified | /// and return the file path that was modified | ||||
macro_rules! edit_file { | macro_rules! edit_file { | ||||
($site_path: expr, $path: expr, $content: expr) => { | |||||
{ | |||||
let mut t = $site_path.clone(); | |||||
for c in $path.split('/') { | |||||
t.push(c); | |||||
} | |||||
let mut file = File::create(&t).expect("Could not open/create file"); | |||||
file.write_all($content).expect("Could not write to the file"); | |||||
t | |||||
($site_path: expr, $path: expr, $content: expr) => {{ | |||||
let mut t = $site_path.clone(); | |||||
for c in $path.split('/') { | |||||
t.push(c); | |||||
} | } | ||||
} | |||||
let mut file = File::create(&t).expect("Could not open/create file"); | |||||
file.write_all($content).expect("Could not write to the file"); | |||||
t | |||||
}}; | |||||
} | } | ||||
macro_rules! file_contains { | macro_rules! file_contains { | ||||
($site_path: expr, $path: expr, $text: expr) => { | |||||
{ | |||||
let mut path = $site_path.clone(); | |||||
for component in $path.split("/") { | |||||
path.push(component); | |||||
} | |||||
let mut file = File::open(&path).unwrap(); | |||||
let mut s = String::new(); | |||||
file.read_to_string(&mut s).unwrap(); | |||||
println!("{:?} -> {}", path, s); | |||||
s.contains($text) | |||||
($site_path: expr, $path: expr, $text: expr) => {{ | |||||
let mut path = $site_path.clone(); | |||||
for component in $path.split("/") { | |||||
path.push(component); | |||||
} | } | ||||
} | |||||
let mut file = File::open(&path).unwrap(); | |||||
let mut s = String::new(); | |||||
file.read_to_string(&mut s).unwrap(); | |||||
println!("{:?} -> {}", path, s); | |||||
s.contains($text) | |||||
}}; | |||||
} | |||||
/// Rename a file or a folder to the given new name | |||||
macro_rules! rename { | |||||
($site_path: expr, $path: expr, $new_name: expr) => {{ | |||||
let mut t = $site_path.clone(); | |||||
for c in $path.split('/') { | |||||
t.push(c); | |||||
} | |||||
let mut new_path = t.parent().unwrap().to_path_buf(); | |||||
new_path.push($new_name); | |||||
fs::rename(&t, &new_path).unwrap(); | |||||
println!("Renamed {:?} to {:?}", t, new_path); | |||||
(t, new_path) | |||||
}}; | |||||
} | } | ||||
#[test] | #[test] | ||||
fn can_rebuild_after_simple_change_to_page_content() { | fn can_rebuild_after_simple_change_to_page_content() { | ||||
let tmp_dir = tempdir().expect("create temp dir"); | let tmp_dir = tempdir().expect("create temp dir"); | ||||
let (site_path, mut site) = load_and_build_site!(tmp_dir); | let (site_path, mut site) = load_and_build_site!(tmp_dir); | ||||
let file_path = edit_file!(site_path, "content/rebuild/first.md", br#" | |||||
let file_path = edit_file!( | |||||
site_path, | |||||
"content/rebuild/first.md", | |||||
br#" | |||||
+++ | +++ | ||||
title = "first" | title = "first" | ||||
weight = 1 | weight = 1 | ||||
date = 2017-01-01 | date = 2017-01-01 | ||||
+++ | +++ | ||||
Some content"#); | |||||
Some content"# | |||||
); | |||||
let res = after_content_change(&mut site, &file_path); | let res = after_content_change(&mut site, &file_path); | ||||
assert!(res.is_ok()); | assert!(res.is_ok()); | ||||
@@ -94,14 +104,18 @@ Some content"#); | |||||
fn can_rebuild_after_title_change_page_global_func_usage() { | fn can_rebuild_after_title_change_page_global_func_usage() { | ||||
let tmp_dir = tempdir().expect("create temp dir"); | let tmp_dir = tempdir().expect("create temp dir"); | ||||
let (site_path, mut site) = load_and_build_site!(tmp_dir); | let (site_path, mut site) = load_and_build_site!(tmp_dir); | ||||
let file_path = edit_file!(site_path, "content/rebuild/first.md", br#" | |||||
let file_path = edit_file!( | |||||
site_path, | |||||
"content/rebuild/first.md", | |||||
br#" | |||||
+++ | +++ | ||||
title = "Premier" | title = "Premier" | ||||
weight = 10 | weight = 10 | ||||
date = 2017-01-01 | date = 2017-01-01 | ||||
+++ | +++ | ||||
# A title"#); | |||||
# A title"# | |||||
); | |||||
let res = after_content_change(&mut site, &file_path); | let res = after_content_change(&mut site, &file_path); | ||||
assert!(res.is_ok()); | assert!(res.is_ok()); | ||||
@@ -112,15 +126,111 @@ date = 2017-01-01 | |||||
fn can_rebuild_after_sort_change_in_section() { | fn can_rebuild_after_sort_change_in_section() { | ||||
let tmp_dir = tempdir().expect("create temp dir"); | let tmp_dir = tempdir().expect("create temp dir"); | ||||
let (site_path, mut site) = load_and_build_site!(tmp_dir); | let (site_path, mut site) = load_and_build_site!(tmp_dir); | ||||
let file_path = edit_file!(site_path, "content/rebuild/_index.md", br#" | |||||
let file_path = edit_file!( | |||||
site_path, | |||||
"content/rebuild/_index.md", | |||||
br#" | |||||
+++ | +++ | ||||
paginate_by = 1 | paginate_by = 1 | ||||
sort_by = "weight" | sort_by = "weight" | ||||
template = "rebuild.html" | template = "rebuild.html" | ||||
+++ | +++ | ||||
"#); | |||||
"# | |||||
); | |||||
let res = after_content_change(&mut site, &file_path); | let res = after_content_change(&mut site, &file_path); | ||||
assert!(res.is_ok()); | assert!(res.is_ok()); | ||||
assert!(file_contains!(site_path, "public/rebuild/index.html", "<h1>first</h1><h1>second</h1>")); | |||||
assert!(file_contains!( | |||||
site_path, | |||||
"public/rebuild/index.html", | |||||
"<h1>first</h1><h1>second</h1>" | |||||
)); | |||||
} | |||||
#[test] | |||||
fn can_rebuild_after_transparent_change() { | |||||
let tmp_dir = tempdir().expect("create temp dir"); | |||||
let (site_path, mut site) = load_and_build_site!(tmp_dir); | |||||
let file_path = edit_file!( | |||||
site_path, | |||||
"content/posts/2018/_index.md", | |||||
br#" | |||||
+++ | |||||
transparent = false | |||||
render = false | |||||
+++ | |||||
"# | |||||
); | |||||
// Also remove pagination from the posts section so we can check whether the | |||||
// transparent page title shows up without having to deal with paginated output | |||||
edit_file!( | |||||
site_path, | |||||
"content/posts/_index.md", | |||||
br#" | |||||
+++ | |||||
template = "section.html" | |||||
insert_anchor_links = "left" | |||||
+++ | |||||
"# | |||||
); | |||||
let res = after_content_change(&mut site, &file_path); | |||||
assert!(res.is_ok()); | |||||
assert!(!file_contains!(site_path, "public/posts/index.html", "A transparent page")); | |||||
} | |||||
#[test] | |||||
fn can_rebuild_after_renaming_page() { | |||||
let tmp_dir = tempdir().expect("create temp dir"); | |||||
let (site_path, mut site) = load_and_build_site!(tmp_dir); | |||||
let (old_path, new_path) = rename!(site_path, "content/posts/simple.md", "hard.md"); | |||||
let res = after_content_rename(&mut site, &old_path, &new_path); | |||||
println!("{:?}", res); | |||||
assert!(res.is_ok()); | |||||
assert!(file_contains!(site_path, "public/posts/hard/index.html", "A simple page")); | |||||
} | |||||
// https://github.com/Keats/gutenberg/issues/385 | |||||
#[test] | |||||
fn can_rebuild_after_renaming_colocated_asset_folder() { | |||||
let tmp_dir = tempdir().expect("create temp dir"); | |||||
let (site_path, mut site) = load_and_build_site!(tmp_dir); | |||||
let (old_path, new_path) = | |||||
rename!(site_path, "content/posts/with-assets", "with-assets-updated"); | |||||
assert!(file_contains!(site_path, "content/posts/with-assets-updated/index.md", "Hello")); | |||||
let res = after_content_rename(&mut site, &old_path, &new_path); | |||||
println!("{:?}", res); | |||||
assert!(res.is_ok()); | |||||
assert!(file_contains!( | |||||
site_path, | |||||
"public/posts/with-assets-updated/index.html", | |||||
"Hello world" | |||||
)); | |||||
} | |||||
// https://github.com/Keats/gutenberg/issues/385 | |||||
#[test] | |||||
fn can_rebuild_after_renaming_section_folder() { | |||||
let tmp_dir = tempdir().expect("create temp dir"); | |||||
let (site_path, mut site) = load_and_build_site!(tmp_dir); | |||||
let (old_path, new_path) = rename!(site_path, "content/posts", "new-posts"); | |||||
assert!(file_contains!(site_path, "content/new-posts/simple.md", "simple")); | |||||
let res = after_content_rename(&mut site, &old_path, &new_path); | |||||
assert!(res.is_ok()); | |||||
assert!(file_contains!(site_path, "public/new-posts/simple/index.html", "simple")); | |||||
} | |||||
#[test] | |||||
fn can_rebuild_after_renaming_non_md_asset_in_colocated_folder() { | |||||
let tmp_dir = tempdir().expect("create temp dir"); | |||||
let (site_path, mut site) = load_and_build_site!(tmp_dir); | |||||
let (old_path, new_path) = | |||||
rename!(site_path, "content/posts/with-assets/zola.png", "gutenberg.png"); | |||||
// Check that we don't try to load images as markdown | |||||
let res = after_content_rename(&mut site, &old_path, &new_path); | |||||
assert!(res.is_ok()); | |||||
} | } |
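// A note on running these: the integration tests above live in the rebuild
// component and can be run on their own from the workspace root (assuming the
// standard cargo workspace layout):
//
//   cargo test -p rebuild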
@@ -5,17 +5,18 @@ authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] | |||||
[dependencies] | [dependencies] | ||||
tera = { version = "0.11", features = ["preserve_order"] } | tera = { version = "0.11", features = ["preserve_order"] } | ||||
syntect = "2" | |||||
pulldown-cmark = "0" | |||||
syntect = "3" | |||||
pulldown-cmark = "0.2" | |||||
slug = "0.1" | slug = "0.1" | ||||
serde = "1" | serde = "1" | ||||
serde_derive = "1" | serde_derive = "1" | ||||
pest = "1" | |||||
pest_derive = "1" | |||||
pest = "2" | |||||
pest_derive = "2" | |||||
regex = "1" | |||||
lazy_static = "1" | |||||
errors = { path = "../errors" } | errors = { path = "../errors" } | ||||
front_matter = { path = "../front_matter" } | front_matter = { path = "../front_matter" } | ||||
highlighting = { path = "../highlighting"} | |||||
utils = { path = "../utils" } | utils = { path = "../utils" } | ||||
config = { path = "../config" } | config = { path = "../config" } | ||||
link_checker = { path = "../link_checker" } | link_checker = { path = "../link_checker" } | ||||
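# Note: these bumps are not drop-in. The markdown renderer below swaps
# start_coloured_html_snippet for syntect 3's start_highlighted_html_snippet,
# pulldown-cmark 0.2 turns the OPTION_ENABLE_* flags into associated constants
# on Options, pest 2 upper-cases the built-in rules (any -> ANY, soi -> SOI,
# eoi -> EOI, whitespace -> WHITESPACE), and the highlighting helpers now come
# from config::highlighting instead of the removed highlighting crate; all of
# this shows up in the diffs below.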
@@ -1,18 +1,18 @@ | |||||
#![feature(test)] | #![feature(test)] | ||||
extern crate test; | |||||
extern crate tera; | extern crate tera; | ||||
extern crate test; | |||||
extern crate rendering; | |||||
extern crate config; | extern crate config; | ||||
extern crate front_matter; | extern crate front_matter; | ||||
extern crate rendering; | |||||
use std::collections::HashMap; | use std::collections::HashMap; | ||||
use std::path::Path; | use std::path::Path; | ||||
use tera::Tera; | |||||
use rendering::{RenderContext, render_content, render_shortcodes}; | |||||
use front_matter::InsertAnchor; | |||||
use config::Config; | use config::Config; | ||||
use front_matter::InsertAnchor; | |||||
use rendering::{render_content, render_shortcodes, RenderContext}; | |||||
use tera::Tera; | |||||
static CONTENT: &'static str = r#" | static CONTENT: &'static str = r#" | ||||
# Modus cognitius profanam ne duae virtutis mundi | # Modus cognitius profanam ne duae virtutis mundi | ||||
@@ -92,7 +92,8 @@ fn bench_render_content_with_highlighting(b: &mut test::Bencher) { | |||||
tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap(); | tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap(); | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); | |||||
let context = | |||||
RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); | |||||
b.iter(|| render_content(CONTENT, &context).unwrap()); | b.iter(|| render_content(CONTENT, &context).unwrap()); | ||||
} | } | ||||
@@ -103,7 +104,8 @@ fn bench_render_content_without_highlighting(b: &mut test::Bencher) { | |||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let mut config = Config::default(); | let mut config = Config::default(); | ||||
config.highlight_code = false; | config.highlight_code = false; | ||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); | |||||
let context = | |||||
RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); | |||||
b.iter(|| render_content(CONTENT, &context).unwrap()); | b.iter(|| render_content(CONTENT, &context).unwrap()); | ||||
} | } | ||||
@@ -114,7 +116,8 @@ fn bench_render_content_no_shortcode(b: &mut test::Bencher) { | |||||
let mut config = Config::default(); | let mut config = Config::default(); | ||||
config.highlight_code = false; | config.highlight_code = false; | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); | |||||
let context = | |||||
RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); | |||||
b.iter(|| render_content(&content2, &context).unwrap()); | b.iter(|| render_content(&content2, &context).unwrap()); | ||||
} | } | ||||
@@ -125,8 +128,8 @@ fn bench_render_shortcodes_one_present(b: &mut test::Bencher) { | |||||
tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap(); | tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); | |||||
let context = | |||||
RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); | |||||
b.iter(|| render_shortcodes(CONTENT, &context)); | b.iter(|| render_shortcodes(CONTENT, &context)); | ||||
} | } | ||||
@@ -1,6 +1,5 @@ | |||||
// Partly taken from Tera | // Partly taken from Tera | ||||
whitespace = _{ " " | "\t" | "\r" | "\n" } | |||||
WHITESPACE = _{ " " | "\t" | "\r" | "\n" } | |||||
/// LITERALS | /// LITERALS | ||||
int = @{ "-" ? ~ ("0" | '1'..'9' ~ '0'..'9' * ) } | int = @{ "-" ? ~ ("0" | '1'..'9' ~ '0'..'9' * ) } | ||||
@@ -12,11 +11,11 @@ float = @{ | |||||
) | ) | ||||
} | } | ||||
// matches anything between 2 double quotes | // matches anything between 2 double quotes | ||||
double_quoted_string = @{ "\"" ~ (!("\"") ~ any)* ~ "\""} | |||||
double_quoted_string = @{ "\"" ~ (!("\"") ~ ANY)* ~ "\""} | |||||
// matches anything between 2 single quotes | // matches anything between 2 single quotes | ||||
single_quoted_string = @{ "\'" ~ (!("\'") ~ any)* ~ "\'"} | |||||
single_quoted_string = @{ "\'" ~ (!("\'") ~ ANY)* ~ "\'"} | |||||
// matches anything between 2 backquotes\backticks | // matches anything between 2 backquotes\backticks | ||||
backquoted_quoted_string = @{ "`" ~ (!("`") ~ any)* ~ "`"} | |||||
backquoted_quoted_string = @{ "`" ~ (!("`") ~ ANY)* ~ "`"} | |||||
string = @{ | string = @{ | ||||
double_quoted_string | | double_quoted_string | | ||||
@@ -37,7 +36,7 @@ ident = @{ | |||||
all_chars* | all_chars* | ||||
} | } | ||||
/// Now specific to Gutenberg | |||||
/// Now specific to Zola | |||||
// shortcode is abbreviated to sc to keep things short | // shortcode is abbreviated to sc to keep things short | ||||
@@ -54,11 +53,11 @@ ignored_sc_body_start = !{ "{%/*" ~ sc_def ~ "*/%}" } | |||||
ignored_sc_body_end = !{ "{%/*" ~ "end" ~ "*/%}" } | ignored_sc_body_end = !{ "{%/*" ~ "end" ~ "*/%}" } | ||||
shortcode_with_body = !{ sc_body_start ~ text_in_body_sc ~ sc_body_end } | shortcode_with_body = !{ sc_body_start ~ text_in_body_sc ~ sc_body_end } | ||||
ignored_shortcode_with_body = !{ ignored_sc_body_start ~ text_in_ignored_body_sc ~ ignored_sc_body_end } | |||||
ignored_shortcode_with_body = { ignored_sc_body_start ~ text_in_ignored_body_sc ~ ignored_sc_body_end } | |||||
text_in_body_sc = ${ (!(sc_body_end) ~ any)+ } | |||||
text_in_ignored_body_sc = ${ (!(ignored_sc_body_end) ~ any)+ } | |||||
text = ${ (!(inline_shortcode | ignored_inline_shortcode | sc_body_start | ignored_sc_body_start) ~ any)+ } | |||||
text_in_body_sc = ${ (!(sc_body_end) ~ ANY)+ } | |||||
text_in_ignored_body_sc = ${ (!(ignored_sc_body_end) ~ ANY)+ } | |||||
text = ${ (!(inline_shortcode | ignored_inline_shortcode | shortcode_with_body | ignored_shortcode_with_body) ~ ANY)+ } | |||||
content = _{ | content = _{ | ||||
ignored_inline_shortcode | | ignored_inline_shortcode | | ||||
@@ -69,4 +68,4 @@ content = _{ | |||||
} | } | ||||
page = ${ soi ~ content* ~ eoi } | |||||
page = ${ SOI ~ content* ~ EOI } |
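// A sketch, not part of the diff: the grammar's entry point is unchanged by
// the pest 2 migration; only the built-in rule names moved to upper case.
// Invoking the generated parser (defined in shortcode.rs below):
//
//   let pairs = ContentParser::parse(Rule::page, r#"Hello {{ youtube(id="ab12") }}"#)?;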
@@ -1,12 +1,10 @@ | |||||
use std::collections::HashMap; | use std::collections::HashMap; | ||||
use std::path::Path; | |||||
use tera::{Tera, Context}; | |||||
use front_matter::InsertAnchor; | |||||
use config::Config; | use config::Config; | ||||
use front_matter::InsertAnchor; | |||||
use tera::{Context, Tera}; | |||||
/// All the information from the gutenberg site that is needed to render HTML from markdown | |||||
/// All the information from the zola site that is needed to render HTML from markdown | |||||
#[derive(Debug)] | #[derive(Debug)] | ||||
pub struct RenderContext<'a> { | pub struct RenderContext<'a> { | ||||
pub tera: &'a Tera, | pub tera: &'a Tera, | ||||
@@ -14,7 +12,6 @@ pub struct RenderContext<'a> { | |||||
pub tera_context: Context, | pub tera_context: Context, | ||||
pub current_page_permalink: &'a str, | pub current_page_permalink: &'a str, | ||||
pub permalinks: &'a HashMap<String, String>, | pub permalinks: &'a HashMap<String, String>, | ||||
pub base_path: &'a Path, | |||||
pub insert_anchor: InsertAnchor, | pub insert_anchor: InsertAnchor, | ||||
} | } | ||||
@@ -24,7 +21,6 @@ impl<'a> RenderContext<'a> { | |||||
config: &'a Config, | config: &'a Config, | ||||
current_page_permalink: &'a str, | current_page_permalink: &'a str, | ||||
permalinks: &'a HashMap<String, String>, | permalinks: &'a HashMap<String, String>, | ||||
base_path: &'a Path, | |||||
insert_anchor: InsertAnchor, | insert_anchor: InsertAnchor, | ||||
) -> RenderContext<'a> { | ) -> RenderContext<'a> { | ||||
let mut tera_context = Context::new(); | let mut tera_context = Context::new(); | ||||
@@ -35,7 +31,6 @@ impl<'a> RenderContext<'a> { | |||||
current_page_permalink, | current_page_permalink, | ||||
permalinks, | permalinks, | ||||
insert_anchor, | insert_anchor, | ||||
base_path, | |||||
config, | config, | ||||
} | } | ||||
} | } | ||||
@@ -1,39 +1,41 @@ | |||||
extern crate tera; | |||||
extern crate syntect; | |||||
extern crate pulldown_cmark; | extern crate pulldown_cmark; | ||||
extern crate slug; | extern crate slug; | ||||
extern crate syntect; | |||||
extern crate tera; | |||||
#[macro_use] | #[macro_use] | ||||
extern crate serde_derive; | extern crate serde_derive; | ||||
extern crate serde; | |||||
extern crate pest; | extern crate pest; | ||||
extern crate serde; | |||||
#[macro_use] | #[macro_use] | ||||
extern crate pest_derive; | extern crate pest_derive; | ||||
extern crate regex; | |||||
#[macro_use] | |||||
extern crate lazy_static; | |||||
#[macro_use] | #[macro_use] | ||||
extern crate errors; | extern crate errors; | ||||
extern crate front_matter; | |||||
extern crate highlighting; | |||||
extern crate utils; | |||||
extern crate config; | extern crate config; | ||||
extern crate front_matter; | |||||
extern crate link_checker; | extern crate link_checker; | ||||
extern crate utils; | |||||
#[cfg(test)] | #[cfg(test)] | ||||
extern crate templates; | extern crate templates; | ||||
mod context; | mod context; | ||||
mod markdown; | mod markdown; | ||||
mod table_of_contents; | |||||
mod shortcode; | mod shortcode; | ||||
mod table_of_contents; | |||||
use errors::Result; | use errors::Result; | ||||
pub use context::RenderContext; | |||||
use markdown::markdown_to_html; | use markdown::markdown_to_html; | ||||
pub use table_of_contents::Header; | |||||
pub use shortcode::render_shortcodes; | pub use shortcode::render_shortcodes; | ||||
pub use context::RenderContext; | |||||
pub use table_of_contents::Header; | |||||
pub fn render_content(content: &str, context: &RenderContext) -> Result<markdown::Rendered> { | pub fn render_content(content: &str, context: &RenderContext) -> Result<markdown::Rendered> { | ||||
// Don't do anything if there is nothing like a shortcode in the content | |||||
// Don't render shortcodes if there is nothing that looks like a shortcode in the content | |||||
if content.contains("{{") || content.contains("{%") { | if content.contains("{{") || content.contains("{%") { | ||||
let rendered = render_shortcodes(content, context)?; | let rendered = render_shortcodes(content, context)?; | ||||
return markdown_to_html(&rendered, context); | return markdown_to_html(&rendered, context); | ||||
@@ -1,18 +1,20 @@ | |||||
use std::borrow::Cow::{Owned, Borrowed}; | |||||
use std::borrow::Cow::{Borrowed, Owned}; | |||||
use self::cmark::{Event, Options, Parser, Tag}; | |||||
use pulldown_cmark as cmark; | use pulldown_cmark as cmark; | ||||
use self::cmark::{Parser, Event, Tag, Options, OPTION_ENABLE_TABLES, OPTION_ENABLE_FOOTNOTES}; | |||||
use slug::slugify; | use slug::slugify; | ||||
use syntect::easy::HighlightLines; | use syntect::easy::HighlightLines; | ||||
use syntect::html::{start_coloured_html_snippet, styles_to_coloured_html, IncludeBackground}; | |||||
use syntect::html::{ | |||||
start_highlighted_html_snippet, styled_line_to_highlighted_html, IncludeBackground, | |||||
}; | |||||
use config::highlighting::{get_highlighter, SYNTAX_SET, THEME_SET}; | |||||
use errors::Result; | use errors::Result; | ||||
use utils::site::resolve_internal_link; | |||||
use highlighting::{get_highlighter, THEME_SET}; | |||||
use link_checker::check_url; | use link_checker::check_url; | ||||
use utils::site::resolve_internal_link; | |||||
use table_of_contents::{TempHeader, Header, make_table_of_contents}; | |||||
use context::RenderContext; | use context::RenderContext; | ||||
use table_of_contents::{make_table_of_contents, Header, TempHeader}; | |||||
const CONTINUE_READING: &str = "<p><a name=\"continue-reading\"></a></p>\n"; | const CONTINUE_READING: &str = "<p><a name=\"continue-reading\"></a></p>\n"; | ||||
@@ -20,7 +22,7 @@ const CONTINUE_READING: &str = "<p><a name=\"continue-reading\"></a></p>\n"; | |||||
pub struct Rendered { | pub struct Rendered { | ||||
pub body: String, | pub body: String, | ||||
pub summary_len: Option<usize>, | pub summary_len: Option<usize>, | ||||
pub toc: Vec<Header> | |||||
pub toc: Vec<Header>, | |||||
} | } | ||||
// We might have cases where the slug is already present in our list of anchor | // We might have cases where the slug is already present in our list of anchor | ||||
@@ -40,8 +42,10 @@ fn find_anchor(anchors: &[String], name: String, level: u8) -> String { | |||||
find_anchor(anchors, name, level + 1) | find_anchor(anchors, name, level + 1) | ||||
} | } | ||||
// Colocated asset links refer to files in the same directory, | |||||
// so the link should be a bare filename | |||||
fn is_colocated_asset_link(link: &str) -> bool { | fn is_colocated_asset_link(link: &str) -> bool { | ||||
!link.contains("/") // http://, ftp://, ../ etc | |||||
!link.contains('/') // http://, ftp://, ../ etc | |||||
&& !link.starts_with("mailto:") | && !link.starts_with("mailto:") | ||||
} | } | ||||
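// For illustration, with the check above:
//   is_colocated_asset_link("image.png")             == true
//   is_colocated_asset_link("./image.png")           == false  (contains '/')
//   is_colocated_asset_link("https://example.com/a") == false
//   is_colocated_asset_link("mailto:me@example.com") == false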
@@ -51,7 +55,8 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render | |||||
// Set while parsing | // Set while parsing | ||||
let mut error = None; | let mut error = None; | ||||
let mut highlighter: Option<HighlightLines> = None; | |||||
let mut background = IncludeBackground::Yes; | |||||
let mut highlighter: Option<(HighlightLines, bool)> = None; | |||||
// If we get text in a header, we need to insert the id and an anchor | // If we get text in a header, we need to insert the id and an anchor | ||||
let mut in_header = false; | let mut in_header = false; | ||||
// pulldown_cmark can send several text events for a title if there are markdown | // pulldown_cmark can send several text events for a title if there are markdown | ||||
@@ -66,8 +71,8 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render | |||||
let mut opts = Options::empty(); | let mut opts = Options::empty(); | ||||
let mut has_summary = false; | let mut has_summary = false; | ||||
opts.insert(OPTION_ENABLE_TABLES); | |||||
opts.insert(OPTION_ENABLE_FOOTNOTES); | |||||
opts.insert(Options::ENABLE_TABLES); | |||||
opts.insert(Options::ENABLE_FOOTNOTES); | |||||
{ | { | ||||
let parser = Parser::new_ext(content, opts).map(|event| { | let parser = Parser::new_ext(content, opts).map(|event| { | ||||
@@ -76,24 +81,28 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render | |||||
// Header first | // Header first | ||||
if in_header { | if in_header { | ||||
if header_created { | if header_created { | ||||
temp_header.push(&text); | |||||
temp_header.add_text(&text); | |||||
return Event::Html(Borrowed("")); | return Event::Html(Borrowed("")); | ||||
} | } | ||||
let id = find_anchor(&anchors, slugify(&text), 0); | |||||
anchors.push(id.clone()); | |||||
// update the header and add it to the list | |||||
temp_header.permalink = format!("{}#{}", context.current_page_permalink, id); | |||||
temp_header.id = id; | |||||
// += as we might have some <code> or other things already there | // += as we might have some <code> or other things already there | ||||
temp_header.title += &text; | |||||
temp_header.add_text(&text); | |||||
header_created = true; | header_created = true; | ||||
return Event::Html(Borrowed("")); | return Event::Html(Borrowed("")); | ||||
} | } | ||||
// if we are in the middle of a code block | // if we are in the middle of a code block | ||||
if let Some(ref mut highlighter) = highlighter { | |||||
let highlighted = &highlighter.highlight(&text); | |||||
let html = styles_to_coloured_html(highlighted, IncludeBackground::Yes); | |||||
if let Some((ref mut highlighter, in_extra)) = highlighter { | |||||
let highlighted = if in_extra { | |||||
if let Some(ref extra) = context.config.extra_syntax_set { | |||||
highlighter.highlight(&text, &extra) | |||||
} else { | |||||
unreachable!("Got a highlighter from extra syntaxes but no extra?"); | |||||
} | |||||
} else { | |||||
highlighter.highlight(&text, &SYNTAX_SET) | |||||
}; | |||||
let html = styled_line_to_highlighted_html(&highlighted, background); | |||||
return Event::Html(Owned(html)); | return Event::Html(Owned(html)); | ||||
} | } | ||||
@@ -106,15 +115,13 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render | |||||
} | } | ||||
let theme = &THEME_SET.themes[&context.config.highlight_theme]; | let theme = &THEME_SET.themes[&context.config.highlight_theme]; | ||||
match get_highlighter(&theme, info, context.base_path, &context.config.extra_syntaxes) { | |||||
Ok(h) => highlighter = Some(h), | |||||
Err(err) => { | |||||
error = Some(format!("Could not load syntax: {}", err).into()); | |||||
return Event::Html(Borrowed("")); | |||||
} | |||||
} | |||||
let snippet = start_coloured_html_snippet(theme); | |||||
Event::Html(Owned(snippet)) | |||||
highlighter = Some(get_highlighter(info, &context.config)); | |||||
// This selects the background color the same way that start_coloured_html_snippet does | |||||
let color = | |||||
theme.settings.background.unwrap_or(::syntect::highlighting::Color::WHITE); | |||||
background = IncludeBackground::IfDifferent(color); | |||||
let snippet = start_highlighted_html_snippet(theme); | |||||
Event::Html(Owned(snippet.0)) | |||||
} | } | ||||
Event::End(Tag::CodeBlock(_)) => { | Event::End(Tag::CodeBlock(_)) => { | ||||
if !context.config.highlight_code { | if !context.config.highlight_code { | ||||
@@ -126,12 +133,10 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render | |||||
} | } | ||||
Event::Start(Tag::Image(src, title)) => { | Event::Start(Tag::Image(src, title)) => { | ||||
if is_colocated_asset_link(&src) { | if is_colocated_asset_link(&src) { | ||||
return Event::Start( | |||||
Tag::Image( | |||||
Owned(format!("{}{}", context.current_page_permalink, src)), | |||||
title, | |||||
) | |||||
); | |||||
return Event::Start(Tag::Image( | |||||
Owned(format!("{}{}", context.current_page_permalink, src)), | |||||
title, | |||||
)); | |||||
} | } | ||||
Event::Start(Tag::Image(src, title)) | Event::Start(Tag::Image(src, title)) | ||||
@@ -153,20 +158,21 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render | |||||
} | } | ||||
} else if is_colocated_asset_link(&link) { | } else if is_colocated_asset_link(&link) { | ||||
format!("{}{}", context.current_page_permalink, link) | format!("{}{}", context.current_page_permalink, link) | ||||
} else { | |||||
if context.config.check_external_links && !link.starts_with('#') { | |||||
let res = check_url(&link); | |||||
if res.is_valid() { | |||||
link.to_string() | |||||
} else { | |||||
error = Some( | |||||
format!("Link {} is not valid: {}", link, res.message()).into() | |||||
); | |||||
String::new() | |||||
} | |||||
} else { | |||||
} else if context.config.check_external_links | |||||
&& !link.starts_with('#') | |||||
&& !link.starts_with("mailto:") | |||||
{ | |||||
let res = check_url(&link); | |||||
if res.is_valid() { | |||||
link.to_string() | link.to_string() | ||||
} else { | |||||
error = Some( | |||||
format!("Link {} is not valid: {}", link, res.message()).into(), | |||||
); | |||||
String::new() | |||||
} | } | ||||
} else { | |||||
link.to_string() | |||||
}; | }; | ||||
if in_header { | if in_header { | ||||
@@ -175,7 +181,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render | |||||
} else { | } else { | ||||
format!("<a href=\"{}\" title=\"{}\">", fixed_link, title) | format!("<a href=\"{}\" title=\"{}\">", fixed_link, title) | ||||
}; | }; | ||||
temp_header.push(&html); | |||||
temp_header.add_html(&html); | |||||
return Event::Html(Borrowed("")); | return Event::Html(Borrowed("")); | ||||
} | } | ||||
@@ -183,21 +189,21 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render | |||||
} | } | ||||
Event::End(Tag::Link(_, _)) => { | Event::End(Tag::Link(_, _)) => { | ||||
if in_header { | if in_header { | ||||
temp_header.push("</a>"); | |||||
temp_header.add_html("</a>"); | |||||
return Event::Html(Borrowed("")); | return Event::Html(Borrowed("")); | ||||
} | } | ||||
event | event | ||||
} | } | ||||
Event::Start(Tag::Code) => { | Event::Start(Tag::Code) => { | ||||
if in_header { | if in_header { | ||||
temp_header.push("<code>"); | |||||
temp_header.add_html("<code>"); | |||||
return Event::Html(Borrowed("")); | return Event::Html(Borrowed("")); | ||||
} | } | ||||
event | event | ||||
} | } | ||||
Event::End(Tag::Code) => { | Event::End(Tag::Code) => { | ||||
if in_header { | if in_header { | ||||
temp_header.push("</code>"); | |||||
temp_header.add_html("</code>"); | |||||
return Event::Html(Borrowed("")); | return Event::Html(Borrowed("")); | ||||
} | } | ||||
event | event | ||||
@@ -208,8 +214,13 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render | |||||
Event::Html(Borrowed("")) | Event::Html(Borrowed("")) | ||||
} | } | ||||
Event::End(Tag::Header(_)) => { | Event::End(Tag::Header(_)) => { | ||||
// End of a header, reset all the things and return the stringified | |||||
// version of the header | |||||
// End of a header, reset all the things and return the header string | |||||
let id = find_anchor(&anchors, slugify(&temp_header.title), 0); | |||||
anchors.push(id.clone()); | |||||
temp_header.permalink = format!("{}#{}", context.current_page_permalink, id); | |||||
temp_header.id = id; | |||||
in_header = false; | in_header = false; | ||||
header_created = false; | header_created = false; | ||||
let val = temp_header.to_string(context.tera, context.insert_anchor); | let val = temp_header.to_string(context.tera, context.insert_anchor); | ||||
@@ -229,13 +240,12 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render | |||||
} | } | ||||
if let Some(e) = error { | if let Some(e) = error { | ||||
return Err(e) | |||||
return Err(e); | |||||
} else { | } else { | ||||
html = html.replace("<p></p>", "").replace("</p></p>", "</p>"); | |||||
Ok(Rendered { | Ok(Rendered { | ||||
summary_len: if has_summary { html.find(CONTINUE_READING) } else { None }, | summary_len: if has_summary { html.find(CONTINUE_READING) } else { None }, | ||||
body: html, | body: html, | ||||
toc: make_table_of_contents(&headers) | |||||
toc: make_table_of_contents(&headers), | |||||
}) | }) | ||||
} | } | ||||
} | } |
@@ -1,9 +1,10 @@ | |||||
use pest::Parser; | |||||
use pest::iterators::Pair; | use pest::iterators::Pair; | ||||
use tera::{Map, Context, Value, to_value}; | |||||
use pest::Parser; | |||||
use tera::{to_value, Context, Map, Value}; | |||||
use regex::Regex; | |||||
use context::RenderContext; | |||||
use errors::{Result, ResultExt}; | use errors::{Result, ResultExt}; | ||||
use ::context::RenderContext; | |||||
// This include forces recompiling this source file if the grammar file changes. | // This include forces recompiling this source file if the grammar file changes. | ||||
// Uncomment it when doing changes to the .pest file | // Uncomment it when doing changes to the .pest file | ||||
@@ -13,6 +14,9 @@ const _GRAMMAR: &str = include_str!("content.pest"); | |||||
#[grammar = "content.pest"] | #[grammar = "content.pest"] | ||||
pub struct ContentParser; | pub struct ContentParser; | ||||
lazy_static! { | |||||
static ref MULTIPLE_NEWLINE_RE: Regex = Regex::new(r"\n\s*\n").unwrap(); | |||||
} | |||||
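// For illustration: the regex collapses any run of blank or whitespace-only
// lines into a single newline, e.g.
//   MULTIPLE_NEWLINE_RE.replace_all("a\n\n\nb", "\n") == "a\nb"
//   MULTIPLE_NEWLINE_RE.replace_all("a\n  \nb", "\n") == "a\nb"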
fn replace_string_markers(input: &str) -> String { | fn replace_string_markers(input: &str) -> String { | ||||
match input.chars().next().unwrap() { | match input.chars().next().unwrap() { | ||||
@@ -39,7 +43,7 @@ fn parse_literal(pair: Pair<Rule>) -> Value { | |||||
Rule::int => { | Rule::int => { | ||||
val = Some(to_value(p.as_str().parse::<i64>().unwrap()).unwrap()); | val = Some(to_value(p.as_str().parse::<i64>().unwrap()).unwrap()); | ||||
} | } | ||||
_ => unreachable!("Unknown literal: {:?}", p) | |||||
_ => unreachable!("Unknown literal: {:?}", p), | |||||
}; | }; | ||||
} | } | ||||
@@ -53,20 +57,29 @@ fn parse_shortcode_call(pair: Pair<Rule>) -> (String, Map<String, Value>) { | |||||
for p in pair.into_inner() { | for p in pair.into_inner() { | ||||
match p.as_rule() { | match p.as_rule() { | ||||
Rule::ident => { name = Some(p.into_span().as_str().to_string()); } | |||||
Rule::ident => { | |||||
name = Some(p.into_span().as_str().to_string()); | |||||
} | |||||
Rule::kwarg => { | Rule::kwarg => { | ||||
let mut arg_name = None; | let mut arg_name = None; | ||||
let mut arg_val = None; | let mut arg_val = None; | ||||
for p2 in p.into_inner() { | for p2 in p.into_inner() { | ||||
match p2.as_rule() { | match p2.as_rule() { | ||||
Rule::ident => { arg_name = Some(p2.into_span().as_str().to_string()); } | |||||
Rule::literal => { arg_val = Some(parse_literal(p2)); } | |||||
Rule::ident => { | |||||
arg_name = Some(p2.into_span().as_str().to_string()); | |||||
} | |||||
Rule::literal => { | |||||
arg_val = Some(parse_literal(p2)); | |||||
} | |||||
Rule::array => { | Rule::array => { | ||||
let mut vals = vec![]; | let mut vals = vec![]; | ||||
for p3 in p2.into_inner() { | for p3 in p2.into_inner() { | ||||
match p3.as_rule() { | match p3.as_rule() { | ||||
Rule::literal => vals.push(parse_literal(p3)), | Rule::literal => vals.push(parse_literal(p3)), | ||||
_ => unreachable!("Got something other than literal in an array: {:?}", p3), | |||||
_ => unreachable!( | |||||
"Got something other than literal in an array: {:?}", | |||||
p3 | |||||
), | |||||
} | } | ||||
} | } | ||||
arg_val = Some(Value::Array(vals)); | arg_val = Some(Value::Array(vals)); | ||||
@@ -77,14 +90,18 @@ fn parse_shortcode_call(pair: Pair<Rule>) -> (String, Map<String, Value>) { | |||||
args.insert(arg_name.unwrap(), arg_val.unwrap()); | args.insert(arg_name.unwrap(), arg_val.unwrap()); | ||||
} | } | ||||
_ => unreachable!("Got something unexpected in a shortcode: {:?}", p) | |||||
_ => unreachable!("Got something unexpected in a shortcode: {:?}", p), | |||||
} | } | ||||
} | } | ||||
(name.unwrap(), args) | (name.unwrap(), args) | ||||
} | } | ||||
fn render_shortcode(name: String, args: Map<String, Value>, context: &RenderContext, body: Option<&str>) -> Result<String> { | |||||
fn render_shortcode( | |||||
name: &str, | |||||
args: &Map<String, Value>, | |||||
context: &RenderContext, | |||||
body: Option<&str>, | |||||
) -> Result<String> { | |||||
let mut tera_context = Context::new(); | let mut tera_context = Context::new(); | ||||
for (key, value) in args.iter() { | for (key, value) in args.iter() { | ||||
tera_context.insert(key, value); | tera_context.insert(key, value); | ||||
@@ -96,13 +113,17 @@ fn render_shortcode(name: String, args: Map<String, Value>, context: &RenderCont | |||||
tera_context.extend(context.tera_context.clone()); | tera_context.extend(context.tera_context.clone()); | ||||
let tpl_name = format!("shortcodes/{}.html", name); | let tpl_name = format!("shortcodes/{}.html", name); | ||||
let res = context.tera | |||||
let res = context | |||||
.tera | |||||
.render(&tpl_name, &tera_context) | .render(&tpl_name, &tera_context) | ||||
.chain_err(|| format!("Failed to render {} shortcode", name))?; | .chain_err(|| format!("Failed to render {} shortcode", name))?; | ||||
// We trim left every single line of a shortcode to avoid the accidental | |||||
// shortcode counted as code block because of 4 spaces left padding | |||||
Ok(res.lines().map(|s| s.trim_left()).collect()) | |||||
// Small hack to avoid having multiple blank lines caused by Tera tags, for example. | |||||
// A blank line will cause the markdown parser to think we're out of HTML and start looking | |||||
// at indentation, making the output a code block. | |||||
let res = MULTIPLE_NEWLINE_RE.replace_all(&res, "\n"); | |||||
Ok(res.to_string()) | |||||
} | } | ||||
 pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> {
@@ -111,22 +132,36 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
     let mut pairs = match ContentParser::parse(Rule::page, content) {
         Ok(p) => p,
         Err(e) => {
-            let fancy_e = e.renamed_rules(|rule| {
-                match *rule {
-                    Rule::int => "an integer".to_string(),
-                    Rule::float => "a float".to_string(),
-                    Rule::string => "a string".to_string(),
-                    Rule::literal => "a literal (int, float, string, bool)".to_string(),
-                    Rule::array => "an array".to_string(),
-                    Rule::kwarg => "a keyword argument".to_string(),
-                    Rule::ident => "an identifier".to_string(),
-                    Rule::inline_shortcode => "an inline shortcode".to_string(),
-                    Rule::ignored_inline_shortcode => "an ignored inline shortcode".to_string(),
-                    Rule::sc_body_start => "the start of a shortcode".to_string(),
-                    Rule::ignored_sc_body_start => "the start of an ignored shortcode".to_string(),
-                    Rule::text => "some text".to_string(),
-                    _ => format!("TODO error: {:?}", rule).to_string(),
-                }
+            let fancy_e = e.renamed_rules(|rule| match *rule {
+                Rule::int => "an integer".to_string(),
+                Rule::float => "a float".to_string(),
+                Rule::string => "a string".to_string(),
+                Rule::literal => "a literal (int, float, string, bool)".to_string(),
+                Rule::array => "an array".to_string(),
+                Rule::kwarg => "a keyword argument".to_string(),
+                Rule::ident => "an identifier".to_string(),
+                Rule::inline_shortcode => "an inline shortcode".to_string(),
+                Rule::ignored_inline_shortcode => "an ignored inline shortcode".to_string(),
+                Rule::sc_body_start => "the start of a shortcode".to_string(),
+                Rule::ignored_sc_body_start => "the start of an ignored shortcode".to_string(),
+                Rule::text => "some text".to_string(),
+                Rule::EOI => "end of input".to_string(),
+                Rule::double_quoted_string => "double quoted string".to_string(),
+                Rule::single_quoted_string => "single quoted string".to_string(),
+                Rule::backquoted_quoted_string => "backquoted quoted string".to_string(),
+                Rule::boolean => "a boolean (true, false)".to_string(),
+                Rule::all_chars => "a alphanumerical character".to_string(),
+                Rule::kwargs => "a list of keyword arguments".to_string(),
+                Rule::sc_def => "a shortcode definition".to_string(),
+                Rule::shortcode_with_body => "a shortcode with body".to_string(),
+                Rule::ignored_shortcode_with_body => "an ignored shortcode with body".to_string(),
+                Rule::sc_body_end => "{% end %}".to_string(),
+                Rule::ignored_sc_body_end => "{%/* end */%}".to_string(),
+                Rule::text_in_body_sc => "text in a shortcode body".to_string(),
+                Rule::text_in_ignored_body_sc => "text in an ignored shortcode body".to_string(),
+                Rule::content => "some content".to_string(),
+                Rule::page => "a page".to_string(),
+                Rule::WHITESPACE => "whitespace".to_string(),
             });
             bail!("{}", fancy_e);
         }
@@ -135,10 +170,10 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
     // We have at least a `page` pair
     for p in pairs.next().unwrap().into_inner() {
         match p.as_rule() {
-            Rule::text | Rule::text_in_ignored_body_sc | Rule::text_in_body_sc => res.push_str(p.into_span().as_str()),
+            Rule::text => res.push_str(p.into_span().as_str()),
             Rule::inline_shortcode => {
                 let (name, args) = parse_shortcode_call(p);
-                res.push_str(&render_shortcode(name, args, context, None)?);
+                res.push_str(&render_shortcode(&name, &args, context, None)?);
             }
             Rule::shortcode_with_body => {
                 let mut inner = p.into_inner();
@@ -146,13 +181,11 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
                 // we don't care about the closing tag
                 let (name, args) = parse_shortcode_call(inner.next().unwrap());
                 let body = inner.next().unwrap().into_span().as_str();
-                res.push_str(&render_shortcode(name, args, context, Some(body))?);
+                res.push_str(&render_shortcode(&name, &args, context, Some(body))?);
             }
             Rule::ignored_inline_shortcode => {
                 res.push_str(
-                    &p.into_span().as_str()
-                        .replacen("{{/*", "{{", 1)
-                        .replacen("*/}}", "}}", 1)
+                    &p.into_span().as_str().replacen("{{/*", "{{", 1).replacen("*/}}", "}}", 1),
                 );
             }
             Rule::ignored_shortcode_with_body => {
@@ -160,9 +193,10 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
                     match p2.as_rule() {
                         Rule::ignored_sc_body_start | Rule::ignored_sc_body_end => {
                             res.push_str(
-                                &p2.into_span().as_str()
+                                &p2.into_span()
+                                    .as_str()
                                     .replacen("{%/*", "{%", 1)
-                                    .replacen("*/%}", "%}", 1)
+                                    .replacen("*/%}", "%}", 1),
                             );
                         }
                         Rule::text_in_ignored_body_sc => res.push_str(p2.into_span().as_str()),
@@ -170,6 +204,7 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
                 }
             }
+            Rule::EOI => (),
             _ => unreachable!("unexpected page rule: {:?}", p.as_rule()),
         }
     }
@@ -180,12 +215,11 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
 #[cfg(test)]
 mod tests {
     use std::collections::HashMap;
-    use std::path::Path;
 
-    use tera::Tera;
-    use super::*;
     use config::Config;
     use front_matter::InsertAnchor;
+
+    use super::*;
+    use tera::Tera;
 
     macro_rules! assert_lex_rule {
         ($rule: expr, $input: expr) => {
@@ -204,7 +238,7 @@ mod tests {
     fn render_shortcodes(code: &str, tera: &Tera) -> String {
         let config = Config::default();
         let permalinks = HashMap::new();
-        let context = RenderContext::new(&tera, &config, "", &permalinks, Path::new("something"), InsertAnchor::None);
+        let context = RenderContext::new(&tera, &config, "", &permalinks, InsertAnchor::None);
         super::render_shortcodes(code, &context).unwrap()
     }
 
@@ -283,7 +317,7 @@ mod tests {
 {% hello() %}
 Body {{ var }}
 {% end %}
-"#
+"#,
         ];
         for i in inputs {
             assert_lex_rule!(Rule::page, i);
@@ -304,38 +338,46 @@ mod tests {
 
     #[test]
     fn can_unignore_shortcode_with_body() {
-        let res = render_shortcodes(r#"
+        let res = render_shortcodes(
+            r#"
 Hello World
-{%/* youtube() */%}Some body {{ hello() }}{%/* end */%}"#, &Tera::default());
+{%/* youtube() */%}Some body {{ hello() }}{%/* end */%}"#,
+            &Tera::default(),
+        );
         assert_eq!(res, "\nHello World\n{% youtube() %}Some body {{ hello() }}{% end %}");
     }
 
+    // https://github.com/Keats/gutenberg/issues/383
+    #[test]
+    fn unignore_shortcode_with_body_does_not_swallow_initial_whitespace() {
+        let res = render_shortcodes(
+            r#"
+Hello World
+{%/* youtube() */%}
+Some body {{ hello() }}{%/* end */%}"#,
+            &Tera::default(),
+        );
        assert_eq!(res, "\nHello World\n{% youtube() %}\nSome body {{ hello() }}{% end %}");
+    }
+
     #[test]
     fn can_parse_shortcode_arguments() {
         let inputs = vec![
             ("{{ youtube() }}", "youtube", Map::new()),
-            (
-                "{{ youtube(id=1, autoplay=true, hello='salut', float=1.2) }}",
-                "youtube",
-                {
-                    let mut m = Map::new();
-                    m.insert("id".to_string(), to_value(1).unwrap());
-                    m.insert("autoplay".to_string(), to_value(true).unwrap());
-                    m.insert("hello".to_string(), to_value("salut").unwrap());
-                    m.insert("float".to_string(), to_value(1.2).unwrap());
-                    m
-                }
-            ),
-            (
-                "{{ gallery(photos=['something', 'else'], fullscreen=true) }}",
-                "gallery",
-                {
-                    let mut m = Map::new();
-                    m.insert("photos".to_string(), to_value(["something", "else"]).unwrap());
-                    m.insert("fullscreen".to_string(), to_value(true).unwrap());
-                    m
-                }
-            ),
+            ("{{ youtube(id=1, autoplay=true, hello='salut', float=1.2) }}", "youtube", {
+                let mut m = Map::new();
+                m.insert("id".to_string(), to_value(1).unwrap());
+                m.insert("autoplay".to_string(), to_value(true).unwrap());
+                m.insert("hello".to_string(), to_value("salut").unwrap());
+                m.insert("float".to_string(), to_value(1.2).unwrap());
+                m
+            }),
+            ("{{ gallery(photos=['something', 'else'], fullscreen=true) }}", "gallery", {
+                let mut m = Map::new();
+                m.insert("photos".to_string(), to_value(["something", "else"]).unwrap());
+                m.insert("fullscreen".to_string(), to_value(true).unwrap());
+                m
+            }),
         ];
 
         for (i, n, a) in inputs {
@@ -361,4 +403,13 @@ Hello World
         let res = render_shortcodes("Body\n {% youtube() %}Hey!{% end %}", &tera);
         assert_eq!(res, "Body\n Hey!");
     }
+
+    // https://github.com/Keats/gutenberg/issues/462
+    #[test]
+    fn shortcodes_with_body_do_not_eat_newlines() {
+        let mut tera = Tera::default();
+        tera.add_raw_template("shortcodes/youtube.html", "{{body | safe}}").unwrap();
+        let res = render_shortcodes("Body\n {% youtube() %}\nHello \n World{% end %}", &tera);
+        assert_eq!(res, "Body\n Hello \n World");
+    }
 }
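The `{{body | safe}}` template registered in the last test is not incidental: Tera auto-escapes variables in templates whose name ends in `.html`, so a shortcode body containing markup needs the `safe` filter to pass through verbatim. A standalone illustration (not code from this diff, and assuming the Tera version used by this repo):

```rust
extern crate tera;

use tera::{Context, Tera};

fn main() {
    let mut tera = Tera::default();
    // Same naming scheme as the shortcode templates above.
    tera.add_raw_template("shortcodes/raw.html", "{{ body | safe }}").unwrap();
    tera.add_raw_template("shortcodes/escaped.html", "{{ body }}").unwrap();

    let mut ctx = Context::new();
    ctx.insert("body", "Hello <b>world</b>");

    // With `safe`, the markup passes through untouched.
    assert_eq!(tera.render("shortcodes/raw.html", &ctx).unwrap(), "Hello <b>world</b>");
    // Without it, autoescaping entity-encodes the tags.
    assert!(tera.render("shortcodes/escaped.html", &ctx).unwrap().contains("&lt;b&gt;"));
}
```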
@@ -1,6 +1,5 @@
-use tera::{Tera, Context as TeraContext};
-
 use front_matter::InsertAnchor;
+use tera::{Context as TeraContext, Tera};
 
 #[derive(Debug, PartialEq, Clone, Serialize)]
 pub struct Header {
@@ -31,6 +30,7 @@ pub struct TempHeader {
     pub id: String,
     pub permalink: String,
     pub title: String,
+    pub html: String,
 }
 
 impl TempHeader {
@@ -40,10 +40,16 @@ impl TempHeader {
             id: String::new(),
             permalink: String::new(),
             title: String::new(),
+            html: String::new(),
         }
     }
 
-    pub fn push(&mut self, val: &str) {
+    pub fn add_html(&mut self, val: &str) {
+        self.html += val;
+    }
+
+    pub fn add_text(&mut self, val: &str) {
+        self.html += val;
         self.title += val;
     }
 
@@ -51,16 +57,33 @@ impl TempHeader {
     pub fn to_string(&self, tera: &Tera, insert_anchor: InsertAnchor) -> String {
         let anchor_link = if insert_anchor != InsertAnchor::None {
             let mut c = TeraContext::new();
-            c.add("id", &self.id);
+            c.insert("id", &self.id);
             tera.render("anchor-link.html", &c).unwrap()
         } else {
             String::new()
         };
 
         match insert_anchor {
-            InsertAnchor::None => format!("<h{lvl} id=\"{id}\">{t}</h{lvl}>\n", lvl = self.level, t = self.title, id = self.id),
-            InsertAnchor::Left => format!("<h{lvl} id=\"{id}\">{a}{t}</h{lvl}>\n", lvl = self.level, a = anchor_link, t = self.title, id = self.id),
-            InsertAnchor::Right => format!("<h{lvl} id=\"{id}\">{t}{a}</h{lvl}>\n", lvl = self.level, a = anchor_link, t = self.title, id = self.id),
+            InsertAnchor::None => format!(
+                "<h{lvl} id=\"{id}\">{t}</h{lvl}>\n",
+                lvl = self.level,
+                t = self.html,
+                id = self.id
+            ),
+            InsertAnchor::Left => format!(
+                "<h{lvl} id=\"{id}\">{a}{t}</h{lvl}>\n",
+                lvl = self.level,
+                a = anchor_link,
+                t = self.html,
+                id = self.id
+            ),
+            InsertAnchor::Right => format!(
+                "<h{lvl} id=\"{id}\">{t}{a}</h{lvl}>\n",
+                lvl = self.level,
+                a = anchor_link,
+                t = self.html,
+                id = self.id
+            ),
         }
     }
 }
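The `add_text`/`add_html` split above is what lets header titles stay free of markup while the rendered heading keeps it: inline tags feed only `html`, plain text feeds both fields. Reduced to the two methods from this hunk (the real struct has more fields):

```rust
// Standalone sketch of the contract, not the full TempHeader from the diff.
#[derive(Default)]
struct TempHeader {
    title: String, // plain text, used for the table of contents
    html: String,  // full markup, used for the rendered <hN> element
}

impl TempHeader {
    /// Called for HTML events inside a header, e.g. an opening or closing tag.
    fn add_html(&mut self, val: &str) {
        self.html += val;
    }

    /// Called for text events: feeds both the markup and the TOC title.
    fn add_text(&mut self, val: &str) {
        self.html += val;
        self.title += val;
    }
}

fn main() {
    let mut h = TempHeader::default();
    h.add_html("<code>");
    h.add_text("Hello");
    h.add_html("</code>");
    assert_eq!(h.title, "Hello");
    assert_eq!(h.html, "<code>Hello</code>");
}
```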
@@ -71,9 +94,12 @@ impl Default for TempHeader {
     }
 }
 
 /// Recursively finds children of a header
-fn find_children(parent_level: i32, start_at: usize, temp_headers: &[TempHeader]) -> (usize, Vec<Header>) {
+fn find_children(
+    parent_level: i32,
+    start_at: usize,
+    temp_headers: &[TempHeader],
+) -> (usize, Vec<Header>) {
     let mut headers = vec![];
 
     let mut start_at = start_at;
@@ -117,7 +143,6 @@ fn find_children(parent_level: i32, start_at: usize, temp_headers: &[TempHeader]
     (start_at, headers)
 }
 
-
 /// Converts the flat temp headers into a nested set of headers
 /// representing the hierarchy
 pub fn make_table_of_contents(temp_headers: &[TempHeader]) -> Vec<Header> {
@@ -141,11 +166,7 @@ mod tests {
 
     #[test]
     fn can_make_basic_toc() {
-        let input = vec![
-            TempHeader::new(1),
-            TempHeader::new(1),
-            TempHeader::new(1),
-        ];
+        let input = vec![TempHeader::new(1), TempHeader::new(1), TempHeader::new(1)];
         let toc = make_table_of_contents(&input);
         assert_eq!(toc.len(), 3);
     }
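`can_make_basic_toc` only covers sibling headers; the nesting path through `find_children` can be pinned down in the same style. A hedged sketch using only constructors and fields visible in this diff:

```rust
// Hypothetical companion test, same module as can_make_basic_toc.
#[test]
fn can_nest_lower_level_headers() {
    // One h1 followed by two h2s: expect a single top-level entry
    // with both h2s attached as children.
    let input = vec![TempHeader::new(1), TempHeader::new(2), TempHeader::new(2)];
    let toc = make_table_of_contents(&input);
    assert_eq!(toc.len(), 1);
    assert_eq!(toc[0].children.len(), 2);
}
```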
@@ -1,26 +1,24 @@
-extern crate tera;
-extern crate config;
 extern crate front_matter;
-extern crate templates;
 extern crate rendering;
+extern crate config;
+extern crate templates;
+extern crate tera;
 
 use std::collections::HashMap;
-use std::path::Path;
 
 use tera::Tera;
 
 use config::Config;
 use front_matter::InsertAnchor;
-use templates::GUTENBERG_TERA;
-use rendering::{RenderContext, render_content};
+use rendering::{render_content, RenderContext};
+use templates::ZOLA_TERA;
 
 #[test]
 fn can_do_render_content_simple() {
     let tera_ctx = Tera::default();
     let permalinks_ctx = HashMap::new();
     let config = Config::default();
-    let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None);
+    let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
     let res = render_content("hello", &context).unwrap();
     assert_eq!(res.body, "<p>hello</p>\n");
 }
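Every call site in this test file gets the same mechanical rewrite: `RenderContext::new` loses the `Path::new("something")` argument and keeps the remaining five. A small helper in the style of these tests, mirroring only the calls visible in the diff:

```rust
// Hypothetical helper for this test file; everything it uses is already
// imported at the top of the file.
fn render_plain(md: &str) -> String {
    let tera = Tera::default();
    let config = Config::default();
    let permalinks = HashMap::new();
    // Note: no Path argument anymore, just tera, config, permalink, map, anchor.
    let context = RenderContext::new(&tera, &config, "", &permalinks, InsertAnchor::None);
    render_content(md, &context).unwrap().body
}

#[test]
fn render_plain_wraps_text_in_a_paragraph() {
    assert_eq!(render_plain("hello"), "<p>hello</p>\n");
}
```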
@@ -31,24 +29,22 @@ fn doesnt_highlight_code_block_with_highlighting_off() { | |||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let mut config = Config::default(); | let mut config = Config::default(); | ||||
config.highlight_code = false; | config.highlight_code = false; | ||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content("```\n$ gutenberg server\n```", &context).unwrap(); | let res = render_content("```\n$ gutenberg server\n```", &context).unwrap(); | ||||
assert_eq!( | |||||
res.body, | |||||
"<pre><code>$ gutenberg server\n</code></pre>\n" | |||||
); | |||||
assert_eq!(res.body, "<pre><code>$ gutenberg server\n</code></pre>\n"); | |||||
} | } | ||||
#[test] | #[test] | ||||
fn can_highlight_code_block_no_lang() { | fn can_highlight_code_block_no_lang() { | ||||
let tera_ctx = Tera::default(); | let tera_ctx = Tera::default(); | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let mut config = Config::default(); | |||||
config.highlight_code = true; | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content("```\n$ gutenberg server\n$ ping\n```", &context).unwrap(); | let res = render_content("```\n$ gutenberg server\n$ ping\n```", &context).unwrap(); | ||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
"<pre style=\"background-color:#2b303b\">\n<span style=\"background-color:#2b303b;color:#c0c5ce;\">$ gutenberg server\n</span><span style=\"background-color:#2b303b;color:#c0c5ce;\">$ ping\n</span></pre>" | |||||
"<pre style=\"background-color:#2b303b;\">\n<span style=\"color:#c0c5ce;\">$ gutenberg server\n</span><span style=\"color:#c0c5ce;\">$ ping\n</span></pre>" | |||||
); | ); | ||||
} | } | ||||
@@ -56,12 +52,13 @@ fn can_highlight_code_block_no_lang() { | |||||
fn can_highlight_code_block_with_lang() { | fn can_highlight_code_block_with_lang() { | ||||
let tera_ctx = Tera::default(); | let tera_ctx = Tera::default(); | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let mut config = Config::default(); | |||||
config.highlight_code = true; | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content("```python\nlist.append(1)\n```", &context).unwrap(); | let res = render_content("```python\nlist.append(1)\n```", &context).unwrap(); | ||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
"<pre style=\"background-color:#2b303b\">\n<span style=\"background-color:#2b303b;color:#c0c5ce;\">list.</span><span style=\"background-color:#2b303b;color:#bf616a;\">append</span><span style=\"background-color:#2b303b;color:#c0c5ce;\">(</span><span style=\"background-color:#2b303b;color:#d08770;\">1</span><span style=\"background-color:#2b303b;color:#c0c5ce;\">)\n</span></pre>" | |||||
"<pre style=\"background-color:#2b303b;\">\n<span style=\"color:#c0c5ce;\">list.</span><span style=\"color:#bf616a;\">append</span><span style=\"color:#c0c5ce;\">(</span><span style=\"color:#d08770;\">1</span><span style=\"color:#c0c5ce;\">)\n</span></pre>" | |||||
); | ); | ||||
} | } | ||||
@@ -69,13 +66,14 @@ fn can_highlight_code_block_with_lang() { | |||||
fn can_higlight_code_block_with_unknown_lang() { | fn can_higlight_code_block_with_unknown_lang() { | ||||
let tera_ctx = Tera::default(); | let tera_ctx = Tera::default(); | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let mut config = Config::default(); | |||||
config.highlight_code = true; | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content("```yolo\nlist.append(1)\n```", &context).unwrap(); | let res = render_content("```yolo\nlist.append(1)\n```", &context).unwrap(); | ||||
// defaults to plain text | // defaults to plain text | ||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
"<pre style=\"background-color:#2b303b\">\n<span style=\"background-color:#2b303b;color:#c0c5ce;\">list.append(1)\n</span></pre>" | |||||
"<pre style=\"background-color:#2b303b;\">\n<span style=\"color:#c0c5ce;\">list.append(1)\n</span></pre>" | |||||
); | ); | ||||
} | } | ||||
@@ -83,12 +81,16 @@ fn can_higlight_code_block_with_unknown_lang() { | |||||
fn can_render_shortcode() { | fn can_render_shortcode() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let res = render_content(r#" | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content( | |||||
r#" | |||||
Hello | Hello | ||||
{{ youtube(id="ub36ffWAqgQ") }} | {{ youtube(id="ub36ffWAqgQ") }} | ||||
"#, &context).unwrap(); | |||||
"#, | |||||
&context, | |||||
) | |||||
.unwrap(); | |||||
assert!(res.body.contains("<p>Hello</p>\n<div >")); | assert!(res.body.contains("<p>Hello</p>\n<div >")); | ||||
assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ""#)); | assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ""#)); | ||||
} | } | ||||
@@ -97,15 +99,11 @@ Hello | |||||
fn can_render_shortcode_with_markdown_char_in_args_name() { | fn can_render_shortcode_with_markdown_char_in_args_name() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let input = vec![ | |||||
"name", | |||||
"na_me", | |||||
"n_a_me", | |||||
"n1", | |||||
]; | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let input = vec!["name", "na_me", "n_a_me", "n1"]; | |||||
for i in input { | for i in input { | ||||
let res = render_content(&format!("{{{{ youtube(id=\"hey\", {}=1) }}}}", i), &context).unwrap(); | |||||
let res = | |||||
render_content(&format!("{{{{ youtube(id=\"hey\", {}=1) }}}}", i), &context).unwrap(); | |||||
assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/hey""#)); | assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/hey""#)); | ||||
} | } | ||||
} | } | ||||
@@ -114,7 +112,7 @@ fn can_render_shortcode_with_markdown_char_in_args_name() { | |||||
fn can_render_shortcode_with_markdown_char_in_args_value() { | fn can_render_shortcode_with_markdown_char_in_args_value() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let input = vec![ | let input = vec![ | ||||
"ub36ffWAqgQ-hey", | "ub36ffWAqgQ-hey", | ||||
"ub36ffWAqgQ_hey", | "ub36ffWAqgQ_hey", | ||||
@@ -124,7 +122,9 @@ fn can_render_shortcode_with_markdown_char_in_args_value() { | |||||
]; | ]; | ||||
for i in input { | for i in input { | ||||
let res = render_content(&format!("{{{{ youtube(id=\"{}\") }}}}", i), &context).unwrap(); | let res = render_content(&format!("{{{{ youtube(id=\"{}\") }}}}", i), &context).unwrap(); | ||||
assert!(res.body.contains(&format!(r#"<iframe src="https://www.youtube.com/embed/{}""#, i))); | |||||
assert!(res | |||||
.body | |||||
.contains(&format!(r#"<iframe src="https://www.youtube.com/embed/{}""#, i))); | |||||
} | } | ||||
} | } | ||||
@@ -132,18 +132,21 @@ fn can_render_shortcode_with_markdown_char_in_args_value() { | |||||
fn can_render_body_shortcode_with_markdown_char_in_name() { | fn can_render_body_shortcode_with_markdown_char_in_name() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let mut tera = Tera::default(); | let mut tera = Tera::default(); | ||||
tera.extend(&GUTENBERG_TERA).unwrap(); | |||||
let input = vec![ | |||||
"quo_te", | |||||
"qu_o_te", | |||||
]; | |||||
tera.extend(&ZOLA_TERA).unwrap(); | |||||
let input = vec!["quo_te", "qu_o_te"]; | |||||
let config = Config::default(); | let config = Config::default(); | ||||
for i in input { | for i in input { | ||||
tera.add_raw_template(&format!("shortcodes/{}.html", i), "<blockquote>{{ body }} - {{ author}}</blockquote>").unwrap(); | |||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let res = render_content(&format!("{{% {}(author=\"Bob\") %}}\nhey\n{{% end %}}", i), &context).unwrap(); | |||||
tera.add_raw_template( | |||||
&format!("shortcodes/{}.html", i), | |||||
"<blockquote>{{ body }} - {{ author}}</blockquote>", | |||||
) | |||||
.unwrap(); | |||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = | |||||
render_content(&format!("{{% {}(author=\"Bob\") %}}\nhey\n{{% end %}}", i), &context) | |||||
.unwrap(); | |||||
println!("{:?}", res); | println!("{:?}", res); | ||||
assert!(res.body.contains("<blockquote>hey - Bob</blockquote>")); | assert!(res.body.contains("<blockquote>hey - Bob</blockquote>")); | ||||
} | } | ||||
@@ -153,7 +156,7 @@ fn can_render_body_shortcode_with_markdown_char_in_name() { | |||||
fn can_render_body_shortcode_and_paragraph_after() { | fn can_render_body_shortcode_and_paragraph_after() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let mut tera = Tera::default(); | let mut tera = Tera::default(); | ||||
tera.extend(&GUTENBERG_TERA).unwrap(); | |||||
tera.extend(&ZOLA_TERA).unwrap(); | |||||
let shortcode = "<p>{{ body }}</p>"; | let shortcode = "<p>{{ body }}</p>"; | ||||
let markdown_string = r#" | let markdown_string = r#" | ||||
@@ -170,7 +173,7 @@ Here is another paragraph. | |||||
tera.add_raw_template(&format!("shortcodes/{}.html", "figure"), shortcode).unwrap(); | tera.add_raw_template(&format!("shortcodes/{}.html", "figure"), shortcode).unwrap(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content(markdown_string, &context).unwrap(); | let res = render_content(markdown_string, &context).unwrap(); | ||||
println!("{:?}", res); | println!("{:?}", res); | ||||
@@ -181,7 +184,7 @@ Here is another paragraph. | |||||
fn can_render_two_body_shortcode_and_paragraph_after_with_line_break_between() { | fn can_render_two_body_shortcode_and_paragraph_after_with_line_break_between() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let mut tera = Tera::default(); | let mut tera = Tera::default(); | ||||
tera.extend(&GUTENBERG_TERA).unwrap(); | |||||
tera.extend(&ZOLA_TERA).unwrap(); | |||||
let shortcode = "<p>{{ body }}</p>"; | let shortcode = "<p>{{ body }}</p>"; | ||||
let markdown_string = r#" | let markdown_string = r#" | ||||
@@ -203,7 +206,7 @@ Here is another paragraph. | |||||
tera.add_raw_template(&format!("shortcodes/{}.html", "figure"), shortcode).unwrap(); | tera.add_raw_template(&format!("shortcodes/{}.html", "figure"), shortcode).unwrap(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content(markdown_string, &context).unwrap(); | let res = render_content(markdown_string, &context).unwrap(); | ||||
println!("{:?}", res); | println!("{:?}", res); | ||||
@@ -214,8 +217,9 @@ Here is another paragraph. | |||||
fn can_render_several_shortcode_in_row() { | fn can_render_several_shortcode_in_row() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let res = render_content(r#" | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content( | |||||
r#" | |||||
Hello | Hello | ||||
{{ youtube(id="ub36ffWAqgQ") }} | {{ youtube(id="ub36ffWAqgQ") }} | ||||
@@ -228,10 +232,15 @@ Hello | |||||
{{ gist(url="https://gist.github.com/Keats/32d26f699dcc13ebd41b") }} | {{ gist(url="https://gist.github.com/Keats/32d26f699dcc13ebd41b") }} | ||||
"#, &context).unwrap(); | |||||
"#, | |||||
&context, | |||||
) | |||||
.unwrap(); | |||||
assert!(res.body.contains("<p>Hello</p>\n<div >")); | assert!(res.body.contains("<p>Hello</p>\n<div >")); | ||||
assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ""#)); | assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ""#)); | ||||
assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ?autoplay=1""#)); | |||||
assert!(res | |||||
.body | |||||
.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ?autoplay=1""#)); | |||||
assert!(res.body.contains(r#"<iframe src="https://www.streamable.com/e/c0ic""#)); | assert!(res.body.contains(r#"<iframe src="https://www.streamable.com/e/c0ic""#)); | ||||
assert!(res.body.contains(r#"//player.vimeo.com/video/210073083""#)); | assert!(res.body.contains(r#"//player.vimeo.com/video/210073083""#)); | ||||
} | } | ||||
@@ -241,7 +250,7 @@ fn doesnt_render_ignored_shortcodes() { | |||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let mut config = Config::default(); | let mut config = Config::default(); | ||||
config.highlight_code = false; | config.highlight_code = false; | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content(r#"```{{/* youtube(id="w7Ft2ymGmfc") */}}```"#, &context).unwrap(); | let res = render_content(r#"```{{/* youtube(id="w7Ft2ymGmfc") */}}```"#, &context).unwrap(); | ||||
assert_eq!(res.body, "<p><code>{{ youtube(id="w7Ft2ymGmfc") }}</code></p>\n"); | assert_eq!(res.body, "<p><code>{{ youtube(id="w7Ft2ymGmfc") }}</code></p>\n"); | ||||
} | } | ||||
@@ -249,18 +258,26 @@ fn doesnt_render_ignored_shortcodes() { | |||||
#[test] | #[test] | ||||
fn can_render_shortcode_with_body() { | fn can_render_shortcode_with_body() { | ||||
let mut tera = Tera::default(); | let mut tera = Tera::default(); | ||||
tera.extend(&GUTENBERG_TERA).unwrap(); | |||||
tera.add_raw_template("shortcodes/quote.html", "<blockquote>{{ body }} - {{ author }}</blockquote>").unwrap(); | |||||
tera.extend(&ZOLA_TERA).unwrap(); | |||||
tera.add_raw_template( | |||||
"shortcodes/quote.html", | |||||
"<blockquote>{{ body }} - {{ author }}</blockquote>", | |||||
) | |||||
.unwrap(); | |||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content(r#" | |||||
let res = render_content( | |||||
r#" | |||||
Hello | Hello | ||||
{% quote(author="Keats") %} | {% quote(author="Keats") %} | ||||
A quote | A quote | ||||
{% end %} | {% end %} | ||||
"#, &context).unwrap(); | |||||
"#, | |||||
&context, | |||||
) | |||||
.unwrap(); | |||||
assert_eq!(res.body, "<p>Hello</p>\n<blockquote>A quote - Keats</blockquote>\n"); | assert_eq!(res.body, "<p>Hello</p>\n<blockquote>A quote - Keats</blockquote>\n"); | ||||
} | } | ||||
@@ -269,7 +286,7 @@ fn errors_rendering_unknown_shortcode() { | |||||
let tera_ctx = Tera::default(); | let tera_ctx = Tera::default(); | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content("{{ hello(flash=true) }}", &context); | let res = render_content("{{ hello(flash=true) }}", &context); | ||||
assert!(res.is_err()); | assert!(res.is_err()); | ||||
} | } | ||||
@@ -280,11 +297,12 @@ fn can_make_valid_relative_link() { | |||||
permalinks.insert("pages/about.md".to_string(), "https://vincent.is/about".to_string()); | permalinks.insert("pages/about.md".to_string(), "https://vincent.is/about".to_string()); | ||||
let tera_ctx = Tera::default(); | let tera_ctx = Tera::default(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks, InsertAnchor::None); | |||||
let res = render_content( | let res = render_content( | ||||
r#"[rel link](./pages/about.md), [abs link](https://vincent.is/about)"#, | r#"[rel link](./pages/about.md), [abs link](https://vincent.is/about)"#, | ||||
&context, | &context, | ||||
).unwrap(); | |||||
) | |||||
.unwrap(); | |||||
assert!( | assert!( | ||||
res.body.contains(r#"<p><a href="https://vincent.is/about">rel link</a>, <a href="https://vincent.is/about">abs link</a></p>"#) | res.body.contains(r#"<p><a href="https://vincent.is/about">rel link</a>, <a href="https://vincent.is/about">abs link</a></p>"#) | ||||
@@ -297,12 +315,10 @@ fn can_make_relative_links_with_anchors() { | |||||
permalinks.insert("pages/about.md".to_string(), "https://vincent.is/about".to_string()); | permalinks.insert("pages/about.md".to_string(), "https://vincent.is/about".to_string()); | ||||
let tera_ctx = Tera::default(); | let tera_ctx = Tera::default(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks, InsertAnchor::None); | |||||
let res = render_content(r#"[rel link](./pages/about.md#cv)"#, &context).unwrap(); | let res = render_content(r#"[rel link](./pages/about.md#cv)"#, &context).unwrap(); | ||||
assert!( | |||||
res.body.contains(r#"<p><a href="https://vincent.is/about#cv">rel link</a></p>"#) | |||||
); | |||||
assert!(res.body.contains(r#"<p><a href="https://vincent.is/about#cv">rel link</a></p>"#)); | |||||
} | } | ||||
#[test] | #[test] | ||||
@@ -310,7 +326,7 @@ fn errors_relative_link_inexistant() { | |||||
let tera_ctx = Tera::default(); | let tera_ctx = Tera::default(); | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content("[rel link](./pages/about.md)", &context); | let res = render_content("[rel link](./pages/about.md)", &context); | ||||
assert!(res.is_err()); | assert!(res.is_err()); | ||||
} | } | ||||
@@ -320,7 +336,7 @@ fn can_add_id_to_headers() { | |||||
let tera_ctx = Tera::default(); | let tera_ctx = Tera::default(); | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content(r#"# Hello"#, &context).unwrap(); | let res = render_content(r#"# Hello"#, &context).unwrap(); | ||||
assert_eq!(res.body, "<h1 id=\"hello\">Hello</h1>\n"); | assert_eq!(res.body, "<h1 id=\"hello\">Hello</h1>\n"); | ||||
} | } | ||||
@@ -330,7 +346,7 @@ fn can_add_id_to_headers_same_slug() { | |||||
let tera_ctx = Tera::default(); | let tera_ctx = Tera::default(); | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content("# Hello\n# Hello", &context).unwrap(); | let res = render_content("# Hello\n# Hello", &context).unwrap(); | ||||
assert_eq!(res.body, "<h1 id=\"hello\">Hello</h1>\n<h1 id=\"hello-1\">Hello</h1>\n"); | assert_eq!(res.body, "<h1 id=\"hello\">Hello</h1>\n<h1 id=\"hello-1\">Hello</h1>\n"); | ||||
} | } | ||||
@@ -339,11 +355,11 @@ fn can_add_id_to_headers_same_slug() { | |||||
fn can_insert_anchor_left() { | fn can_insert_anchor_left() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::Left); | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::Left); | |||||
let res = render_content("# Hello", &context).unwrap(); | let res = render_content("# Hello", &context).unwrap(); | ||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
"<h1 id=\"hello\"><a class=\"gutenberg-anchor\" href=\"#hello\" aria-label=\"Anchor link for: hello\">đź”—</a>\nHello</h1>\n" | |||||
"<h1 id=\"hello\"><a class=\"zola-anchor\" href=\"#hello\" aria-label=\"Anchor link for: hello\">đź”—</a>\nHello</h1>\n" | |||||
); | ); | ||||
} | } | ||||
@@ -351,11 +367,11 @@ fn can_insert_anchor_left() { | |||||
fn can_insert_anchor_right() { | fn can_insert_anchor_right() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::Right); | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::Right); | |||||
let res = render_content("# Hello", &context).unwrap(); | let res = render_content("# Hello", &context).unwrap(); | ||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
"<h1 id=\"hello\">Hello<a class=\"gutenberg-anchor\" href=\"#hello\" aria-label=\"Anchor link for: hello\">đź”—</a>\n</h1>\n" | |||||
"<h1 id=\"hello\">Hello<a class=\"zola-anchor\" href=\"#hello\" aria-label=\"Anchor link for: hello\">đź”—</a>\n</h1>\n" | |||||
); | ); | ||||
} | } | ||||
@@ -364,11 +380,11 @@ fn can_insert_anchor_right() { | |||||
fn can_insert_anchor_with_exclamation_mark() { | fn can_insert_anchor_with_exclamation_mark() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::Left); | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::Left); | |||||
let res = render_content("# Hello!", &context).unwrap(); | let res = render_content("# Hello!", &context).unwrap(); | ||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
"<h1 id=\"hello\"><a class=\"gutenberg-anchor\" href=\"#hello\" aria-label=\"Anchor link for: hello\">đź”—</a>\nHello!</h1>\n" | |||||
"<h1 id=\"hello\"><a class=\"zola-anchor\" href=\"#hello\" aria-label=\"Anchor link for: hello\">đź”—</a>\nHello!</h1>\n" | |||||
); | ); | ||||
} | } | ||||
@@ -377,11 +393,11 @@ fn can_insert_anchor_with_exclamation_mark() { | |||||
fn can_insert_anchor_with_link() { | fn can_insert_anchor_with_link() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::Left); | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::Left); | |||||
let res = render_content("## [Rust](https://rust-lang.org)", &context).unwrap(); | let res = render_content("## [Rust](https://rust-lang.org)", &context).unwrap(); | ||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
"<h2 id=\"rust\"><a class=\"gutenberg-anchor\" href=\"#rust\" aria-label=\"Anchor link for: rust\">đź”—</a>\n<a href=\"https://rust-lang.org\">Rust</a></h2>\n" | |||||
"<h2 id=\"rust\"><a class=\"zola-anchor\" href=\"#rust\" aria-label=\"Anchor link for: rust\">đź”—</a>\n<a href=\"https://rust-lang.org\">Rust</a></h2>\n" | |||||
); | ); | ||||
} | } | ||||
@@ -389,11 +405,11 @@ fn can_insert_anchor_with_link() { | |||||
fn can_insert_anchor_with_other_special_chars() { | fn can_insert_anchor_with_other_special_chars() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::Left); | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::Left); | |||||
let res = render_content("# Hello*_()", &context).unwrap(); | let res = render_content("# Hello*_()", &context).unwrap(); | ||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
"<h1 id=\"hello\"><a class=\"gutenberg-anchor\" href=\"#hello\" aria-label=\"Anchor link for: hello\">đź”—</a>\nHello*_()</h1>\n" | |||||
"<h1 id=\"hello\"><a class=\"zola-anchor\" href=\"#hello\" aria-label=\"Anchor link for: hello\">đź”—</a>\nHello*_()</h1>\n" | |||||
); | ); | ||||
} | } | ||||
@@ -402,15 +418,15 @@ fn can_make_toc() { | |||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new( | let context = RenderContext::new( | ||||
&GUTENBERG_TERA, | |||||
&ZOLA_TERA, | |||||
&config, | &config, | ||||
"https://mysite.com/something", | "https://mysite.com/something", | ||||
&permalinks_ctx, | &permalinks_ctx, | ||||
Path::new("something"), | |||||
InsertAnchor::Left, | InsertAnchor::Left, | ||||
); | ); | ||||
let res = render_content(r#" | |||||
let res = render_content( | |||||
r#" | |||||
# Header 1 | # Header 1 | ||||
## Header 2 | ## Header 2 | ||||
@@ -418,7 +434,10 @@ fn can_make_toc() { | |||||
## Another Header 2 | ## Another Header 2 | ||||
### Last one | ### Last one | ||||
"#, &context).unwrap(); | |||||
"#, | |||||
&context, | |||||
) | |||||
.unwrap(); | |||||
let toc = res.toc; | let toc = res.toc; | ||||
assert_eq!(toc.len(), 1); | assert_eq!(toc.len(), 1); | ||||
@@ -426,28 +445,58 @@ fn can_make_toc() { | |||||
assert_eq!(toc[0].children[1].children.len(), 1); | assert_eq!(toc[0].children[1].children.len(), 1); | ||||
} | } | ||||
#[test] | |||||
fn can_ignore_tags_in_toc() { | |||||
let permalinks_ctx = HashMap::new(); | |||||
let config = Config::default(); | |||||
let context = RenderContext::new( | |||||
&ZOLA_TERA, | |||||
&config, | |||||
"https://mysite.com/something", | |||||
&permalinks_ctx, | |||||
InsertAnchor::Left, | |||||
); | |||||
let res = render_content( | |||||
r#" | |||||
## header with `code` | |||||
## [anchor](https://duckduckgo.com/) in header | |||||
## **bold** and *italics* | |||||
"#, | |||||
&context, | |||||
) | |||||
.unwrap(); | |||||
let toc = res.toc; | |||||
assert_eq!(toc[0].id, "header-with-code"); | |||||
assert_eq!(toc[0].title, "header with code"); | |||||
assert_eq!(toc[1].id, "anchor-in-header"); | |||||
assert_eq!(toc[1].title, "anchor in header"); | |||||
assert_eq!(toc[2].id, "bold-and-italics"); | |||||
assert_eq!(toc[2].title, "bold and italics"); | |||||
} | |||||
#[test] | #[test] | ||||
fn can_understand_backtick_in_titles() { | fn can_understand_backtick_in_titles() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content("# `Hello`", &context).unwrap(); | let res = render_content("# `Hello`", &context).unwrap(); | ||||
assert_eq!( | |||||
res.body, | |||||
"<h1 id=\"hello\"><code>Hello</code></h1>\n" | |||||
); | |||||
assert_eq!(res.body, "<h1 id=\"hello\"><code>Hello</code></h1>\n"); | |||||
} | } | ||||
#[test] | #[test] | ||||
fn can_understand_backtick_in_paragraphs() { | fn can_understand_backtick_in_paragraphs() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content("Hello `world`", &context).unwrap(); | let res = render_content("Hello `world`", &context).unwrap(); | ||||
assert_eq!( | |||||
res.body, | |||||
"<p>Hello <code>world</code></p>\n" | |||||
); | |||||
assert_eq!(res.body, "<p>Hello <code>world</code></p>\n"); | |||||
} | } | ||||
// https://github.com/Keats/gutenberg/issues/297 | // https://github.com/Keats/gutenberg/issues/297 | ||||
@@ -455,20 +504,18 @@ fn can_understand_backtick_in_paragraphs() { | |||||
fn can_understand_links_in_header() { | fn can_understand_links_in_header() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = render_content("# [Rust](https://rust-lang.org)", &context).unwrap(); | let res = render_content("# [Rust](https://rust-lang.org)", &context).unwrap(); | ||||
assert_eq!( | |||||
res.body, | |||||
"<h1 id=\"rust\"><a href=\"https://rust-lang.org\">Rust</a></h1>\n" | |||||
); | |||||
assert_eq!(res.body, "<h1 id=\"rust\"><a href=\"https://rust-lang.org\">Rust</a></h1>\n"); | |||||
} | } | ||||
#[test] | #[test] | ||||
fn can_understand_link_with_title_in_header() { | fn can_understand_link_with_title_in_header() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let res = render_content("# [Rust](https://rust-lang.org \"Rust homepage\")", &context).unwrap(); | |||||
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None); | |||||
let res = | |||||
render_content("# [Rust](https://rust-lang.org \"Rust homepage\")", &context).unwrap(); | |||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
"<h1 id=\"rust\"><a href=\"https://rust-lang.org\" title=\"Rust homepage\">Rust</a></h1>\n" | "<h1 id=\"rust\"><a href=\"https://rust-lang.org\" title=\"Rust homepage\">Rust</a></h1>\n" | ||||
@@ -481,11 +528,8 @@ fn can_make_valid_relative_link_in_header() { | |||||
permalinks.insert("pages/about.md".to_string(), "https://vincent.is/about/".to_string()); | permalinks.insert("pages/about.md".to_string(), "https://vincent.is/about/".to_string()); | ||||
let tera_ctx = Tera::default(); | let tera_ctx = Tera::default(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks, Path::new("something"), InsertAnchor::None); | |||||
let res = render_content( | |||||
r#" # [rel link](./pages/about.md)"#, | |||||
&context, | |||||
).unwrap(); | |||||
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks, InsertAnchor::None); | |||||
let res = render_content(r#" # [rel link](./pages/about.md)"#, &context).unwrap(); | |||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
@@ -497,19 +541,28 @@ fn can_make_valid_relative_link_in_header() { | |||||
fn can_make_permalinks_with_colocated_assets_for_link() { | fn can_make_permalinks_with_colocated_assets_for_link() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let res = render_content("[an image](image.jpg)", &context).unwrap(); | |||||
assert_eq!( | |||||
res.body, | |||||
"<p><a href=\"https://vincent.is/about/image.jpg\">an image</a></p>\n" | |||||
let context = RenderContext::new( | |||||
&ZOLA_TERA, | |||||
&config, | |||||
"https://vincent.is/about/", | |||||
&permalinks_ctx, | |||||
InsertAnchor::None, | |||||
); | ); | ||||
let res = render_content("[an image](image.jpg)", &context).unwrap(); | |||||
assert_eq!(res.body, "<p><a href=\"https://vincent.is/about/image.jpg\">an image</a></p>\n"); | |||||
} | } | ||||
#[test] | #[test] | ||||
fn can_make_permalinks_with_colocated_assets_for_image() { | fn can_make_permalinks_with_colocated_assets_for_image() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new( | |||||
&ZOLA_TERA, | |||||
&config, | |||||
"https://vincent.is/about/", | |||||
&permalinks_ctx, | |||||
InsertAnchor::None, | |||||
); | |||||
let res = render_content("![alt text](image.jpg)", &context).unwrap(); | let res = render_content("![alt text](image.jpg)", &context).unwrap(); | ||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
@@ -521,8 +574,15 @@ fn can_make_permalinks_with_colocated_assets_for_image() { | |||||
fn markdown_doesnt_wrap_html_in_paragraph() { | fn markdown_doesnt_wrap_html_in_paragraph() { | ||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let config = Config::default(); | let config = Config::default(); | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let res = render_content(r#" | |||||
let context = RenderContext::new( | |||||
&ZOLA_TERA, | |||||
&config, | |||||
"https://vincent.is/about/", | |||||
&permalinks_ctx, | |||||
InsertAnchor::None, | |||||
); | |||||
let res = render_content( | |||||
r#" | |||||
Some text | Some text | ||||
<h1>Helo</h1> | <h1>Helo</h1> | ||||
@@ -532,7 +592,10 @@ Some text | |||||
<img src="mobx-flow.png" alt="MobX flow"> | <img src="mobx-flow.png" alt="MobX flow"> | ||||
</a> | </a> | ||||
</div> | </div> | ||||
"#, &context).unwrap(); | |||||
"#, | |||||
&context, | |||||
) | |||||
.unwrap(); | |||||
assert_eq!( | assert_eq!( | ||||
res.body, | res.body, | ||||
"<p>Some text</p>\n<h1>Helo</h1>\n<div>\n<a href=\"mobx-flow.png\">\n <img src=\"mobx-flow.png\" alt=\"MobX flow\">\n </a>\n</div>\n" | "<p>Some text</p>\n<h1>Helo</h1>\n<div>\n<a href=\"mobx-flow.png\">\n <img src=\"mobx-flow.png\" alt=\"MobX flow\">\n </a>\n</div>\n" | ||||
@@ -544,12 +607,15 @@ fn can_validate_valid_external_links() { | |||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let mut config = Config::default(); | let mut config = Config::default(); | ||||
config.check_external_links = true; | config.check_external_links = true; | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let res = render_content("[a link](http://google.com)", &context).unwrap(); | |||||
assert_eq!( | |||||
res.body, | |||||
"<p><a href=\"http://google.com\">a link</a></p>\n" | |||||
let context = RenderContext::new( | |||||
&ZOLA_TERA, | |||||
&config, | |||||
"https://vincent.is/about/", | |||||
&permalinks_ctx, | |||||
InsertAnchor::None, | |||||
); | ); | ||||
let res = render_content("[a link](http://google.com)", &context).unwrap(); | |||||
assert_eq!(res.body, "<p><a href=\"http://google.com\">a link</a></p>\n"); | |||||
} | } | ||||
#[test] | #[test] | ||||
@@ -557,20 +623,62 @@ fn can_show_error_message_for_invalid_external_links() { | |||||
let permalinks_ctx = HashMap::new(); | let permalinks_ctx = HashMap::new(); | ||||
let mut config = Config::default(); | let mut config = Config::default(); | ||||
config.check_external_links = true; | config.check_external_links = true; | ||||
let context = RenderContext::new(&GUTENBERG_TERA, &config, "https://vincent.is/about/", &permalinks_ctx, Path::new("something"), InsertAnchor::None); | |||||
let context = RenderContext::new( | |||||
&ZOLA_TERA, | |||||
&config, | |||||
"https://vincent.is/about/", | |||||
&permalinks_ctx, | |||||
InsertAnchor::None, | |||||
); | |||||
let res = render_content("[a link](http://google.comy)", &context); | let res = render_content("[a link](http://google.comy)", &context); | ||||
assert!(res.is_err()); | assert!(res.is_err()); | ||||
let err = res.unwrap_err(); | let err = res.unwrap_err(); | ||||
assert!(err.description().contains("Link http://google.comy is not valid")); | assert!(err.description().contains("Link http://google.comy is not valid")); | ||||
} | } | ||||
#[test] | |||||
fn doesnt_try_to_validate_email_links_mailto() { | |||||
let permalinks_ctx = HashMap::new(); | |||||
let mut config = Config::default(); | |||||
config.check_external_links = true; | |||||
let context = RenderContext::new( | |||||
&ZOLA_TERA, | |||||
&config, | |||||
"https://vincent.is/about/", | |||||
&permalinks_ctx, | |||||
InsertAnchor::None, | |||||
); | |||||
let res = render_content("Email: [foo@bar.baz](mailto:foo@bar.baz)", &context).unwrap(); | |||||
assert_eq!(res.body, "<p>Email: <a href=\"mailto:foo@bar.baz\">foo@bar.baz</a></p>\n"); | |||||
} | |||||
#[test] | |||||
fn doesnt_try_to_validate_email_links_angled_brackets() { | |||||
let permalinks_ctx = HashMap::new(); | |||||
let mut config = Config::default(); | |||||
config.check_external_links = true; | |||||
let context = RenderContext::new( | |||||
&ZOLA_TERA, | |||||
&config, | |||||
"https://vincent.is/about/", | |||||
&permalinks_ctx, | |||||
InsertAnchor::None, | |||||
); | |||||
let res = render_content("Email: <foo@bar.baz>", &context).unwrap(); | |||||
assert_eq!(res.body, "<p>Email: <a href=\"mailto:foo@bar.baz\">foo@bar.baz</a></p>\n"); | |||||
} | |||||
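
The two new tests pin down that link checking only applies to http(s) URLs; `mailto:` targets (explicit or via angle brackets) are emitted as-is. As a minimal sketch of the idea — the helper name here is hypothetical, not zola's actual API — the decision reduces to a scheme test:

```rust
// Hypothetical helper illustrating why mailto: links are skipped:
// only http(s) URLs warrant a network round-trip during link checking.
fn is_external_http_link(url: &str) -> bool {
    url.starts_with("http://") || url.starts_with("https://")
}

#[test]
fn mailto_is_not_checked() {
    assert!(!is_external_http_link("mailto:foo@bar.baz"));
    assert!(is_external_http_link("http://google.com"));
}
```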
#[test]
fn can_handle_summaries() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
    let config = Config::default();
    let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, Path::new("something"), InsertAnchor::None);
    let res = render_content("Hello [world]\n\n<!-- more -->\n\nBla bla\n\n[world]: https://vincent.is/about/", &context).unwrap();
    let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
    let res = render_content(
        "Hello [world]\n\n<!-- more -->\n\nBla bla\n\n[world]: https://vincent.is/about/",
        &context,
    )
    .unwrap();
    assert_eq!(
        res.body,
        "<p>Hello <a href=\"https://vincent.is/about/\">world</a></p>\n<p><a name=\"continue-reading\"></a></p>\n<p>Bla bla</p>\n"
@@ -580,3 +688,36 @@ fn can_handle_summaries() {
        Some("<p>Hello <a href=\"https://vincent.is/about/\">world</a></p>\n".len())
    );
}

// https://github.com/Keats/gutenberg/issues/522
#[test]
fn doesnt_try_to_highlight_content_from_shortcode() {
    let permalinks_ctx = HashMap::new();
    let mut tera = Tera::default();
    tera.extend(&ZOLA_TERA).unwrap();

    let shortcode = r#"
<figure>
{% if width %}
<img src="/images/{{ src }}" alt="{{ caption }}" width="{{ width }}" />
{% else %}
<img src="/images/{{ src }}" alt="{{ caption }}" />
{% endif %}
<figcaption>{{ caption }}</figcaption>
</figure>"#;

    let markdown_string = r#"{{ figure(src="spherecluster.png", caption="Some spheres.") }}"#;

    let expected = r#"<figure>
<img src="/images/spherecluster.png" alt="Some spheres." />
<figcaption>Some spheres.</figcaption>
</figure>"#;

    tera.add_raw_template(&format!("shortcodes/{}.html", "figure"), shortcode).unwrap();
    let config = Config::default();
    let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);

    let res = render_content(markdown_string, &context).unwrap();
    assert_eq!(res.body, expected);
}
@@ -9,4 +9,4 @@ ammonia = "1"
lazy_static = "1"

errors = { path = "../errors" }
content = { path = "../content" }
library = { path = "../library" }

@@ -2,20 +2,19 @@ extern crate elasticlunr;
#[macro_use]
extern crate lazy_static;
extern crate ammonia;
#[macro_use]
extern crate errors;
extern crate content;
extern crate library;

use std::collections::{HashMap, HashSet};
use std::path::PathBuf;

use elasticlunr::{Index, Language};

use content::Section;
use errors::Result;
use library::{Library, Section};

pub const ELASTICLUNR_JS: &'static str = include_str!("elasticlunr.min.js");
pub const ELASTICLUNR_JS: &str = include_str!("elasticlunr.min.js");

lazy_static! {
    static ref AMMONIA: ammonia::Builder<'static> = {
@@ -34,27 +33,28 @@ lazy_static! {
    };
}

/// Returns the generated JSON index with all the documents of the site added using
/// the language given
/// Errors if the language given is not available in Elasticlunr
/// TODO: is making `in_search_index` apply to subsections of a `false` section useful?
pub fn build_index(sections: &HashMap<PathBuf, Section>, lang: &str) -> Result<String> {
pub fn build_index(lang: &str, library: &Library) -> Result<String> {
    let language = match Language::from_code(lang) {
        Some(l) => l,
        None => { bail!("Tried to build search index for language {} which is not supported", lang); }
        None => {
            bail!("Tried to build search index for language {} which is not supported", lang);
        }
    };

    let mut index = Index::with_language(language, &["title", "body"]);

    for section in sections.values() {
        add_section_to_index(&mut index, section);
    for section in library.sections_values() {
        add_section_to_index(&mut index, section, library);
    }

    Ok(index.to_json())
}

fn add_section_to_index(index: &mut Index, section: &Section) {
fn add_section_to_index(index: &mut Index, section: &Section, library: &Library) {
    if !section.meta.in_search_index {
        return;
    }
@@ -63,18 +63,25 @@ fn add_section_to_index(index: &mut Index, section: &Section) {
    if section.meta.redirect_to.is_none() {
        index.add_doc(
            &section.permalink,
            &[&section.meta.title.clone().unwrap_or(String::new()), &AMMONIA.clean(&section.content).to_string()],
            &[
                &section.meta.title.clone().unwrap_or_default(),
                &AMMONIA.clean(&section.content).to_string(),
            ],
        );
    }

    for page in &section.pages {
    for key in &section.pages {
        let page = library.get_page_by_key(*key);
        if !page.meta.in_search_index || page.meta.draft {
            continue;
        }

        index.add_doc(
            &page.permalink,
            &[&page.meta.title.clone().unwrap_or(String::new()), &AMMONIA.clean(&page.content).to_string()],
            &[
                &page.meta.title.clone().unwrap_or_default(),
                &AMMONIA.clean(&page.content).to_string(),
            ],
        );
    }
}
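
For reference, the elasticlunr calls used above compose the same way outside of zola. A minimal standalone sketch, assuming the same `elasticlunr` crate and hard-coding one document in place of a `Library`:

```rust
extern crate elasticlunr;

use elasticlunr::{Index, Language};

fn main() {
    // Same shape as build_index above: pick a language, declare the fields,
    // add one document per page, then serialize the index for the browser.
    let language = Language::from_code("en").expect("supported language");
    let mut index = Index::with_language(language, &["title", "body"]);
    index.add_doc("https://example.com/about/", &["About", "Some body text"]);
    println!("window.searchIndex = {};", index.to_json());
}
```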
@@ -16,11 +16,9 @@ config = { path = "../config" }
utils = { path = "../utils" }
templates = { path = "../templates" }
front_matter = { path = "../front_matter" }
pagination = { path = "../pagination" }
taxonomies = { path = "../taxonomies" }
content = { path = "../content" }
search = { path = "../search" }
imageproc = { path = "../imageproc" }
library = { path = "../library" }

[dev-dependencies]
tempfile = "3"

@@ -54,7 +54,7 @@ sit submisso coniuge tristis ubi!
```rs
#[derive(Debug)]
pub struct Site {
    /// The base path of the gutenberg site
    /// The base path of the zola site
    pub base_path: PathBuf,
    /// The parsed config for the site
    pub config: Config,

@@ -1,14 +1,13 @@
//! Benchmarking loading/markdown rendering of generated sites of various sizes
#![feature(test)]
extern crate test;
extern crate site;
extern crate test;

use std::env;

use site::Site;

#[bench]
fn bench_loading_small_blog(b: &mut test::Bencher) {
    let mut path = env::current_dir().unwrap().to_path_buf();
@@ -1,83 +0,0 @@
//! Benchmarking writing down on the hard drive sites of various sizes
#![feature(test)]
extern crate test;
extern crate site;
extern crate tempfile;

use std::env;

use site::Site;
use tempfile::tempdir;

#[bench]
fn bench_rendering_small_blog(b: &mut test::Bencher) {
    let mut path = env::current_dir().unwrap().to_path_buf();
    path.push("benches");
    path.push("small-blog");
    let mut site = Site::new(&path, "config.toml").unwrap();
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);
    site.load().unwrap();

    b.iter(|| site.build().unwrap());
}

#[bench]
fn bench_rendering_medium_blog(b: &mut test::Bencher) {
    let mut path = env::current_dir().unwrap().to_path_buf();
    path.push("benches");
    path.push("medium-blog");
    let mut site = Site::new(&path, "config.toml").unwrap();
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);
    site.load().unwrap();

    b.iter(|| site.build().unwrap());
}

//#[bench]
//fn bench_rendering_big_blog(b: &mut test::Bencher) {
//    let mut path = env::current_dir().unwrap().to_path_buf();
//    path.push("benches");
//    path.push("big-blog");
//    let mut site = Site::new(&path, "config.toml").unwrap();
//    let tmp_dir = tempdir().expect("create temp dir");
//    let public = &tmp_dir.path().join("public");
//    site.set_output_path(&public);
//    site.load().unwrap();
//
//    b.iter(|| site.build().unwrap());
//}

#[bench]
fn bench_rendering_small_kb(b: &mut test::Bencher) {
    let mut path = env::current_dir().unwrap().to_path_buf();
    path.push("benches");
    path.push("small-kb");
    let mut site = Site::new(&path, "config.toml").unwrap();
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);
    site.load().unwrap();

    b.iter(|| site.build().unwrap());
}

#[bench]
fn bench_rendering_medium_kb(b: &mut test::Bencher) {
    let mut path = env::current_dir().unwrap().to_path_buf();
    path.push("benches");
    path.push("medium-kb");
    let mut site = Site::new(&path, "config.toml").unwrap();
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);
    site.load().unwrap();

    b.iter(|| site.build().unwrap());
}
@@ -1,15 +1,14 @@
#![feature(test)]
extern crate test;
extern crate library;
extern crate site;
extern crate pagination;
extern crate tempfile;
extern crate test;

use std::env;

use tempfile::tempdir;

use library::Paginator;
use site::Site;
use pagination::Paginator;
use tempfile::tempdir;

fn setup_site(name: &str) -> Site {
    let mut path = env::current_dir().unwrap().to_path_buf();
@@ -22,7 +21,7 @@ fn setup_site(name: &str) -> Site {

#[bench]
fn bench_render_aliases(b: &mut test::Bencher) {
    let mut site = setup_site("small-blog");
    let mut site = setup_site("big-blog");
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);
@@ -31,7 +30,7 @@ fn bench_render_aliases(b: &mut test::Bencher) {

#[bench]
fn bench_render_sitemap(b: &mut test::Bencher) {
    let mut site = setup_site("small-blog");
    let mut site = setup_site("big-blog");
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);
@@ -40,11 +39,11 @@ fn bench_render_sitemap(b: &mut test::Bencher) {

#[bench]
fn bench_render_rss_feed(b: &mut test::Bencher) {
    let mut site = setup_site("small-blog");
    let mut site = setup_site("big-blog");
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);
    b.iter(|| site.render_rss_feed(None, None).unwrap());
    b.iter(|| site.render_rss_feed(site.library.pages_values(), None).unwrap());
}

#[bench]
@@ -62,8 +61,48 @@ fn bench_render_paginated(b: &mut test::Bencher) {
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);
    let section = site.sections.values().collect::<Vec<_>>()[0];
    let paginator = Paginator::from_section(&section.pages, section);
    let section = site.library.sections_values()[0];
    let paginator = Paginator::from_section(&section, &site.library);

    b.iter(|| site.render_paginated(public, &paginator));
}

#[bench]
fn bench_populate_sections_medium_blog(b: &mut test::Bencher) {
    let mut site = setup_site("medium-blog");
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);

    b.iter(|| site.populate_sections());
}

#[bench]
fn bench_populate_sections_medium_kb(b: &mut test::Bencher) {
    let mut site = setup_site("medium-kb");
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);

    b.iter(|| site.populate_sections());
}

#[bench]
fn bench_render_markdown_small_blog(b: &mut test::Bencher) {
    let mut site = setup_site("small-blog");
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);

    b.iter(|| site.render_markdown());
}

#[bench]
fn bench_render_markdown_small_kb(b: &mut test::Bencher) {
    let mut site = setup_site("small-kb");
    let tmp_dir = tempdir().expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);

    b.iter(|| site.render_markdown());
}
@@ -1,7 +1,7 @@
extern crate tera;
extern crate rayon;
extern crate glob;
extern crate rayon;
extern crate serde;
extern crate tera;
#[macro_use]
extern crate serde_derive;
extern crate sass_rs;
@@ -9,41 +9,36 @@ extern crate sass_rs;
#[macro_use]
extern crate errors;
extern crate config;
extern crate utils;
extern crate front_matter;
extern crate templates;
extern crate pagination;
extern crate taxonomies;
extern crate content;
extern crate search;
extern crate imageproc;
extern crate library;
extern crate search;
extern crate templates;
extern crate utils;

#[cfg(test)]
extern crate tempfile;

use std::collections::HashMap;
use std::fs::{create_dir_all, remove_dir_all, copy};
use std::mem;
use std::fs::{copy, create_dir_all, remove_dir_all};
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};

use glob::glob;
use tera::{Tera, Context};
use sass_rs::{Options as SassOptions, OutputStyle, compile_file};
use rayon::prelude::*;
use sass_rs::{compile_file, Options as SassOptions, OutputStyle};
use tera::{Context, Tera};

use config::{get_config, Config};
use errors::{Result, ResultExt};
use config::{Config, get_config};
use utils::fs::{create_file, copy_directory, create_directory, ensure_directory_exists};
use utils::templates::{render_template, rewrite_theme_paths};
use front_matter::InsertAnchor;
use library::{
    find_taxonomies, sort_actual_pages_by_date, Library, Page, Paginator, Section, Taxonomy,
};
use templates::{global_fns, render_redirect_template, ZOLA_TERA};
use utils::fs::{copy_directory, create_directory, create_file, ensure_directory_exists};
use utils::net::get_available_port;
use content::{Page, Section, populate_siblings, sort_pages};
use templates::{GUTENBERG_TERA, global_fns, render_redirect_template};
use front_matter::{SortBy, InsertAnchor};
use taxonomies::{Taxonomy, find_taxonomies};
use pagination::Paginator;
use rayon::prelude::*;
use utils::templates::{render_template, rewrite_theme_paths};

/// The sitemap only needs links and potentially date so we trim down
/// all pages to only that
@@ -61,12 +56,10 @@ impl SitemapEntry {
#[derive(Debug)]
pub struct Site {
    /// The base path of the gutenberg site
    /// The base path of the zola site
    pub base_path: PathBuf,
    /// The parsed config for the site
    pub config: Config,
    pub pages: HashMap<PathBuf, Page>,
    pub sections: HashMap<PathBuf, Section>,
    pub tera: Tera,
    imageproc: Arc<Mutex<imageproc::Processor>>,
    // the live reload port to be used if there is one
@@ -78,6 +71,8 @@ pub struct Site {
    /// A map of all .md files (section and pages) and their permalink
    /// We need that if there are relative links in the content that need to be resolved
    pub permalinks: HashMap<String, String>,
    /// Contains all pages and sections of the site
    pub library: Library,
}

impl Site {
@@ -86,12 +81,13 @@ impl Site {
    pub fn new<P: AsRef<Path>>(path: P, config_file: &str) -> Result<Site> {
        let path = path.as_ref();
        let mut config = get_config(path, config_file);
        config.load_extra_syntaxes(path)?;

        let tpl_glob = format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "templates/**/*.*ml");
        let tpl_glob =
            format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "templates/**/*.*ml");
        // Only parsing as we might be extending templates from themes and that would error
        // as we haven't loaded them yet
        let mut tera = Tera::parse(&tpl_glob).chain_err(|| "Error parsing templates")?;
        if let Some(theme) = config.theme.clone() {
            // Grab data from the extra section of the theme
            config.merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"))?;
@@ -102,26 +98,41 @@ impl Site {
                bail!("Theme `{}` is missing a templates folder", theme);
            }

            let theme_tpl_glob = format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "themes/**/*.html");
            let mut tera_theme = Tera::parse(&theme_tpl_glob).chain_err(|| "Error parsing templates from themes")?;
            let theme_tpl_glob = format!(
                "{}/{}",
                path.to_string_lossy().replace("\\", "/"),
                format!("themes/{}/templates/**/*.*ml", theme)
            );
            let mut tera_theme =
                Tera::parse(&theme_tpl_glob).chain_err(|| "Error parsing templates from themes")?;
            rewrite_theme_paths(&mut tera_theme, &theme);
            // TODO: same as below
            if theme_path.join("templates").join("robots.txt").exists() {
                tera_theme
                    .add_template_file(theme_path.join("templates").join("robots.txt"), None)?;
            }
            tera_theme.build_inheritance_chains()?;
            tera.extend(&tera_theme)?;
        }
        tera.extend(&GUTENBERG_TERA)?;
        tera.extend(&ZOLA_TERA)?;
        // the `extend` above already does it but hey
        tera.build_inheritance_chains()?;

        // TODO: Tera doesn't use globset right now so we can load the robots.txt as part
        // of the glob above, therefore we load it manually if it exists.
        if path.join("templates").join("robots.txt").exists() {
            tera.add_template_file(path.join("templates").join("robots.txt"), Some("robots.txt"))?;
        }

        let content_path = path.join("content");
        let static_path = path.join("static");
        let imageproc = imageproc::Processor::new(content_path.clone(), &static_path, &config.base_url);
        let imageproc =
            imageproc::Processor::new(content_path.clone(), &static_path, &config.base_url);

        let site = Site {
            base_path: path.to_path_buf(),
            config,
            tera,
            pages: HashMap::new(),
            sections: HashMap::new(),
            imageproc: Arc::new(Mutex::new(imageproc)),
            live_reload: None,
            output_path: path.join("public"),
@@ -129,6 +140,8 @@ impl Site {
            static_path,
            taxonomies: Vec::new(),
            permalinks: HashMap::new(),
            // We will allocate it properly later on
            library: Library::new(0, 0),
        };

        Ok(site)
@@ -139,26 +152,16 @@ impl Site {
        self.content_path.join("_index.md")
    }

    pub fn enable_live_reload(&mut self) {
        self.live_reload = get_available_port();
    /// We avoid the port the server is going to use as it's not bound yet
    /// when calling this function and we could end up having tried to bind
    /// both http and websocket server to the same port
    pub fn enable_live_reload(&mut self, port_to_avoid: u16) {
        self.live_reload = get_available_port(port_to_avoid);
    }
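
The new doc comment explains the constraint: the HTTP server's port is not bound yet when live reload is set up, so the websocket port picker has to be told to avoid it explicitly. A sketch of a compatible `get_available_port` — the real one lives in `utils::net`; this is an illustration of the idea, not its actual code:

```rust
use std::net::TcpListener;

// Illustrative only: scan a port range, skipping the port the HTTP
// server has reserved but not yet bound. Binding succeeds only on a
// port that is currently free.
fn get_available_port(avoid: u16) -> Option<u16> {
    (1024..9000).find(|&port| port != avoid && TcpListener::bind(("127.0.0.1", port)).is_ok())
}
```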
    /// Get all the orphan (== without section) pages in the site
    pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
        let mut pages_in_sections = vec![];
        let mut orphans = vec![];

        for s in self.sections.values() {
            pages_in_sections.extend(s.all_pages_path());
        }

        for page in self.pages.values() {
            if !pages_in_sections.contains(&page.file.path) {
                orphans.push(page);
            }
        }

        orphans
        self.library.get_all_orphan_pages()
    }

    pub fn set_base_url(&mut self, base_url: String) {
@@ -180,8 +183,11 @@ impl Site {
        let (section_entries, page_entries): (Vec<_>, Vec<_>) = glob(&content_glob)
            .unwrap()
            .filter_map(|e| e.ok())
            .filter(|e| !e.as_path().file_name().unwrap().to_str().unwrap().starts_with('.'))
            .partition(|entry| entry.as_path().file_name().unwrap() == "_index.md");

        self.library = Library::new(page_entries.len(), section_entries.len());

        let sections = {
            let config = &self.config;
@@ -216,7 +222,7 @@ impl Site {
        // Insert a default index section if necessary so we don't need to create
        // a _index.md to render the index page at the root of the site
        let index_path = self.index_section_path();
        if let Some(ref index_section) = self.sections.get(&index_path) {
        if let Some(ref index_section) = self.library.get_section(&index_path) {
            if self.config.build_search_index && !index_section.meta.in_search_index {
                bail!(
                    "You have enabled search in the config but disabled it in the index section: \
@@ -226,24 +232,28 @@ impl Site {
            }
        }

        // Not in else because of borrow checker
        if !self.sections.contains_key(&index_path) {
        if !self.library.contains_section(&index_path) {
            let mut index_section = Section::default();
            index_section.permalink = self.config.make_permalink("");
            index_section.file.path = self.content_path.join("_index.md");
            index_section.file.parent = self.content_path.clone();
            index_section.file.relative = "_index.md".to_string();
            self.sections.insert(index_path, index_section);
            self.library.insert_section(index_section);
        }

        let mut pages_insert_anchors = HashMap::new();
        for page in pages {
            let p = page?;
            pages_insert_anchors.insert(p.file.path.clone(), self.find_parent_section_insert_anchor(&p.file.parent.clone()));
            pages_insert_anchors.insert(
                p.file.path.clone(),
                self.find_parent_section_insert_anchor(&p.file.parent.clone()),
            );
            self.add_page(p, false)?;
        }

        self.register_early_global_fns();
        self.render_markdown()?;
        self.populate_sections();
        self.render_markdown()?;
        self.populate_taxonomies()?;
        self.register_tera_global_fns();
@@ -258,52 +268,65 @@ impl Site {
        let permalinks = &self.permalinks;
        let tera = &self.tera;
        let config = &self.config;
        let base_path = &self.base_path;

        // TODO: avoid the duplication with function above for that part
        // This is needed in the first place because of silly borrow checker
        let mut pages_insert_anchors = HashMap::new();
        for (_, p) in &self.pages {
            pages_insert_anchors.insert(p.file.path.clone(), self.find_parent_section_insert_anchor(&p.file.parent.clone()));
        for (_, p) in self.library.pages() {
            pages_insert_anchors.insert(
                p.file.path.clone(),
                self.find_parent_section_insert_anchor(&p.file.parent.clone()),
            );
        }

        self.pages.par_iter_mut()
            .map(|(_, page)| {
        self.library
            .pages_mut()
            .values_mut()
            .collect::<Vec<_>>()
            .par_iter_mut()
            .map(|page| {
                let insert_anchor = pages_insert_anchors[&page.file.path];
                page.render_markdown(permalinks, tera, config, base_path, insert_anchor)
                page.render_markdown(permalinks, tera, config, insert_anchor)
            })
            .fold(|| Ok(()), Result::and)
            .reduce(|| Ok(()), Result::and)?;
            .collect::<Result<()>>()?;

        self.sections.par_iter_mut()
            .map(|(_, section)| section.render_markdown(permalinks, tera, config, base_path))
            .fold(|| Ok(()), Result::and)
            .reduce(|| Ok(()), Result::and)?;
        self.library
            .sections_mut()
            .values_mut()
            .collect::<Vec<_>>()
            .par_iter_mut()
            .map(|section| section.render_markdown(permalinks, tera, config))
            .collect::<Result<()>>()?;

        Ok(())
    }
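
The fold/reduce pair that used to combine per-page `Result`s is replaced throughout by a single `collect`: rayon implements `FromParallelIterator` for `Result`, so a parallel iterator of `Result<()>` collapses into one `Result<()>`, surfacing the first error. A standalone sketch of the pattern (toy data, not zola's types):

```rust
extern crate rayon;

use rayon::prelude::*;

// Doubles every value in parallel; any negative value aborts the whole
// operation with an error, exactly like the per-page rendering above.
fn double_all(values: &mut [i64]) -> Result<(), String> {
    values
        .par_iter_mut()
        .map(|v| {
            if *v < 0 {
                return Err(format!("negative value: {}", v));
            }
            *v *= 2;
            Ok(())
        })
        .collect::<Result<(), String>>()
}
```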
    /// Adds global fns that are to be available to shortcodes while rendering markdown
    pub fn register_early_global_fns(&mut self) {
        self.tera.register_global_function(
            "get_url", global_fns::make_get_url(self.permalinks.clone(), self.config.clone()),
        self.tera.register_function(
            "get_url",
            global_fns::make_get_url(self.permalinks.clone(), self.config.clone()),
        );
        self.tera.register_global_function(
            "resize_image", global_fns::make_resize_image(self.imageproc.clone()),
        self.tera.register_function(
            "resize_image",
            global_fns::make_resize_image(self.imageproc.clone()),
        );
    }

    pub fn register_tera_global_fns(&mut self) {
        self.tera.register_global_function("trans", global_fns::make_trans(self.config.clone()));
        self.tera.register_global_function("get_page", global_fns::make_get_page(&self.pages));
        self.tera.register_global_function("get_section", global_fns::make_get_section(&self.sections));
        self.tera.register_global_function(
        self.tera.register_function("trans", global_fns::make_trans(self.config.clone()));
        self.tera.register_function("get_page", global_fns::make_get_page(&self.library));
        self.tera.register_function("get_section", global_fns::make_get_section(&self.library));
        self.tera.register_function(
            "get_taxonomy",
            global_fns::make_get_taxonomy(self.taxonomies.clone()),
            global_fns::make_get_taxonomy(&self.taxonomies, &self.library),
        );
        self.tera.register_global_function(
        self.tera.register_function(
            "get_taxonomy_url",
            global_fns::make_get_taxonomy_url(self.taxonomies.clone()),
            global_fns::make_get_taxonomy_url(&self.taxonomies),
        );
        self.tera.register_function(
            "load_data",
            global_fns::make_load_data(self.content_path.clone(), self.base_path.clone()),
        );
    }
@@ -311,16 +334,14 @@ impl Site {
    /// The `render` parameter is used in the serve command, when rebuilding a page.
    /// If `true`, it will also render the markdown for that page
    /// Returns the previous page struct if there was one at the same path
    pub fn add_page(&mut self, page: Page, render: bool) -> Result<Option<Page>> {
        let path = page.file.path.clone();
    pub fn add_page(&mut self, mut page: Page, render: bool) -> Result<Option<Page>> {
        self.permalinks.insert(page.file.relative.clone(), page.permalink.clone());
        let prev = self.pages.insert(page.file.path.clone(), page);
        if render {
            let insert_anchor = self.find_parent_section_insert_anchor(&self.pages[&path].file.parent);
            let page = self.pages.get_mut(&path).unwrap();
            page.render_markdown(&self.permalinks, &self.tera, &self.config, &self.base_path, insert_anchor)?;
            let insert_anchor = self.find_parent_section_insert_anchor(&page.file.parent);
            page.render_markdown(&self.permalinks, &self.tera, &self.config, insert_anchor)?;
        }
        let prev = self.library.remove_page(&page.file.path);
        self.library.insert_page(page);

        Ok(prev)
    }
@@ -329,15 +350,13 @@ impl Site {
    /// The `render` parameter is used in the serve command, when rebuilding a page.
    /// If `true`, it will also render the markdown for that page
    /// Returns the previous section struct if there was one at the same path
    pub fn add_section(&mut self, section: Section, render: bool) -> Result<Option<Section>> {
        let path = section.file.path.clone();
    pub fn add_section(&mut self, mut section: Section, render: bool) -> Result<Option<Section>> {
        self.permalinks.insert(section.file.relative.clone(), section.permalink.clone());
        let prev = self.sections.insert(section.file.path.clone(), section);
        if render {
            let section = self.sections.get_mut(&path).unwrap();
            section.render_markdown(&self.permalinks, &self.tera, &self.config, &self.base_path)?;
            section.render_markdown(&self.permalinks, &self.tera, &self.config)?;
        }
        let prev = self.library.remove_section(&section.file.path);
        self.library.insert_section(section);

        Ok(prev)
    }
@@ -345,68 +364,16 @@ impl Site {
    /// Finds the insert_anchor for the parent section of the directory at `path`.
    /// Defaults to `AnchorInsert::None` if no parent section found
    pub fn find_parent_section_insert_anchor(&self, parent_path: &PathBuf) -> InsertAnchor {
        match self.sections.get(&parent_path.join("_index.md")) {
        match self.library.get_section(&parent_path.join("_index.md")) {
            Some(s) => s.meta.insert_anchor_links,
            None => InsertAnchor::None
            None => InsertAnchor::None,
        }
    }

    /// Find out the direct subsections of each subsection if there are some
    /// as well as the pages for each section
    pub fn populate_sections(&mut self) {
        let mut grandparent_paths: HashMap<PathBuf, Vec<PathBuf>> = HashMap::new();

        for section in self.sections.values_mut() {
            if let Some(ref grand_parent) = section.file.grand_parent {
                grandparent_paths
                    .entry(grand_parent.to_path_buf())
                    .or_insert_with(|| vec![])
                    .push(section.file.path.clone());
            }
            // Make sure the pages of a section are empty since we can call that many times on `serve`
            section.pages = vec![];
            section.ignored_pages = vec![];
        }

        for page in self.pages.values() {
            let parent_section_path = page.file.parent.join("_index.md");
            if self.sections.contains_key(&parent_section_path) {
                // TODO: use references instead of cloning to avoid having to call populate_section on
                // content change
                self.sections.get_mut(&parent_section_path).unwrap().pages.push(page.clone());
            }
        }

        self.sort_sections_pages(None);
        // TODO: remove this clone
        let sections = self.sections.clone();

        for section in self.sections.values_mut() {
            if let Some(paths) = grandparent_paths.get(&section.file.parent) {
                section.subsections = paths
                    .iter()
                    .map(|p| sections[p].clone())
                    .collect::<Vec<_>>();
                section.subsections
                    .sort_by(|a, b| a.meta.weight.cmp(&b.meta.weight));
            }
        }
    }

    /// Sorts the pages of the section at the given path
    /// By default will sort all sections but can be made to only sort a single one by providing a path
    pub fn sort_sections_pages(&mut self, only: Option<&Path>) {
        for (path, section) in &mut self.sections {
            if let Some(p) = only {
                if p != path {
                    continue;
                }
            }
            let pages = mem::replace(&mut section.pages, vec![]);
            let (sorted_pages, cannot_be_sorted_pages) = sort_pages(pages, section.meta.sort_by);
            section.pages = populate_siblings(&sorted_pages, section.meta.sort_by);
            section.ignored_pages = cannot_be_sorted_pages;
        }
        self.library.populate_sections();
    }
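
All of the removed bookkeeping, and the clones it relied on, now lives behind `Library`, which owns every page and section once and hands out cheap copyable keys; sections store keys instead of cloned `Page`s. A simplified stand-in for the idea — the real `Library` in this release is arena-based, and all names here are illustrative, not its actual API:

```rust
use std::collections::HashMap;
use std::path::PathBuf;

// A copyable handle into the arena; comparing or cloning it never
// touches the page data itself.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct PageKey(usize);

struct Page { /* front matter, content, ... */ }

struct Library {
    pages: Vec<Page>,                 // each page owned exactly once
    paths: HashMap<PathBuf, PageKey>, // path -> key lookup
}

impl Library {
    fn insert_page(&mut self, path: PathBuf, page: Page) -> PageKey {
        let key = PageKey(self.pages.len());
        self.pages.push(page);
        self.paths.insert(path, key);
        key
    }

    // Sections hold Vec<PageKey>; rendering resolves keys on demand.
    fn get_page_by_key(&self, key: PageKey) -> &Page {
        &self.pages[key.0]
    }
}
```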
/// Find all the tags and categories if it's asked in the config | /// Find all the tags and categories if it's asked in the config | ||||
@@ -415,15 +382,7 @@ impl Site { | |||||
return Ok(()); | return Ok(()); | ||||
} | } | ||||
self.taxonomies = find_taxonomies( | |||||
&self.config, | |||||
self.pages | |||||
.values() | |||||
.filter(|p| !p.is_draft()) | |||||
.cloned() | |||||
.collect::<Vec<_>>() | |||||
.as_slice(), | |||||
)?; | |||||
self.taxonomies = find_taxonomies(&self.config, &self.library)?; | |||||
Ok(()) | Ok(()) | ||||
} | } | ||||
@@ -433,7 +392,10 @@ impl Site { | |||||
if let Some(port) = self.live_reload { | if let Some(port) = self.live_reload { | ||||
return html.replace( | return html.replace( | ||||
"</body>", | "</body>", | ||||
&format!(r#"<script src="/livereload.js?port={}&mindelay=10"></script></body>"#, port), | |||||
&format!( | |||||
r#"<script src="/livereload.js?port={}&mindelay=10"></script></body>"#, | |||||
port | |||||
), | |||||
); | ); | ||||
} | } | ||||
@@ -497,7 +459,7 @@ impl Site { | |||||
create_directory(¤t_path)?; | create_directory(¤t_path)?; | ||||
// Finally, create a index.html file there with the page rendered | // Finally, create a index.html file there with the page rendered | ||||
let output = page.render_html(&self.tera, &self.config)?; | |||||
let output = page.render_html(&self.tera, &self.config, &self.library)?; | |||||
create_file(¤t_path.join("index.html"), &self.inject_livereload(output))?; | create_file(¤t_path.join("index.html"), &self.inject_livereload(output))?; | ||||
// Copy any asset we found previously into the same directory as the index.html | // Copy any asset we found previously into the same directory as the index.html | ||||
@@ -518,7 +480,7 @@ impl Site { | |||||
self.render_orphan_pages()?; | self.render_orphan_pages()?; | ||||
self.render_sitemap()?; | self.render_sitemap()?; | ||||
if self.config.generate_rss { | if self.config.generate_rss { | ||||
self.render_rss_feed(None, None)?; | |||||
self.render_rss_feed(self.library.pages_values(), None)?; | |||||
} | } | ||||
self.render_404()?; | self.render_404()?; | ||||
self.render_robots()?; | self.render_robots()?; | ||||
@@ -551,15 +513,12 @@ impl Site { | |||||
&self.output_path.join(&format!("search_index.{}.js", self.config.default_language)), | &self.output_path.join(&format!("search_index.{}.js", self.config.default_language)), | ||||
&format!( | &format!( | ||||
"window.searchIndex = {};", | "window.searchIndex = {};", | ||||
search::build_index(&self.sections, &self.config.default_language)? | |||||
search::build_index(&self.config.default_language, &self.library)? | |||||
), | ), | ||||
)?; | )?; | ||||
// then elasticlunr.min.js | // then elasticlunr.min.js | ||||
create_file( | |||||
&self.output_path.join("elasticlunr.min.js"), | |||||
search::ELASTICLUNR_JS, | |||||
)?; | |||||
create_file(&self.output_path.join("elasticlunr.min.js"), search::ELASTICLUNR_JS)?; | |||||
Ok(()) | Ok(()) | ||||
} | } | ||||
@@ -575,10 +534,10 @@ impl Site { | |||||
let mut options = SassOptions::default(); | let mut options = SassOptions::default(); | ||||
options.output_style = OutputStyle::Compressed; | options.output_style = OutputStyle::Compressed; | ||||
let mut compiled_paths = self.compile_sass_glob(&sass_path, "scss", options.clone())?; | |||||
let mut compiled_paths = self.compile_sass_glob(&sass_path, "scss", &options.clone())?; | |||||
options.indented_syntax = true; | options.indented_syntax = true; | ||||
compiled_paths.extend(self.compile_sass_glob(&sass_path, "sass", options)?); | |||||
compiled_paths.extend(self.compile_sass_glob(&sass_path, "sass", &options)?); | |||||
compiled_paths.sort(); | compiled_paths.sort(); | ||||
for window in compiled_paths.windows(2) { | for window in compiled_paths.windows(2) { | ||||
@@ -595,12 +554,19 @@ impl Site { | |||||
Ok(()) | Ok(()) | ||||
} | } | ||||
fn compile_sass_glob(&self, sass_path: &Path, extension: &str, options: SassOptions) -> Result<Vec<(PathBuf, PathBuf)>> { | |||||
fn compile_sass_glob( | |||||
&self, | |||||
sass_path: &Path, | |||||
extension: &str, | |||||
options: &SassOptions, | |||||
) -> Result<Vec<(PathBuf, PathBuf)>> { | |||||
let glob_string = format!("{}/**/*.{}", sass_path.display(), extension); | let glob_string = format!("{}/**/*.{}", sass_path.display(), extension); | ||||
let files = glob(&glob_string) | let files = glob(&glob_string) | ||||
.unwrap() | .unwrap() | ||||
.filter_map(|e| e.ok()) | .filter_map(|e| e.ok()) | ||||
.filter(|entry| !entry.as_path().file_name().unwrap().to_string_lossy().starts_with('_')) | |||||
.filter(|entry| { | |||||
!entry.as_path().file_name().unwrap().to_string_lossy().starts_with('_') | |||||
}) | |||||
.collect::<Vec<_>>(); | .collect::<Vec<_>>(); | ||||
let mut compiled_paths = Vec::new(); | let mut compiled_paths = Vec::new(); | ||||
@@ -623,7 +589,8 @@ impl Site { | |||||
} | } | ||||
pub fn render_aliases(&self) -> Result<()> { | pub fn render_aliases(&self) -> Result<()> { | ||||
for page in self.pages.values() { | |||||
ensure_directory_exists(&self.output_path)?; | |||||
for (_, page) in self.library.pages() { | |||||
for alias in &page.meta.aliases { | for alias in &page.meta.aliases { | ||||
let mut output_path = self.output_path.to_path_buf(); | let mut output_path = self.output_path.to_path_buf(); | ||||
let mut split = alias.split('/').collect::<Vec<_>>(); | let mut split = alias.split('/').collect::<Vec<_>>(); | ||||
@@ -636,7 +603,7 @@ impl Site { | |||||
split.push(part); | split.push(part); | ||||
"index.html" | "index.html" | ||||
} | } | ||||
None => "index.html" | |||||
None => "index.html", | |||||
}; | }; | ||||
for component in split { | for component in split { | ||||
@@ -646,7 +613,10 @@ impl Site { | |||||
create_directory(&output_path)?; | create_directory(&output_path)?; | ||||
} | } | ||||
} | } | ||||
create_file(&output_path.join(page_name), &render_redirect_template(&page.permalink, &self.tera)?)?; | |||||
create_file( | |||||
&output_path.join(page_name), | |||||
&render_redirect_template(&page.permalink, &self.tera)?, | |||||
)?; | |||||
} | } | ||||
} | } | ||||
Ok(()) | Ok(()) | ||||
@@ -666,9 +636,11 @@ impl Site { | |||||
/// Renders robots.txt | /// Renders robots.txt | ||||
pub fn render_robots(&self) -> Result<()> { | pub fn render_robots(&self) -> Result<()> { | ||||
ensure_directory_exists(&self.output_path)?; | ensure_directory_exists(&self.output_path)?; | ||||
let mut context = Context::new(); | |||||
context.insert("config", &self.config); | |||||
create_file( | create_file( | ||||
&self.output_path.join("robots.txt"), | &self.output_path.join("robots.txt"), | ||||
&render_template("robots.txt", &self.tera, &Context::new(), &self.config.theme)?, | |||||
&render_template("robots.txt", &self.tera, &context, &self.config.theme)?, | |||||
) | ) | ||||
} | } | ||||
@@ -689,7 +661,7 @@ impl Site { | |||||
ensure_directory_exists(&self.output_path)?; | ensure_directory_exists(&self.output_path)?; | ||||
let output_path = self.output_path.join(&taxonomy.kind.name); | let output_path = self.output_path.join(&taxonomy.kind.name); | ||||
let list_output = taxonomy.render_all_terms(&self.tera, &self.config)?; | |||||
let list_output = taxonomy.render_all_terms(&self.tera, &self.config, &self.library)?; | |||||
create_directory(&output_path)?; | create_directory(&output_path)?; | ||||
create_file(&output_path.join("index.html"), &self.inject_livereload(list_output))?; | create_file(&output_path.join("index.html"), &self.inject_livereload(list_output))?; | ||||
@@ -698,27 +670,26 @@ impl Site { | |||||
.par_iter() | .par_iter() | ||||
.map(|item| { | .map(|item| { | ||||
if taxonomy.kind.rss { | if taxonomy.kind.rss { | ||||
// TODO: can we get rid of `clone()`? | |||||
self.render_rss_feed( | self.render_rss_feed( | ||||
Some(item.pages.clone()), | |||||
item.pages.iter().map(|p| self.library.get_page_by_key(*p)).collect(), | |||||
Some(&PathBuf::from(format!("{}/{}", taxonomy.kind.name, item.slug))), | Some(&PathBuf::from(format!("{}/{}", taxonomy.kind.name, item.slug))), | ||||
)?; | )?; | ||||
} | } | ||||
if taxonomy.kind.is_paginated() { | if taxonomy.kind.is_paginated() { | ||||
self.render_paginated(&output_path, &Paginator::from_taxonomy(&taxonomy, item)) | |||||
self.render_paginated( | |||||
&output_path, | |||||
&Paginator::from_taxonomy(&taxonomy, item, &self.library), | |||||
) | |||||
} else { | } else { | ||||
let single_output = taxonomy.render_term(item, &self.tera, &self.config)?; | |||||
let single_output = | |||||
taxonomy.render_term(item, &self.tera, &self.config, &self.library)?; | |||||
let path = output_path.join(&item.slug); | let path = output_path.join(&item.slug); | ||||
create_directory(&path)?; | create_directory(&path)?; | ||||
create_file( | |||||
&path.join("index.html"), | |||||
&self.inject_livereload(single_output), | |||||
) | |||||
create_file(&path.join("index.html"), &self.inject_livereload(single_output)) | |||||
} | } | ||||
}) | }) | ||||
.fold(|| Ok(()), Result::and) | |||||
.reduce(|| Ok(()), Result::and) | |||||
.collect::<Result<()>>() | |||||
} | } | ||||
/// What it says on the tin | /// What it says on the tin | ||||
@@ -727,8 +698,10 @@ impl Site { | |||||
let mut context = Context::new(); | let mut context = Context::new(); | ||||
let mut pages = self.pages | |||||
.values() | |||||
let mut pages = self | |||||
.library | |||||
.pages_values() | |||||
.iter() | |||||
.filter(|p| !p.is_draft()) | .filter(|p| !p.is_draft()) | ||||
.map(|p| { | .map(|p| { | ||||
let date = match p.meta.date { | let date = match p.meta.date { | ||||
@@ -739,14 +712,16 @@ impl Site { | |||||
}) | }) | ||||
.collect::<Vec<_>>(); | .collect::<Vec<_>>(); | ||||
pages.sort_by(|a, b| a.permalink.cmp(&b.permalink)); | pages.sort_by(|a, b| a.permalink.cmp(&b.permalink)); | ||||
context.add("pages", &pages); | |||||
context.insert("pages", &pages); | |||||
let mut sections = self.sections | |||||
.values() | |||||
let mut sections = self | |||||
.library | |||||
.sections_values() | |||||
.iter() | |||||
.map(|s| SitemapEntry::new(s.permalink.clone(), None)) | .map(|s| SitemapEntry::new(s.permalink.clone(), None)) | ||||
.collect::<Vec<_>>(); | .collect::<Vec<_>>(); | ||||
sections.sort_by(|a, b| a.permalink.cmp(&b.permalink)); | sections.sort_by(|a, b| a.permalink.cmp(&b.permalink)); | ||||
context.add("sections", §ions); | |||||
context.insert("sections", §ions); | |||||
let mut taxonomies = vec![]; | let mut taxonomies = vec![]; | ||||
for taxonomy in &self.taxonomies { | for taxonomy in &self.taxonomies { | ||||
@@ -754,14 +729,17 @@ impl Site { | |||||
let mut terms = vec![]; | let mut terms = vec![]; | ||||
terms.push(SitemapEntry::new(self.config.make_permalink(name), None)); | terms.push(SitemapEntry::new(self.config.make_permalink(name), None)); | ||||
for item in &taxonomy.items { | for item in &taxonomy.items { | ||||
terms.push(SitemapEntry::new(self.config.make_permalink(&format!("{}/{}", &name, item.slug)), None)); | |||||
terms.push(SitemapEntry::new( | |||||
self.config.make_permalink(&format!("{}/{}", &name, item.slug)), | |||||
None, | |||||
)); | |||||
} | } | ||||
terms.sort_by(|a, b| a.permalink.cmp(&b.permalink)); | terms.sort_by(|a, b| a.permalink.cmp(&b.permalink)); | ||||
taxonomies.push(terms); | taxonomies.push(terms); | ||||
} | } | ||||
context.add("taxonomies", &taxonomies); | |||||
context.insert("taxonomies", &taxonomies); | |||||
context.add("config", &self.config); | |||||
context.insert("config", &self.config); | |||||
let sitemap = &render_template("sitemap.xml", &self.tera, &context, &self.config.theme)?; | let sitemap = &render_template("sitemap.xml", &self.tera, &context, &self.config.theme)?; | ||||
@@ -773,14 +751,15 @@ impl Site { | |||||
/// Renders a RSS feed for the given path and at the given path | /// Renders a RSS feed for the given path and at the given path | ||||
/// If both arguments are `None`, it will render only the RSS feed for the whole | /// If both arguments are `None`, it will render only the RSS feed for the whole | ||||
     /// site at the root folder.
-    pub fn render_rss_feed(&self, all_pages: Option<Vec<Page>>, base_path: Option<&PathBuf>) -> Result<()> {
+    pub fn render_rss_feed(
+        &self,
+        all_pages: Vec<&Page>,
+        base_path: Option<&PathBuf>,
+    ) -> Result<()> {
         ensure_directory_exists(&self.output_path)?;

         let mut context = Context::new();
-        let pages = all_pages
-            // TODO: avoid that cloned().
-            // It requires having `sort_pages` take references of Page
-            .unwrap_or_else(|| self.pages.values().cloned().collect::<Vec<_>>())
+        let mut pages = all_pages
             .into_iter()
             .filter(|p| p.meta.date.is_some() && !p.is_draft())
             .collect::<Vec<_>>();

@@ -790,11 +769,19 @@ impl Site {
             return Ok(());
         }

-        let (sorted_pages, _) = sort_pages(pages, SortBy::Date);
-        context.add("last_build_date", &sorted_pages[0].meta.date.clone().map(|d| d.to_string()));
-        // limit to the last n elements
-        context.add("pages", &sorted_pages.iter().take(self.config.rss_limit).collect::<Vec<_>>());
-        context.add("config", &self.config);
+        pages.par_sort_unstable_by(sort_actual_pages_by_date);
+        context.insert("last_build_date", &pages[0].meta.date.clone().map(|d| d.to_string()));
+        // limit to the last n elements if the limit is set; otherwise use all.
+        let num_entries = self.config.rss_limit.unwrap_or(pages.len());
+        let p = pages
+            .iter()
+            .take(num_entries)
+            .map(|x| x.to_serialized_basic(&self.library))
+            .collect::<Vec<_>>();
+        context.insert("pages", &p);
+        context.insert("config", &self.config);

         let rss_feed_url = if let Some(ref base) = base_path {
             self.config.make_permalink(&base.join("rss.xml").to_string_lossy().replace('\\', "/"))
@@ -802,7 +789,7 @@ impl Site {
             self.config.make_permalink("rss.xml")
         };
-        context.add("feed_url", &rss_feed_url);
+        context.insert("feed_url", &rss_feed_url);

         let feed = &render_template("rss.xml", &self.tera, &context, &self.config.theme)?;

@@ -846,9 +833,8 @@ impl Site {
             section
                 .pages
                 .par_iter()
-                .map(|p| self.render_page(p))
-                .fold(|| Ok(()), Result::and)
-                .reduce(|| Ok(()), Result::and)?;
+                .map(|k| self.render_page(self.library.get_page_by_key(*k)))
+                .collect::<Result<()>>()?;
         }

         if !section.meta.render {
@@ -857,14 +843,17 @@ impl Site {
         if let Some(ref redirect_to) = section.meta.redirect_to {
             let permalink = self.config.make_permalink(redirect_to);
-            create_file(&output_path.join("index.html"), &render_redirect_template(&permalink, &self.tera)?)?;
+            create_file(
+                &output_path.join("index.html"),
+                &render_redirect_template(&permalink, &self.tera)?,
+            )?;
             return Ok(());
         }

         if section.meta.is_paginated() {
-            self.render_paginated(&output_path, &Paginator::from_section(&section.pages, section))?;
+            self.render_paginated(&output_path, &Paginator::from_section(&section, &self.library))?;
         } else {
-            let output = section.render_html(&self.tera, &self.config)?;
+            let output = section.render_html(&self.tera, &self.config, &self.library)?;
             create_file(&output_path.join("index.html"), &self.inject_livereload(output))?;
         }

@@ -874,20 +863,18 @@ impl Site {
     /// Used only on reload
     pub fn render_index(&self) -> Result<()> {
         self.render_section(
-            &self.sections[&self.content_path.join("_index.md")],
+            &self.library.get_section(&self.content_path.join("_index.md")).unwrap(),
             false,
         )
     }

     /// Renders all sections
     pub fn render_sections(&self) -> Result<()> {
-        self.sections
-            .values()
-            .collect::<Vec<_>>()
+        self.library
+            .sections_values()
             .into_par_iter()
             .map(|s| self.render_section(s, true))
-            .fold(|| Ok(()), Result::and)
-            .reduce(|| Ok(()), Result::and)
+            .collect::<Result<()>>()
     }

     /// Renders all pages that do not belong to any sections
@@ -911,20 +898,22 @@ impl Site {
         paginator
             .pagers
             .par_iter()
-            .enumerate()
-            .map(|(i, pager)| {
-                let page_path = folder_path.join(&format!("{}", i + 1));
+            .map(|pager| {
+                let page_path = folder_path.join(&format!("{}", pager.index));
                 create_directory(&page_path)?;
-                let output = paginator.render_pager(pager, &self.config, &self.tera)?;
-                if i > 0 {
+                let output =
+                    paginator.render_pager(pager, &self.config, &self.tera, &self.library)?;
+                if pager.index > 1 {
                     create_file(&page_path.join("index.html"), &self.inject_livereload(output))?;
                 } else {
                     create_file(&output_path.join("index.html"), &self.inject_livereload(output))?;
-                    create_file(&page_path.join("index.html"), &render_redirect_template(&paginator.permalink, &self.tera)?)?;
+                    create_file(
+                        &page_path.join("index.html"),
+                        &render_redirect_template(&paginator.permalink, &self.tera)?,
+                    )?;
                 }
                 Ok(())
             })
-            .fold(|| Ok(()), Result::and)
-            .reduce(|| Ok(()), Result::and)
+            .collect::<Result<()>>()
     }
 }
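
// A minimal, self-contained sketch of the error-handling pattern adopted above:
// rayon's parallel iterators can collect an iterator of `Result<()>` straight
// into a single `Result<()>`, surfacing an `Err` if any item fails, which
// replaces the old `.fold(|| Ok(()), Result::and).reduce(|| Ok(()), Result::and)`
// chains. Crate `rayon` is assumed; `render_one` and the `String` error type are
// hypothetical stand-ins for the real rendering calls.

use rayon::prelude::*;

type RenderResult = Result<(), String>;

fn render_one(n: u32) -> RenderResult {
    if n == 3 { Err(format!("failed on item {}", n)) } else { Ok(()) }
}

fn main() {
    // Collecting the `Result<()>` items yields `Err` as soon as any job fails.
    let outcome: RenderResult = (0..10u32).into_par_iter().map(render_one).collect();
    assert!(outcome.is_err());
}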
@@ -3,13 +3,12 @@ extern crate tempfile;

 use std::collections::HashMap;
 use std::env;
-use std::path::Path;
 use std::fs::File;
 use std::io::prelude::*;
+use std::path::Path;

-use tempfile::tempdir;
 use site::Site;
+use tempfile::tempdir;

 #[test]
 fn can_parse_site() {
@@ -18,76 +17,102 @@ fn can_parse_site() {
     let mut site = Site::new(&path, "config.toml").unwrap();
     site.load().unwrap();

-    // Correct number of pages (sections are pages too)
-    assert_eq!(site.pages.len(), 15);
+    // Correct number of pages (sections do not count as pages)
+    assert_eq!(site.library.pages().len(), 22);
     let posts_path = path.join("content").join("posts");

-    // Make sure we remove all the pwd + content from the sections
-    let basic = &site.pages[&posts_path.join("simple.md")];
-    assert_eq!(basic.file.components, vec!["posts".to_string()]);

     // Make sure the page with a url doesn't have any sections
-    let url_post = &site.pages[&posts_path.join("fixed-url.md")];
+    let url_post = site.library.get_page(&posts_path.join("fixed-url.md")).unwrap();
     assert_eq!(url_post.path, "a-fixed-url/");

     // Make sure the article in a folder with only asset doesn't get counted as a section
-    let asset_folder_post = &site.pages[&posts_path.join("with-assets").join("index.md")];
+    let asset_folder_post =
+        site.library.get_page(&posts_path.join("with-assets").join("index.md")).unwrap();
     assert_eq!(asset_folder_post.file.components, vec!["posts".to_string()]);

     // That we have the right number of sections
-    assert_eq!(site.sections.len(), 7);
+    assert_eq!(site.library.sections().len(), 11);

     // And that the sections are correct
-    let index_section = &site.sections[&path.join("content").join("_index.md")];
-    assert_eq!(index_section.subsections.len(), 3);
+    let index_section = site.library.get_section(&path.join("content").join("_index.md")).unwrap();
+    assert_eq!(index_section.subsections.len(), 4);
     assert_eq!(index_section.pages.len(), 1);
+    assert!(index_section.ancestors.is_empty());

-    let posts_section = &site.sections[&posts_path.join("_index.md")];
-    assert_eq!(posts_section.subsections.len(), 1);
-    assert_eq!(posts_section.pages.len(), 7);
+    let posts_section = site.library.get_section(&posts_path.join("_index.md")).unwrap();
+    assert_eq!(posts_section.subsections.len(), 2);
+    assert_eq!(posts_section.pages.len(), 10);
+    assert_eq!(
+        posts_section.ancestors,
+        vec![*site.library.get_section_key(&index_section.file.path).unwrap()]
+    );

-    let tutorials_section = &site.sections[&posts_path.join("tutorials").join("_index.md")];
+    // Make sure we remove all the pwd + content from the sections
+    let basic = site.library.get_page(&posts_path.join("simple.md")).unwrap();
+    assert_eq!(basic.file.components, vec!["posts".to_string()]);
+    assert_eq!(
+        basic.ancestors,
+        vec![
+            *site.library.get_section_key(&index_section.file.path).unwrap(),
+            *site.library.get_section_key(&posts_section.file.path).unwrap(),
+        ]
+    );

+    let tutorials_section =
+        site.library.get_section(&posts_path.join("tutorials").join("_index.md")).unwrap();
     assert_eq!(tutorials_section.subsections.len(), 2);
-    assert_eq!(tutorials_section.subsections[0].clone().meta.title.unwrap(), "Programming");
-    assert_eq!(tutorials_section.subsections[1].clone().meta.title.unwrap(), "DevOps");
+    let sub1 = site.library.get_section_by_key(tutorials_section.subsections[0]);
+    let sub2 = site.library.get_section_by_key(tutorials_section.subsections[1]);
+    assert_eq!(sub1.clone().meta.title.unwrap(), "Programming");
+    assert_eq!(sub2.clone().meta.title.unwrap(), "DevOps");
     assert_eq!(tutorials_section.pages.len(), 0);

-    let devops_section = &site.sections[&posts_path.join("tutorials").join("devops").join("_index.md")];
+    let devops_section = site
+        .library
+        .get_section(&posts_path.join("tutorials").join("devops").join("_index.md"))
+        .unwrap();
     assert_eq!(devops_section.subsections.len(), 0);
     assert_eq!(devops_section.pages.len(), 2);
+    assert_eq!(
+        devops_section.ancestors,
+        vec![
+            *site.library.get_section_key(&index_section.file.path).unwrap(),
+            *site.library.get_section_key(&posts_section.file.path).unwrap(),
+            *site.library.get_section_key(&tutorials_section.file.path).unwrap(),
+        ]
+    );

-    let prog_section = &site.sections[&posts_path.join("tutorials").join("programming").join("_index.md")];
+    let prog_section = site
+        .library
+        .get_section(&posts_path.join("tutorials").join("programming").join("_index.md"))
+        .unwrap();
     assert_eq!(prog_section.subsections.len(), 0);
     assert_eq!(prog_section.pages.len(), 2);
 }
 // 2 helper macros to make all the build testing more bearable
 macro_rules! file_exists {
-    ($root: expr, $path: expr) => {
-        {
-            let mut path = $root.clone();
-            for component in $path.split("/") {
-                path = path.join(component);
-            }
-            Path::new(&path).exists()
-        }
-    };
+    ($root: expr, $path: expr) => {{
+        let mut path = $root.clone();
+        for component in $path.split("/") {
+            path = path.join(component);
+        }
+        Path::new(&path).exists()
+    }};
 }

 macro_rules! file_contains {
-    ($root: expr, $path: expr, $text: expr) => {
-        {
-            let mut path = $root.clone();
-            for component in $path.split("/") {
-                path = path.join(component);
-            }
-            let mut file = File::open(&path).unwrap();
-            let mut s = String::new();
-            file.read_to_string(&mut s).unwrap();
-            println!("{}", s);
-            s.contains($text)
-        }
-    };
+    ($root: expr, $path: expr, $text: expr) => {{
+        let mut path = $root.clone();
+        for component in $path.split("/") {
+            path = path.join(component);
+        }
+        let mut file = File::open(&path).unwrap();
+        let mut s = String::new();
+        file.read_to_string(&mut s).unwrap();
+        println!("{}", s);
+        s.contains($text)
+    }};
 }
 #[test]
@@ -123,7 +148,14 @@ fn can_build_site_without_live_reload() {
     assert!(file_exists!(public, "posts/tutorials/programming/index.html"));
     // Ensure subsection pages are correctly filled
     assert!(file_contains!(public, "posts/tutorials/index.html", "Sub-pages: 2"));
-    // TODO: add assertion for syntax highlighting
+
+    // Pages and section get their relative path
+    assert!(file_contains!(public, "posts/tutorials/index.html", "posts/tutorials/_index.md"));
+    assert!(file_contains!(
+        public,
+        "posts/tutorials/devops/nix/index.html",
+        "posts/tutorials/devops/nix.md"
+    ));

     // aliases work
     assert!(file_exists!(public, "an-old-url/old-page/index.html"));
@@ -137,8 +169,11 @@ fn can_build_site_without_live_reload() {
     assert!(file_exists!(public, "posts/tutorials/devops/index.html"));
     assert!(file_contains!(public, "posts/tutorials/devops/index.html", "docker"));

-    // No tags or categories
-    assert_eq!(file_exists!(public, "categories/index.html"), false);
+    // We do have categories
+    assert_eq!(file_exists!(public, "categories/index.html"), true);
+    assert_eq!(file_exists!(public, "categories/a-category/index.html"), true);
+    assert_eq!(file_exists!(public, "categories/a-category/rss.xml"), true);
+    // But no tags
     assert_eq!(file_exists!(public, "tags/index.html"), false);

     // Theme files are there
@@ -161,10 +196,26 @@ fn can_build_site_without_live_reload() {
     assert_eq!(file_contains!(public, "index.html", "/livereload.js?port=1112&mindelay=10"), false);

     // Both pages and sections are in the sitemap
-    assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/posts/simple/</loc>"));
-    assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/posts/</loc>"));
+    assert!(file_contains!(
+        public,
+        "sitemap.xml",
+        "<loc>https://replace-this-with-your-url.com/posts/simple/</loc>"
+    ));
+    assert!(file_contains!(
+        public,
+        "sitemap.xml",
+        "<loc>https://replace-this-with-your-url.com/posts/</loc>"
+    ));
     // Drafts are not in the sitemap
     assert!(!file_contains!(public, "sitemap.xml", "draft"));
+
+    // robots.txt has been rendered from the template
+    assert!(file_contains!(public, "robots.txt", "User-agent: zola"));
+    assert!(file_contains!(
+        public,
+        "robots.txt",
+        "Sitemap: https://replace-this-with-your-url.com/sitemap.xml"
+    ));
 }
 #[test]
@@ -176,7 +227,7 @@ fn can_build_site_with_live_reload() {
     let tmp_dir = tempdir().expect("create temp dir");
     let public = &tmp_dir.path().join("public");
     site.set_output_path(&public);
-    site.enable_live_reload();
+    site.enable_live_reload(1000);
     site.build().unwrap();

     assert!(Path::new(&public).exists());
@@ -197,15 +248,22 @@ fn can_build_site_with_live_reload() {
     assert!(file_exists!(public, "posts/tutorials/programming/index.html"));
     // TODO: add assertion for syntax highlighting

-    // No tags or categories
-    assert_eq!(file_exists!(public, "categories/index.html"), false);
+    // We do have categories
+    assert_eq!(file_exists!(public, "categories/index.html"), true);
+    assert_eq!(file_exists!(public, "categories/a-category/index.html"), true);
+    assert_eq!(file_exists!(public, "categories/a-category/rss.xml"), true);
+    // But no tags
     assert_eq!(file_exists!(public, "tags/index.html"), false);

     // no live reload code
     assert!(file_contains!(public, "index.html", "/livereload.js"));

     // the summary anchor link has been created
-    assert!(file_contains!(public, "posts/python/index.html", r#"<a name="continue-reading"></a>"#));
+    assert!(file_contains!(
+        public,
+        "posts/python/index.html",
+        r#"<a name="continue-reading"></a>"#
+    ));
     assert!(file_contains!(public, "posts/draft/index.html", r#"THEME_SHORTCODE"#));
 }
@@ -216,10 +274,13 @@ fn can_build_site_with_taxonomies() {
     let mut site = Site::new(&path, "config.toml").unwrap();
     site.load().unwrap();

-    for (i, page) in site.pages.values_mut().enumerate() {
+    for (i, (_, page)) in site.library.pages_mut().iter_mut().enumerate() {
         page.meta.taxonomies = {
             let mut taxonomies = HashMap::new();
-            taxonomies.insert("categories".to_string(), vec![if i % 2 == 0 { "A" } else { "B" }.to_string()]);
+            taxonomies.insert(
+                "categories".to_string(),
+                vec![if i % 2 == 0 { "A" } else { "B" }.to_string()],
+            );
             taxonomies
         };
     }
@@ -252,15 +313,27 @@ fn can_build_site_with_taxonomies() {
     assert!(file_exists!(public, "categories/a/index.html"));
     assert!(file_exists!(public, "categories/b/index.html"));
     assert!(file_exists!(public, "categories/a/rss.xml"));
-    assert!(file_contains!(public, "categories/a/rss.xml", "https://replace-this-with-your-url.com/categories/a/rss.xml"));
+    assert!(file_contains!(
+        public,
+        "categories/a/rss.xml",
+        "https://replace-this-with-your-url.com/categories/a/rss.xml"
+    ));
     // Extending from a theme works
     assert!(file_contains!(public, "categories/a/index.html", "EXTENDED"));
     // Tags aren't
     assert_eq!(file_exists!(public, "tags/index.html"), false);

     // Categories are in the sitemap
-    assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/categories/</loc>"));
-    assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/categories/a/</loc>"));
+    assert!(file_contains!(
+        public,
+        "sitemap.xml",
+        "<loc>https://replace-this-with-your-url.com/categories/</loc>"
+    ));
+    assert!(file_contains!(
+        public,
+        "sitemap.xml",
+        "<loc>https://replace-this-with-your-url.com/categories/a/</loc>"
+    ));
 }
 #[test]
@@ -277,7 +350,11 @@ fn can_build_site_and_insert_anchor_links() {
     assert!(Path::new(&public).exists());
     // anchor link inserted
-    assert!(file_contains!(public, "posts/something-else/index.html", "<h1 id=\"title\"><a class=\"gutenberg-anchor\" href=\"#title\""));
+    assert!(file_contains!(
+        public,
+        "posts/something-else/index.html",
+        "<h1 id=\"title\"><a class=\"zola-anchor\" href=\"#title\""
+    ));
 }
 #[test]
@@ -286,7 +363,7 @@ fn can_build_site_with_pagination_for_section() {
     path.push("test_site");
     let mut site = Site::new(&path, "config.toml").unwrap();
     site.load().unwrap();
-    for section in site.sections.values_mut() {
+    for (_, section) in site.library.sections_mut() {
         if section.is_index() {
             continue;
         }
@@ -321,41 +398,73 @@ fn can_build_site_with_pagination_for_section() {
         "posts/page/1/index.html",
         "http-equiv=\"refresh\" content=\"0;url=https://replace-this-with-your-url.com/posts/\""
     ));
-    assert!(file_contains!(public, "posts/index.html", "Num pagers: 4"));
+    assert!(file_contains!(public, "posts/index.html", "Num pagers: 5"));
     assert!(file_contains!(public, "posts/index.html", "Page size: 2"));
     assert!(file_contains!(public, "posts/index.html", "Current index: 1"));
     assert!(!file_contains!(public, "posts/index.html", "has_prev"));
     assert!(file_contains!(public, "posts/index.html", "has_next"));
-    assert!(file_contains!(public, "posts/index.html", "First: https://replace-this-with-your-url.com/posts/"));
-    assert!(file_contains!(public, "posts/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
+    assert!(file_contains!(
+        public,
+        "posts/index.html",
+        "First: https://replace-this-with-your-url.com/posts/"
+    ));
+    assert!(file_contains!(
+        public,
+        "posts/index.html",
+        "Last: https://replace-this-with-your-url.com/posts/page/5/"
+    ));
     assert_eq!(file_contains!(public, "posts/index.html", "has_prev"), false);

     assert!(file_exists!(public, "posts/page/2/index.html"));
-    assert!(file_contains!(public, "posts/page/2/index.html", "Num pagers: 4"));
+    assert!(file_contains!(public, "posts/page/2/index.html", "Num pagers: 5"));
     assert!(file_contains!(public, "posts/page/2/index.html", "Page size: 2"));
     assert!(file_contains!(public, "posts/page/2/index.html", "Current index: 2"));
     assert!(file_contains!(public, "posts/page/2/index.html", "has_prev"));
     assert!(file_contains!(public, "posts/page/2/index.html", "has_next"));
-    assert!(file_contains!(public, "posts/page/2/index.html", "First: https://replace-this-with-your-url.com/posts/"));
-    assert!(file_contains!(public, "posts/page/2/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
+    assert!(file_contains!(
+        public,
+        "posts/page/2/index.html",
+        "First: https://replace-this-with-your-url.com/posts/"
+    ));
+    assert!(file_contains!(
+        public,
+        "posts/page/2/index.html",
+        "Last: https://replace-this-with-your-url.com/posts/page/5/"
+    ));

     assert!(file_exists!(public, "posts/page/3/index.html"));
-    assert!(file_contains!(public, "posts/page/3/index.html", "Num pagers: 4"));
+    assert!(file_contains!(public, "posts/page/3/index.html", "Num pagers: 5"));
     assert!(file_contains!(public, "posts/page/3/index.html", "Page size: 2"));
     assert!(file_contains!(public, "posts/page/3/index.html", "Current index: 3"));
     assert!(file_contains!(public, "posts/page/3/index.html", "has_prev"));
     assert!(file_contains!(public, "posts/page/3/index.html", "has_next"));
-    assert!(file_contains!(public, "posts/page/3/index.html", "First: https://replace-this-with-your-url.com/posts/"));
-    assert!(file_contains!(public, "posts/page/3/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
+    assert!(file_contains!(
+        public,
+        "posts/page/3/index.html",
+        "First: https://replace-this-with-your-url.com/posts/"
+    ));
+    assert!(file_contains!(
+        public,
+        "posts/page/3/index.html",
+        "Last: https://replace-this-with-your-url.com/posts/page/5/"
+    ));

     assert!(file_exists!(public, "posts/page/4/index.html"));
-    assert!(file_contains!(public, "posts/page/4/index.html", "Num pagers: 4"));
+    assert!(file_contains!(public, "posts/page/4/index.html", "Num pagers: 5"));
     assert!(file_contains!(public, "posts/page/4/index.html", "Page size: 2"));
     assert!(file_contains!(public, "posts/page/4/index.html", "Current index: 4"));
     assert!(file_contains!(public, "posts/page/4/index.html", "has_prev"));
-    assert!(!file_contains!(public, "posts/page/4/index.html", "has_next"));
-    assert!(file_contains!(public, "posts/page/4/index.html", "First: https://replace-this-with-your-url.com/posts/"));
-    assert!(file_contains!(public, "posts/page/4/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
+    assert!(file_contains!(public, "posts/page/4/index.html", "has_next"));
+    assert!(file_contains!(
+        public,
+        "posts/page/4/index.html",
+        "First: https://replace-this-with-your-url.com/posts/"
+    ));
+    assert!(file_contains!(
+        public,
+        "posts/page/4/index.html",
+        "Last: https://replace-this-with-your-url.com/posts/page/5/"
+    ));
 }
 #[test]
@@ -365,7 +474,7 @@ fn can_build_site_with_pagination_for_index() {
     let mut site = Site::new(&path, "config.toml").unwrap();
     site.load().unwrap();
     {
-        let index = site.sections.get_mut(&path.join("content").join("_index.md")).unwrap();
+        let index = site.library.get_section_mut(&path.join("content").join("_index.md")).unwrap();
         index.meta.paginate_by = Some(2);
         index.meta.template = Some("index_paginated.html".to_string());
     }
@@ -422,7 +531,6 @@ fn can_build_rss_feed() {
     assert!(file_contains!(public, "rss.xml", "Simple article with shortcodes"));
 }

-
 #[test]
 fn can_build_search_index() {
     let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();
@@ -453,6 +561,53 @@ fn can_build_with_extra_syntaxes() {
     assert!(&public.exists());
     assert!(file_exists!(public, "posts/extra-syntax/index.html"));
-    assert!(file_contains!(public, "posts/extra-syntax/index.html",
-        r#"<span style="background-color:#2b303b;color:#d08770;">test</span>"#));
+    assert!(file_contains!(
+        public,
+        "posts/extra-syntax/index.html",
+        r#"<span style="color:#d08770;">test</span>"#
+    ));
 }
+
+#[test]
+fn can_apply_page_templates() {
+    let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();
+    path.push("test_site");
+    let mut site = Site::new(&path, "config.toml").unwrap();
+    site.load().unwrap();
+
+    let template_path = path.join("content").join("applying_page_template");
+
+    let template_section = site.library.get_section(&template_path.join("_index.md")).unwrap();
+    assert_eq!(template_section.subsections.len(), 2);
+    assert_eq!(template_section.pages.len(), 2);
+
+    let from_section_config = site.library.get_page_by_key(template_section.pages[0]);
+    assert_eq!(from_section_config.meta.template, Some("page_template.html".into()));
+    assert_eq!(from_section_config.meta.title, Some("From section config".into()));
+
+    let override_page_template = site.library.get_page_by_key(template_section.pages[1]);
+    assert_eq!(override_page_template.meta.template, Some("page_template_override.html".into()));
+    assert_eq!(override_page_template.meta.title, Some("Override".into()));
+
+    // It should have applied recursively as well
+    let another_section =
+        site.library.get_section(&template_path.join("another_section").join("_index.md")).unwrap();
+    assert_eq!(another_section.subsections.len(), 0);
+    assert_eq!(another_section.pages.len(), 1);
+
+    let changed_recursively = site.library.get_page_by_key(another_section.pages[0]);
+    assert_eq!(changed_recursively.meta.template, Some("page_template.html".into()));
+    assert_eq!(changed_recursively.meta.title, Some("Changed recursively".into()));
+
+    // But it should not have overridden a child's page_template
+    let yet_another_section = site
+        .library
+        .get_section(&template_path.join("yet_another_section").join("_index.md"))
+        .unwrap();
+    assert_eq!(yet_another_section.subsections.len(), 0);
+    assert_eq!(yet_another_section.pages.len(), 1);
+
+    let child = site.library.get_page_by_key(yet_another_section.pages[0]);
+    assert_eq!(child.meta.template, Some("page_template_child.html".into()));
+    assert_eq!(child.meta.title, Some("Local section override".into()));
+}
@@ -1,16 +0,0 @@
-[package]
-name = "taxonomies"
-version = "0.1.0"
-authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
-
-[dependencies]
-tera = "0.11"
-slug = "0.1"
-serde = "1"
-serde_derive = "1"
-
-errors = { path = "../errors" }
-config = { path = "../config" }
-content = { path = "../content" }
-front_matter = { path = "../front_matter" }
-utils = { path = "../utils" }
@@ -5,13 +5,18 @@ authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

 [dependencies]
 tera = "0.11"
-base64 = "0.9"
+base64 = "0.10"
 lazy_static = "1"
-pulldown-cmark = "0"
+pulldown-cmark = "0.2"
+toml = "0.4"
+csv = "1"
+serde_json = "1.0"
+error-chain = "0.12"
+reqwest = "0.9"
+url = "1.5"

 errors = { path = "../errors" }
 utils = { path = "../utils" }
-content = { path = "../content" }
+library = { path = "../library" }
 config = { path = "../config" }
-taxonomies = { path = "../taxonomies" }
 imageproc = { path = "../imageproc" }
@@ -1 +1 @@
-<a class="gutenberg-anchor" href="#{{ id }}" aria-label="Anchor link for: {{ id }}">🔗</a>
+<a class="zola-anchor" href="#{{ id }}" aria-label="Anchor link for: {{ id }}">🔗</a>
@@ -3,7 +3,7 @@
     <title>{{ config.title }}</title>
     <link>{{ config.base_url | safe }}</link>
    <description>{{ config.description }}</description>
-    <generator>Gutenberg</generator>
+    <generator>Zola</generator>
    <language>{{ config.default_language }}</language>
    <atom:link href="{{ feed_url | safe }}" rel="self" type="application/rss+xml"/>
    <lastBuildDate>{{ last_build_date | date(format="%a, %d %b %Y %H:%M:%S %z") }}</lastBuildDate>
@@ -1,9 +1,8 @@
 use std::collections::HashMap;

-use base64::{encode, decode};
+use base64::{decode, encode};
 use pulldown_cmark as cmark;
-use tera::{Value, to_value, Result as TeraResult};
+use tera::{to_value, Result as TeraResult, Value};

 pub fn markdown(value: Value, args: HashMap<String, Value>) -> TeraResult<Value> {
     let s = try_get_value!("markdown", "value", String, value);
@@ -12,8 +11,12 @@ pub fn markdown(value: Value, args: HashMap<String, Value>) -> TeraResult<Value>
         None => false,
     };

+    let mut opts = cmark::Options::empty();
+    opts.insert(cmark::Options::ENABLE_TABLES);
+    opts.insert(cmark::Options::ENABLE_FOOTNOTES);
+
     let mut html = String::new();
-    let parser = cmark::Parser::new(&s);
+    let parser = cmark::Parser::new_ext(&s, opts);
     cmark::html::push_html(&mut html, parser);

     if inline {
@@ -27,33 +30,23 @@ pub fn markdown(value: Value, args: HashMap<String, Value>) -> TeraResult<Value>
     Ok(to_value(&html).unwrap())
 }

 pub fn base64_encode(value: Value, _: HashMap<String, Value>) -> TeraResult<Value> {
     let s = try_get_value!("base64_encode", "value", String, value);
-    Ok(
-        to_value(&encode(s.as_bytes())).unwrap()
-    )
+    Ok(to_value(&encode(s.as_bytes())).unwrap())
 }

 pub fn base64_decode(value: Value, _: HashMap<String, Value>) -> TeraResult<Value> {
     let s = try_get_value!("base64_decode", "value", String, value);
-    Ok(
-        to_value(
-            &String::from_utf8(
-                decode(s.as_bytes()).unwrap()
-            ).unwrap()
-        ).unwrap()
-    )
+    Ok(to_value(&String::from_utf8(decode(s.as_bytes()).unwrap()).unwrap()).unwrap())
 }

 #[cfg(test)]
 mod tests {
     use std::collections::HashMap;

     use tera::to_value;

-    use super::{markdown, base64_decode, base64_encode};
+    use super::{base64_decode, base64_encode, markdown};

     #[test]
     fn markdown_filter() {
@@ -66,11 +59,35 @@ mod tests {
     fn markdown_filter_inline() {
         let mut args = HashMap::new();
         args.insert("inline".to_string(), to_value(true).unwrap());
-        let result = markdown(to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(), args);
+        let result = markdown(
+            to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(),
+            args,
+        );
         assert!(result.is_ok());
         assert_eq!(result.unwrap(), to_value(&"Using <code>map</code>, <code>filter</code>, and <code>fold</code> instead of <code>for</code>").unwrap());
     }

+    // https://github.com/Keats/gutenberg/issues/417
+    #[test]
+    fn markdown_filter_inline_tables() {
+        let mut args = HashMap::new();
+        args.insert("inline".to_string(), to_value(true).unwrap());
+        let result = markdown(
+            to_value(
+                &r#"
+|id|author_id| timestamp_created|title |content |
+|-:|--------:|-----------------------:|:---------------------|:-----------------|
+| 1| 1|2018-09-05 08:03:43.141Z|How to train your ORM |Badly written blog|
+| 2| 1|2018-08-22 13:11:50.050Z|How to bake a nice pie|Badly written blog|
+"#,
+            )
+            .unwrap(),
+            args,
+        );
+        assert!(result.is_ok());
+        assert!(result.unwrap().as_str().unwrap().contains("<table>"));
+    }
+
     #[test]
     fn base64_encode_filter() {
         // from https://tools.ietf.org/html/rfc4648#section-10
@@ -81,7 +98,7 @@ mod tests {
             ("foo", "Zm9v"),
             ("foob", "Zm9vYg=="),
             ("fooba", "Zm9vYmE="),
-            ("foobar", "Zm9vYmFy")
+            ("foobar", "Zm9vYmFy"),
         ];
         for (input, expected) in tests {
             let args = HashMap::new();
@@ -91,7 +108,6 @@ mod tests {
         }
     }

-
     #[test]
     fn base64_decode_filter() {
         let tests = vec![
@@ -101,7 +117,7 @@ mod tests {
             ("Zm9v", "foo"),
             ("Zm9vYg==", "foob"),
             ("Zm9vYmE=", "fooba"),
-            ("Zm9vYmFy", "foobar")
+            ("Zm9vYmFy", "foobar"),
         ];
         for (input, expected) in tests {
             let args = HashMap::new();
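
// A minimal, self-contained sketch of the pulldown-cmark setup the `markdown`
// filter now uses: table (and footnote) syntax is only parsed when the matching
// options are enabled and `Parser::new_ext` is used instead of `Parser::new`.
// Crate `pulldown-cmark` 0.2 is assumed, matching the Cargo.toml bump above.

extern crate pulldown_cmark;

use pulldown_cmark::{html, Options, Parser};

fn main() {
    let src = "|id|title|\n|-:|:----|\n|1|How to train your ORM|";
    let mut opts = Options::empty();
    opts.insert(Options::ENABLE_TABLES);
    let mut out = String::new();
    html::push_html(&mut out, Parser::new_ext(src, opts));
    // With ENABLE_TABLES set, the pipe syntax becomes an HTML table.
    assert!(out.contains("<table>"));
}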
@@ -0,0 +1,471 @@
+extern crate serde_json;
+extern crate toml;
+
+use utils::de::fix_toml_dates;
+use utils::fs::{get_file_time, is_path_in_directory, read_file};
+
+use reqwest::{header, Client};
+use std::collections::hash_map::DefaultHasher;
+use std::fmt;
+use std::hash::{Hash, Hasher};
+use std::str::FromStr;
+use url::Url;
+
+use std::path::PathBuf;
+use std::sync::{Arc, Mutex};
+
+use csv::Reader;
+use std::collections::HashMap;
+use tera::{from_value, to_value, Error, GlobalFn, Map, Result, Value};
+
+static GET_DATA_ARGUMENT_ERROR_MESSAGE: &str =
+    "`load_data`: requires EITHER a `path` or `url` argument";
+
+enum DataSource {
+    Url(Url),
+    Path(PathBuf),
+}
+
+#[derive(Debug)]
+enum OutputFormat {
+    Toml,
+    Json,
+    Csv,
+    Plain,
+}
+
+impl fmt::Display for OutputFormat {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Debug::fmt(self, f)
+    }
+}
+
+impl Hash for OutputFormat {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.to_string().hash(state);
+    }
+}
+
+impl FromStr for OutputFormat {
+    type Err = Error;
+
+    fn from_str(output_format: &str) -> Result<Self> {
+        return match output_format {
+            "toml" => Ok(OutputFormat::Toml),
+            "csv" => Ok(OutputFormat::Csv),
+            "json" => Ok(OutputFormat::Json),
+            "plain" => Ok(OutputFormat::Plain),
+            format => Err(format!("Unknown output format {}", format).into()),
+        };
+    }
+}
+
+impl OutputFormat {
+    fn as_accept_header(&self) -> header::HeaderValue {
+        return header::HeaderValue::from_static(match self {
+            OutputFormat::Json => "application/json",
+            OutputFormat::Csv => "text/csv",
+            OutputFormat::Toml => "application/toml",
+            OutputFormat::Plain => "text/plain",
+        });
+    }
+}
+
+impl DataSource {
+    fn from_args(
+        path_arg: Option<String>,
+        url_arg: Option<String>,
+        content_path: &PathBuf,
+    ) -> Result<Self> {
+        if path_arg.is_some() && url_arg.is_some() {
+            return Err(GET_DATA_ARGUMENT_ERROR_MESSAGE.into());
+        }
+
+        if let Some(path) = path_arg {
+            let full_path = content_path.join(path);
+            if !full_path.exists() {
+                return Err(format!("{} doesn't exist", full_path.display()).into());
+            }
+            return Ok(DataSource::Path(full_path));
+        }
+
+        if let Some(url) = url_arg {
+            return Url::parse(&url)
+                .map(|parsed_url| DataSource::Url(parsed_url))
+                .map_err(|e| format!("Failed to parse {} as url: {}", url, e).into());
+        }
+
+        return Err(GET_DATA_ARGUMENT_ERROR_MESSAGE.into());
+    }
+
+    fn get_cache_key(&self, format: &OutputFormat) -> u64 {
+        let mut hasher = DefaultHasher::new();
+        format.hash(&mut hasher);
+        self.hash(&mut hasher);
+        return hasher.finish();
+    }
+}
+
+impl Hash for DataSource {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        match self {
+            DataSource::Url(url) => url.hash(state),
+            DataSource::Path(path) => {
+                path.hash(state);
+                get_file_time(&path).expect("get file time").hash(state);
+            }
+        };
+    }
+}
+
+fn get_data_source_from_args(
+    content_path: &PathBuf,
+    args: &HashMap<String, Value>,
+) -> Result<DataSource> {
+    let path_arg = optional_arg!(String, args.get("path"), GET_DATA_ARGUMENT_ERROR_MESSAGE);
+    let url_arg = optional_arg!(String, args.get("url"), GET_DATA_ARGUMENT_ERROR_MESSAGE);
+    return DataSource::from_args(path_arg, url_arg, content_path);
+}
+
+fn read_data_file(base_path: &PathBuf, full_path: PathBuf) -> Result<String> {
+    if !is_path_in_directory(&base_path, &full_path)
+        .map_err(|e| format!("Failed to read data file {}: {}", full_path.display(), e))?
+    {
+        return Err(format!(
+            "{} is not inside the base site directory {}",
+            full_path.display(),
+            base_path.display()
+        )
+        .into());
+    }
+    return read_file(&full_path).map_err(|e| {
+        format!("`load_data`: error {} loading file {}", full_path.to_str().unwrap(), e).into()
+    });
+}
+
+fn get_output_format_from_args(
+    args: &HashMap<String, Value>,
+    data_source: &DataSource,
+) -> Result<OutputFormat> {
+    let format_arg = optional_arg!(
+        String,
+        args.get("format"),
+        "`load_data`: `format` needs to be an argument with a string value, being one of the supported `load_data` file types (csv, json, toml)"
+    );
+
+    if let Some(format) = format_arg {
+        return OutputFormat::from_str(&format);
+    }
+
+    let from_extension = if let DataSource::Path(path) = data_source {
+        let extension_result: Result<&str> =
+            path.extension().map(|extension| extension.to_str().unwrap()).ok_or(
+                format!("Could not determine format for {} from extension", path.display()).into(),
+            );
+        extension_result?
+    } else {
+        "plain"
+    };
+    return OutputFormat::from_str(from_extension);
+}
+
+/// A global function to load data from a file or from a URL
+/// Currently the supported formats are json, toml, csv and plain text
+pub fn make_load_data(content_path: PathBuf, base_path: PathBuf) -> GlobalFn {
+    let mut headers = header::HeaderMap::new();
+    headers.insert(header::USER_AGENT, "zola".parse().unwrap());
+    let client = Arc::new(Mutex::new(Client::builder().build().expect("reqwest client build")));
+    let result_cache: Arc<Mutex<HashMap<u64, Value>>> = Arc::new(Mutex::new(HashMap::new()));
+    Box::new(move |args| -> Result<Value> {
+        let data_source = get_data_source_from_args(&content_path, &args)?;
+        let file_format = get_output_format_from_args(&args, &data_source)?;
+        let cache_key = data_source.get_cache_key(&file_format);
+
+        let mut cache = result_cache.lock().expect("result cache lock");
+        let response_client = client.lock().expect("response client lock");
+        if let Some(cached_result) = cache.get(&cache_key) {
+            return Ok(cached_result.clone());
+        }
+
+        let data = match data_source {
+            DataSource::Path(path) => read_data_file(&base_path, path),
+            DataSource::Url(url) => {
+                let mut response = response_client
+                    .get(url.as_str())
+                    .header(header::ACCEPT, file_format.as_accept_header())
+                    .send()
+                    .and_then(|res| res.error_for_status())
+                    .map_err(|e| {
+                        format!(
+                            "Failed to request {}: {}",
+                            url,
+                            e.status().expect("response status")
+                        )
+                    })?;
+                response
+                    .text()
+                    .map_err(|e| format!("Failed to parse response from {}: {:?}", url, e).into())
+            }
+        }?;
+
+        let result_value: Result<Value> = match file_format {
+            OutputFormat::Toml => load_toml(data),
+            OutputFormat::Csv => load_csv(data),
+            OutputFormat::Json => load_json(data),
+            OutputFormat::Plain => to_value(data).map_err(|e| e.into()),
+        };
+
+        if let Ok(data_result) = &result_value {
+            cache.insert(cache_key, data_result.clone());
+        }
+
+        result_value
+    })
+}
+
+/// Parse a JSON string and convert it to a Tera Value
+fn load_json(json_data: String) -> Result<Value> {
+    let json_content: Value =
+        serde_json::from_str(json_data.as_str()).map_err(|e| format!("{:?}", e))?;
+    return Ok(json_content);
+}
+
+/// Parse a TOML string and convert it to a Tera Value
+fn load_toml(toml_data: String) -> Result<Value> {
+    let toml_content: toml::Value = toml::from_str(&toml_data).map_err(|e| format!("{:?}", e))?;
+    let toml_value = to_value(toml_content).expect("Got invalid JSON that was valid TOML somehow");
+
+    match toml_value {
+        Value::Object(m) => Ok(fix_toml_dates(m)),
+        _ => unreachable!("Loaded something other than a TOML object"),
+    }
+}
+
+/// Parse a CSV string and convert it to a Tera Value
+///
+/// An example csv file `example.csv` could be:
+/// ```csv
+/// Number, Title
+/// 1,Gutenberg
+/// 2,Printing
+/// ```
+/// The json value output would be:
+/// ```json
+/// {
+///     "headers": ["Number", "Title"],
+///     "records": [
+///         ["1", "Gutenberg"],
+///         ["2", "Printing"]
+///     ],
+/// }
+/// ```
+fn load_csv(csv_data: String) -> Result<Value> {
+    let mut reader = Reader::from_reader(csv_data.as_bytes());
+    let mut csv_map = Map::new();
+
+    {
+        let hdrs = reader.headers().map_err(|e| {
+            format!("'load_data': {} - unable to read CSV header line (line 1) for CSV file", e)
+        })?;
+
+        let headers_array = hdrs.iter().map(|v| Value::String(v.to_string())).collect();
+
+        csv_map.insert(String::from("headers"), Value::Array(headers_array));
+    }
+
+    {
+        let records = reader.records();
+
+        let mut records_array: Vec<Value> = Vec::new();
+
+        for result in records {
+            let record = result.unwrap();
+
+            let mut elements_array: Vec<Value> = Vec::new();
+
+            for e in record.into_iter() {
+                elements_array.push(Value::String(String::from(e)));
+            }
+
+            records_array.push(Value::Array(elements_array));
+        }
+
+        csv_map.insert(String::from("records"), Value::Array(records_array));
+    }
+
+    let csv_value: Value = Value::Object(csv_map);
+    to_value(csv_value).map_err(|err| err.into())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{make_load_data, DataSource, OutputFormat};
+
+    use std::collections::HashMap;
+    use std::path::PathBuf;
+
+    use tera::to_value;
+
+    fn get_test_file(filename: &str) -> PathBuf {
+        let test_files = PathBuf::from("../utils/test-files").canonicalize().unwrap();
+        return test_files.join(filename);
+    }
+
+    #[test]
+    fn fails_when_missing_file() {
+        let static_fn =
+            make_load_data(PathBuf::from("../utils/test-files"), PathBuf::from("../utils"));
+        let mut args = HashMap::new();
+        args.insert("path".to_string(), to_value("../../../READMEE.md").unwrap());
+        let result = static_fn(args);
+        assert!(result.is_err());
+        assert!(result.unwrap_err().description().contains("READMEE.md doesn't exist"));
+    }
+
+    #[test]
+    fn cant_load_outside_content_dir() {
+        let static_fn =
+            make_load_data(PathBuf::from("../utils/test-files"), PathBuf::from("../utils"));
+        let mut args = HashMap::new();
+        args.insert("path".to_string(), to_value("../../../README.md").unwrap());
+        args.insert("format".to_string(), to_value("plain").unwrap());
+        let result = static_fn(args);
+        assert!(result.is_err());
+        assert!(result
+            .unwrap_err()
+            .description()
+            .contains("README.md is not inside the base site directory"));
+    }
+
+    #[test]
+    fn calculates_cache_key_for_path() {
+        // We can't test against a fixed value, due to the fact the cache key is built from the absolute path
+        let cache_key =
+            DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
+        let cache_key_2 =
+            DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
+        assert_eq!(cache_key, cache_key_2);
+    }
+
+    #[test]
+    fn calculates_cache_key_for_url() {
+        let cache_key =
+            DataSource::Url("https://api.github.com/repos/getzola/zola".parse().unwrap())
+                .get_cache_key(&OutputFormat::Plain);
+        assert_eq!(cache_key, 8916756616423791754);
+    }
+
+    #[test]
+    fn different_cache_key_per_filename() {
+        let toml_cache_key =
+            DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
+        let json_cache_key =
+            DataSource::Path(get_test_file("test.json")).get_cache_key(&OutputFormat::Toml);
+        assert_ne!(toml_cache_key, json_cache_key);
+    }
+
+    #[test]
+    fn different_cache_key_per_format() {
+        let toml_cache_key =
+            DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Toml);
+        let json_cache_key =
+            DataSource::Path(get_test_file("test.toml")).get_cache_key(&OutputFormat::Json);
+        assert_ne!(toml_cache_key, json_cache_key);
+    }
+
+    #[test]
+    fn can_load_remote_data() {
+        let static_fn = make_load_data(PathBuf::new(), PathBuf::new());
+        let mut args = HashMap::new();
+        args.insert("url".to_string(), to_value("https://httpbin.org/json").unwrap());
+        args.insert("format".to_string(), to_value("json").unwrap());
+        let result = static_fn(args).unwrap();
+
+        assert_eq!(
+            result.get("slideshow").unwrap().get("title").unwrap(),
+            &to_value("Sample Slide Show").unwrap()
+        );
+    }
+
+    #[test]
+    fn fails_when_request_404s() {
+        let static_fn = make_load_data(PathBuf::new(), PathBuf::new());
+        let mut args = HashMap::new();
+        args.insert("url".to_string(), to_value("https://httpbin.org/status/404/").unwrap());
+        args.insert("format".to_string(), to_value("json").unwrap());
+        let result = static_fn(args);
+
+        assert!(result.is_err());
+        assert_eq!(
+            result.unwrap_err().description(),
+            "Failed to request https://httpbin.org/status/404/: 404 Not Found"
+        );
+    }
+
+    #[test]
+    fn can_load_toml() {
+        let static_fn = make_load_data(
+            PathBuf::from("../utils/test-files"),
+            PathBuf::from("../utils/test-files"),
+        );
+        let mut args = HashMap::new();
+        args.insert("path".to_string(), to_value("test.toml").unwrap());
+        let result = static_fn(args.clone()).unwrap();
+
+        // TOML does not load in order
+        assert_eq!(
+            result,
+            json!({
+                "category": {
+                    "date": "1979-05-27T07:32:00Z",
+                    "key": "value"
+                },
+            })
+        );
+    }
+
+    #[test]
+    fn can_load_csv() {
+        let static_fn = make_load_data(
+            PathBuf::from("../utils/test-files"),
+            PathBuf::from("../utils/test-files"),
+        );
+        let mut args = HashMap::new();
+        args.insert("path".to_string(), to_value("test.csv").unwrap());
+        let result = static_fn(args.clone()).unwrap();
+
+        assert_eq!(
+            result,
+            json!({
+                "headers": ["Number", "Title"],
+                "records": [
+                    ["1", "Gutenberg"],
+                    ["2", "Printing"]
+                ],
+            })
+        )
+    }
+
+    #[test]
+    fn can_load_json() {
+        let static_fn = make_load_data(
+            PathBuf::from("../utils/test-files"),
+            PathBuf::from("../utils/test-files"),
+        );
+        let mut args = HashMap::new();
+        args.insert("path".to_string(), to_value("test.json").unwrap());
+        let result = static_fn(args.clone()).unwrap();

+        assert_eq!(
+            result,
+            json!({
+                "key": "value",
+                "array": [1, 2, 3],
+                "subpackage": {
+                    "subkey": 5
+                }
+            })
+        )
+    }
+}
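
// A minimal sketch of driving the `load_data` global function above from Rust,
// the same way its unit tests do; it assumes the module's imports are in scope,
// and the `data/example.csv` path is hypothetical. In a real site the call
// happens from a Tera template instead. When no `format` argument is given, the
// format is inferred from the file extension, and a repeated call with the same
// source and format is answered from the in-memory cache keyed on their hash.

fn demo() -> Result<Value> {
    let load_data = make_load_data(PathBuf::from("content"), PathBuf::from("."));
    let mut args = HashMap::new();
    args.insert("path".to_string(), to_value("data/example.csv").unwrap());
    // Returns `{"headers": [...], "records": [...]}` for a CSV source.
    load_data(args)
}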
@@ -0,0 +1,25 @@
+#[macro_export]
+macro_rules! required_arg {
+    ($ty: ty, $e: expr, $err: expr) => {
+        match $e {
+            Some(v) => match from_value::<$ty>(v.clone()) {
+                Ok(u) => u,
+                Err(_) => return Err($err.into()),
+            },
+            None => return Err($err.into()),
+        }
+    };
+}
+
+#[macro_export]
+macro_rules! optional_arg {
+    ($ty: ty, $e: expr, $err: expr) => {
+        match $e {
+            Some(v) => match from_value::<$ty>(v.clone()) {
+                Ok(u) => Some(u),
+                Err(_) => return Err($err.into()),
+            },
+            None => None,
+        }
+    };
+}
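
// A minimal sketch (the `shout` function is hypothetical) of how the two
// extracted macros are meant to be used when building a Tera global function,
// mirroring `make_trans` below: `required_arg!` early-returns the given error
// when the key is missing or has the wrong type, while `optional_arg!` yields
// an `Option` instead.

use std::collections::HashMap;
use tera::{from_value, to_value, GlobalFn, Result, Value};

fn make_shout() -> GlobalFn {
    Box::new(move |args: HashMap<String, Value>| -> Result<Value> {
        let text = required_arg!(String, args.get("text"), "`shout` requires a `text` argument");
        let times = optional_arg!(usize, args.get("times"), "`shout`: `times` must be an integer")
            .unwrap_or(1);
        Ok(to_value(text.to_uppercase().repeat(times)).unwrap())
    })
}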
@@ -1,40 +1,22 @@ | |||||
extern crate error_chain; | |||||
use std::collections::HashMap; | use std::collections::HashMap; | ||||
use std::path::PathBuf; | |||||
use std::sync::{Arc, Mutex}; | use std::sync::{Arc, Mutex}; | ||||
use tera::{GlobalFn, Value, from_value, to_value, Result}; | |||||
use tera::{from_value, to_value, GlobalFn, Result, Value}; | |||||
use content::{Page, Section}; | |||||
use config::Config; | use config::Config; | ||||
use library::{Library, Taxonomy}; | |||||
use utils::site::resolve_internal_link; | use utils::site::resolve_internal_link; | ||||
use taxonomies::Taxonomy; | |||||
use imageproc; | use imageproc; | ||||
#[macro_use] | |||||
mod macros; | |||||
macro_rules! required_arg { | |||||
($ty: ty, $e: expr, $err: expr) => { | |||||
match $e { | |||||
Some(v) => match from_value::<$ty>(v.clone()) { | |||||
Ok(u) => u, | |||||
Err(_) => return Err($err.into()) | |||||
}, | |||||
None => return Err($err.into()) | |||||
} | |||||
}; | |||||
} | |||||
macro_rules! optional_arg { | |||||
($ty: ty, $e: expr, $err: expr) => { | |||||
match $e { | |||||
Some(v) => match from_value::<$ty>(v.clone()) { | |||||
Ok(u) => Some(u), | |||||
Err(_) => return Err($err.into()) | |||||
}, | |||||
None => None | |||||
} | |||||
}; | |||||
} | |||||
mod load_data; | |||||
pub use self::load_data::make_load_data; | |||||
pub fn make_trans(config: Config) -> GlobalFn { | pub fn make_trans(config: Config) -> GlobalFn { | ||||
let translations_config = config.translations; | let translations_config = config.translations; | ||||
@@ -42,21 +24,20 @@ pub fn make_trans(config: Config) -> GlobalFn {
     Box::new(move |args| -> Result<Value> {
         let key = required_arg!(String, args.get("key"), "`trans` requires a `key` argument.");
-        let lang = optional_arg!(
-            String,
-            args.get("lang"),
-            "`trans`: `lang` must be a string."
-        ).unwrap_or(default_lang.clone());
+        let lang = optional_arg!(String, args.get("lang"), "`trans`: `lang` must be a string.")
+            .unwrap_or_else(|| default_lang.clone());
         let translations = &translations_config[lang.as_str()];
         Ok(to_value(&translations[key.as_str()]).unwrap())
     })
 }

-pub fn make_get_page(all_pages: &HashMap<PathBuf, Page>) -> GlobalFn {
+pub fn make_get_page(library: &Library) -> GlobalFn {
     let mut pages = HashMap::new();
-    for page in all_pages.values() {
-        pages.insert(page.file.relative.clone(), page.clone());
+    for page in library.pages_values() {
+        pages.insert(
+            page.file.relative.clone(),
+            to_value(library.get_page(&page.file.path).unwrap().to_serialized(library)).unwrap(),
+        );
     }

     Box::new(move |args| -> Result<Value> {
@@ -66,19 +47,27 @@ pub fn make_get_page(all_pages: &HashMap<PathBuf, Page>) -> GlobalFn {
             "`get_page` requires a `path` argument with a string value"
         );
         match pages.get(&path) {
-            Some(p) => Ok(to_value(p).unwrap()),
-            None => Err(format!("Page `{}` not found.", path).into())
+            Some(p) => Ok(p.clone()),
+            None => Err(format!("Page `{}` not found.", path).into()),
         }
     })
 }
-pub fn make_get_section(all_sections: &HashMap<PathBuf, Section>) -> GlobalFn {
+pub fn make_get_section(library: &Library) -> GlobalFn {
     let mut sections = HashMap::new();
-    for section in all_sections.values() {
-        if section.file.components == vec!["rebuild".to_string()] {
-            //println!("Setting sections:\n{:#?}", section.pages[0]);
-        }
-        sections.insert(section.file.relative.clone(), section.clone());
+    let mut sections_basic = HashMap::new();
+    for section in library.sections_values() {
+        sections.insert(
+            section.file.relative.clone(),
+            to_value(library.get_section(&section.file.path).unwrap().to_serialized(library))
+                .unwrap(),
+        );
+        sections_basic.insert(
+            section.file.relative.clone(),
+            to_value(library.get_section(&section.file.path).unwrap().to_serialized_basic(library))
+                .unwrap(),
+        );
     }

     Box::new(move |args| -> Result<Value> {
@@ -87,27 +76,28 @@ pub fn make_get_section(all_sections: &HashMap<PathBuf, Section>) -> GlobalFn {
             args.get("path"),
             "`get_section` requires a `path` argument with a string value"
         );

-        //println!("Found {:#?}", sections.get(&path).unwrap().pages[0]);
-        match sections.get(&path) {
-            Some(p) => Ok(to_value(p).unwrap()),
-            None => Err(format!("Section `{}` not found.", path).into())
+        let metadata_only = args
+            .get("metadata_only")
+            .map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));
+
+        let container = if metadata_only { &sections_basic } else { &sections };
+
+        match container.get(&path) {
+            Some(p) => Ok(p.clone()),
+            None => Err(format!("Section `{}` not found.", path).into()),
         }
     })
 }
 pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> GlobalFn {
     Box::new(move |args| -> Result<Value> {
-        let cachebust = args
-            .get("cachebust")
-            .map_or(false, |c| {
-                from_value::<bool>(c.clone()).unwrap_or(false)
-            });
+        let cachebust =
+            args.get("cachebust").map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));

         let trailing_slash = args
             .get("trailing_slash")
-            .map_or(true, |c| {
-                from_value::<bool>(c.clone()).unwrap_or(true)
-            });
+            .map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));

         let path = required_arg!(
             String,
@@ -117,12 +107,14 @@ pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> Glob
         if path.starts_with("./") {
             match resolve_internal_link(&path, &permalinks) {
                 Ok(url) => Ok(to_value(url).unwrap()),
-                Err(_) => Err(format!("Could not resolve URL for link `{}` not found.", path).into())
+                Err(_) => {
+                    Err(format!("Could not resolve URL for link `{}` not found.", path).into())
+                }
             }
         } else {
             // anything else
             let mut permalink = config.make_permalink(&path);
-            if !trailing_slash && permalink.ends_with("/") {
+            if !trailing_slash && permalink.ends_with('/') {
                 permalink.pop(); // Removes the slash
             }

@@ -134,10 +126,11 @@ pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> Glob
     })
 }
-pub fn make_get_taxonomy(all_taxonomies: Vec<Taxonomy>) -> GlobalFn {
+pub fn make_get_taxonomy(all_taxonomies: &[Taxonomy], library: &Library) -> GlobalFn {
     let mut taxonomies = HashMap::new();
     for taxonomy in all_taxonomies {
-        taxonomies.insert(taxonomy.kind.name.clone(), taxonomy);
+        taxonomies
+            .insert(taxonomy.kind.name.clone(), to_value(taxonomy.to_serialized(library)).unwrap());
     }

     Box::new(move |args| -> Result<Value> {
@@ -148,19 +141,25 @@ pub fn make_get_taxonomy(all_taxonomies: Vec<Taxonomy>) -> GlobalFn {
         );
         let container = match taxonomies.get(&kind) {
             Some(c) => c,
-            None => return Err(
-                format!("`get_taxonomy` received an unknown taxonomy as kind: {}", kind).into()
-            ),
+            None => {
+                return Err(
+                    format!("`get_taxonomy` received an unknown taxonomy as kind: {}", kind).into()
+                )
+            }
         };

-        return Ok(to_value(container).unwrap());
+        Ok(to_value(container).unwrap())
     })
 }

-pub fn make_get_taxonomy_url(all_taxonomies: Vec<Taxonomy>) -> GlobalFn {
+pub fn make_get_taxonomy_url(all_taxonomies: &[Taxonomy]) -> GlobalFn {
     let mut taxonomies = HashMap::new();
     for taxonomy in all_taxonomies {
-        taxonomies.insert(taxonomy.kind.name.clone(), taxonomy);
+        let mut items = HashMap::new();
+        for item in &taxonomy.items {
+            items.insert(item.name.clone(), item.permalink.clone());
+        }
+        taxonomies.insert(taxonomy.kind.name.clone(), items);
     }

     Box::new(move |args| -> Result<Value> {
@@ -176,20 +175,20 @@ pub fn make_get_taxonomy_url(all_taxonomies: Vec<Taxonomy>) -> GlobalFn {
         );
         let container = match taxonomies.get(&kind) {
             Some(c) => c,
-            None => return Err(
-                format!("`get_taxonomy_url` received an unknown taxonomy as kind: {}", kind).into()
-            )
+            None => {
+                return Err(format!(
+                    "`get_taxonomy_url` received an unknown taxonomy as kind: {}",
+                    kind
+                )
+                .into())
+            }
         };

-        for item in &container.items {
-            if item.name == name {
-                return Ok(to_value(item.permalink.clone()).unwrap());
-            }
+        if let Some(ref permalink) = container.get(&name) {
+            return Ok(to_value(permalink.clone()).unwrap());
         }

-        Err(
-            format!("`get_taxonomy_url`: couldn't find `{}` in `{}` taxonomy", name, kind).into()
-        )
+        Err(format!("`get_taxonomy_url`: couldn't find `{}` in `{}` taxonomy", name, kind).into())
     })
 }
@@ -213,16 +212,11 @@ pub fn make_resize_image(imageproc: Arc<Mutex<imageproc::Processor>>) -> GlobalF
             args.get("height"),
             "`resize_image`: `height` must be a non-negative integer"
         );
-        let op = optional_arg!(
-            String,
-            args.get("op"),
-            "`resize_image`: `op` must be a string"
-        ).unwrap_or(DEFAULT_OP.to_string());
-        let quality = optional_arg!(
-            u8,
-            args.get("quality"),
-            "`resize_image`: `quality` must be a number"
-        ).unwrap_or(DEFAULT_Q);
+        let op = optional_arg!(String, args.get("op"), "`resize_image`: `op` must be a string")
+            .unwrap_or_else(|| DEFAULT_OP.to_string());
+        let quality =
+            optional_arg!(u8, args.get("quality"), "`resize_image`: `quality` must be a number")
+                .unwrap_or(DEFAULT_Q);
         if quality == 0 || quality > 100 {
             return Err("`resize_image`: `quality` must be in range 1-100".to_string().into());
         }
@@ -240,18 +234,16 @@ pub fn make_resize_image(imageproc: Arc<Mutex<imageproc::Processor>>) -> GlobalF
     })
 }
 #[cfg(test)]
 mod tests {
-    use super::{make_get_url, make_get_taxonomy, make_get_taxonomy_url, make_trans};
+    use super::{make_get_taxonomy, make_get_taxonomy_url, make_get_url, make_trans};

     use std::collections::HashMap;

-    use tera::to_value;
+    use tera::{to_value, Value};

     use config::{Config, Taxonomy as TaxonomyConfig};
-    use taxonomies::{Taxonomy, TaxonomyItem};
+    use library::{Library, Taxonomy, TaxonomyItem};

     #[test]
     fn can_add_cachebust_to_url() {
@@ -260,28 +252,28 @@ mod tests {
         let mut args = HashMap::new();
         args.insert("path".to_string(), to_value("app.css").unwrap());
         args.insert("cachebust".to_string(), to_value(true).unwrap());
-        assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css/?t=1");
+        assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css?t=1");
     }

     #[test]
-    fn can_remove_trailing_slashes() {
+    fn can_add_trailing_slashes() {
         let config = Config::default();
         let static_fn = make_get_url(HashMap::new(), config);
         let mut args = HashMap::new();
         args.insert("path".to_string(), to_value("app.css").unwrap());
-        args.insert("trailing_slash".to_string(), to_value(false).unwrap());
-        assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css");
+        args.insert("trailing_slash".to_string(), to_value(true).unwrap());
+        assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css/");
     }

     #[test]
-    fn can_remove_slashes_and_cachebust() {
+    fn can_add_slashes_and_cachebust() {
         let config = Config::default();
         let static_fn = make_get_url(HashMap::new(), config);
         let mut args = HashMap::new();
         args.insert("path".to_string(), to_value("app.css").unwrap());
-        args.insert("trailing_slash".to_string(), to_value(false).unwrap());
+        args.insert("trailing_slash".to_string(), to_value(true).unwrap());
         args.insert("cachebust".to_string(), to_value(true).unwrap());
-        assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css?t=1");
+        assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css/?t=1");
     }

     #[test]
@@ -290,28 +282,42 @@ mod tests {
         let static_fn = make_get_url(HashMap::new(), config);
         let mut args = HashMap::new();
         args.insert("path".to_string(), to_value("app.css").unwrap());
-        assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css/");
+        assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css");
     }

     #[test]
     fn can_get_taxonomy() {
         let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
-        let tag = TaxonomyItem::new(
-            "Progamming",
-            "tags",
-            &Config::default(),
-            vec![],
-        );
-        let tags = Taxonomy {
-            kind: taxo_config,
-            items: vec![tag],
-        };
+        let library = Library::new(0, 0);
+        let tag = TaxonomyItem::new("Programming", "tags", &Config::default(), vec![], &library);
+        let tags = Taxonomy { kind: taxo_config, items: vec![tag] };

-        let static_fn = make_get_taxonomy(vec![tags.clone()]);
+        let taxonomies = vec![tags.clone()];
+        let static_fn = make_get_taxonomy(&taxonomies, &library);
         // can find it correctly
         let mut args = HashMap::new();
         args.insert("kind".to_string(), to_value("tags").unwrap());
-        assert_eq!(static_fn(args).unwrap(), to_value(&tags).unwrap());
+        let res = static_fn(args).unwrap();
+        let res_obj = res.as_object().unwrap();
+        assert_eq!(res_obj["kind"], to_value(tags.kind).unwrap());
+        assert_eq!(res_obj["items"].clone().as_array().unwrap().len(), 1);
+        assert_eq!(
+            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["name"],
+            Value::String("Programming".to_string())
+        );
+        assert_eq!(
+            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["slug"],
+            Value::String("programming".to_string())
+        );
+        assert_eq!(
+            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()
+                ["permalink"],
+            Value::String("http://a-website.com/tags/programming/".to_string())
+        );
+        assert_eq!(
+            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["pages"],
+            Value::Array(vec![])
+        );
         // and errors if it can't find it
         let mut args = HashMap::new();
         args.insert("kind".to_string(), to_value("something-else").unwrap());
@@ -321,23 +327,20 @@ mod tests {
     #[test]
     fn can_get_taxonomy_url() {
         let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
-        let tag = TaxonomyItem::new(
-            "Programming",
-            "tags",
-            &Config::default(),
-            vec![],
-        );
-        let tags = Taxonomy {
-            kind: taxo_config,
-            items: vec![tag],
-        };
+        let library = Library::new(0, 0);
+        let tag = TaxonomyItem::new("Programming", "tags", &Config::default(), vec![], &library);
+        let tags = Taxonomy { kind: taxo_config, items: vec![tag] };

-        let static_fn = make_get_taxonomy_url(vec![tags.clone()]);
+        let taxonomies = vec![tags.clone()];
+        let static_fn = make_get_taxonomy_url(&taxonomies);
         // can find it correctly
         let mut args = HashMap::new();
         args.insert("kind".to_string(), to_value("tags").unwrap());
         args.insert("name".to_string(), to_value("Programming").unwrap());
-        assert_eq!(static_fn(args).unwrap(), to_value("http://a-website.com/tags/programming/").unwrap());
+        assert_eq!(
+            static_fn(args).unwrap(),
+            to_value("http://a-website.com/tags/programming/").unwrap()
+        );
         // and errors if it can't find it
         let mut args = HashMap::new();
         args.insert("kind".to_string(), to_value("tags").unwrap());
@@ -3,24 +3,32 @@ extern crate lazy_static;
 #[macro_use]
 extern crate tera;
 extern crate base64;
+extern crate csv;
 extern crate pulldown_cmark;
+extern crate reqwest;
+extern crate url;
+#[cfg(test)]
+#[macro_use]
+extern crate serde_json;
+#[cfg(not(test))]
+extern crate serde_json;

-extern crate errors;
-extern crate utils;
-extern crate content;
 extern crate config;
-extern crate taxonomies;
+extern crate errors;
 extern crate imageproc;
+extern crate library;
+extern crate utils;

 pub mod filters;
 pub mod global_fns;

-use tera::{Tera, Context};
+use tera::{Context, Tera};

 use errors::{Result, ResultExt};

 lazy_static! {
-    pub static ref GUTENBERG_TERA: Tera = {
+    pub static ref ZOLA_TERA: Tera = {
         let mut tera = Tera::default();
         tera.add_raw_templates(vec![
             ("404.html", include_str!("builtins/404.html")),
@@ -28,14 +36,13 @@ lazy_static! {
             ("sitemap.xml", include_str!("builtins/sitemap.xml")),
             ("robots.txt", include_str!("builtins/robots.txt")),
             ("anchor-link.html", include_str!("builtins/anchor-link.html")),
             ("shortcodes/youtube.html", include_str!("builtins/shortcodes/youtube.html")),
             ("shortcodes/vimeo.html", include_str!("builtins/shortcodes/vimeo.html")),
             ("shortcodes/gist.html", include_str!("builtins/shortcodes/gist.html")),
             ("shortcodes/streamable.html", include_str!("builtins/shortcodes/streamable.html")),
             ("internal/alias.html", include_str!("builtins/internal/alias.html")),
-        ]).unwrap();
+        ])
+        .unwrap();
         tera.register_filter("markdown", filters::markdown);
         tera.register_filter("base64_encode", filters::base64_encode);
         tera.register_filter("base64_decode", filters::base64_decode);
@@ -43,12 +50,11 @@ lazy_static! {
     };
 }

 /// Renders the `internal/alias.html` template that will redirect
 /// via refresh to the url given
 pub fn render_redirect_template(url: &str, tera: &Tera) -> Result<String> {
     let mut context = Context::new();
-    context.add("url", &url);
+    context.insert("url", &url);

     tera.render("internal/alias.html", &context)
         .chain_err(|| format!("Failed to render alias for '{}'", url))
@@ -8,6 +8,8 @@ errors = { path = "../errors" }
 tera = "0.11"
 unicode-segmentation = "1.2"
 walkdir = "2"
+toml = "0.4"
+serde = "1"

 [dev-dependencies]
 tempfile = "3"
@@ -0,0 +1,53 @@ | |||||
use serde::{Deserialize, Deserializer}; | |||||
use tera::{Map, Value}; | |||||
use toml; | |||||
/// Used as an attribute when we want to convert from TOML to a string date | |||||
pub fn from_toml_datetime<'de, D>(deserializer: D) -> Result<Option<String>, D::Error> | |||||
where | |||||
D: Deserializer<'de>, | |||||
{ | |||||
toml::value::Datetime::deserialize(deserializer).map(|s| Some(s.to_string())) | |||||
} | |||||
/// Returns key/value for a converted date from TOML. | |||||
/// If the table itself is the TOML struct, only return its value without the key | |||||
fn convert_toml_date(table: Map<String, Value>) -> Value { | |||||
let mut new = Map::new(); | |||||
for (k, v) in table { | |||||
if k == "$__toml_private_datetime" { | |||||
return v; | |||||
} | |||||
match v { | |||||
Value::Object(o) => { | |||||
new.insert(k, convert_toml_date(o)); | |||||
} | |||||
_ => { | |||||
new.insert(k, v); | |||||
} | |||||
} | |||||
} | |||||
Value::Object(new) | |||||
} | |||||
/// TOML datetimes will be serialized as a struct but we want the | |||||
/// stringified version for json, otherwise they are going to be weird | |||||
pub fn fix_toml_dates(table: Map<String, Value>) -> Value { | |||||
let mut new = Map::new(); | |||||
for (key, value) in table { | |||||
match value { | |||||
Value::Object(mut o) => { | |||||
new.insert(key, convert_toml_date(o)); | |||||
} | |||||
_ => { | |||||
new.insert(key, value); | |||||
} | |||||
} | |||||
} | |||||
Value::Object(new) | |||||
} |
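
A quick illustrative check of the round trip (not part of the diff; the test name is made up, but `$__toml_private_datetime` is the key used by the code above):

```rust
#[cfg(test)]
mod tests {
    use super::fix_toml_dates;
    use tera::{Map, Value};

    #[test]
    fn stringifies_private_toml_datetimes() {
        // toml-rs serializes `date = 1979-05-27T07:32:00Z` as a
        // one-field table; `fix_toml_dates` collapses it to a string.
        let mut date = Map::new();
        date.insert(
            "$__toml_private_datetime".to_string(),
            Value::String("1979-05-27T07:32:00Z".to_string()),
        );
        let mut table = Map::new();
        table.insert("date".to_string(), Value::Object(date));

        let fixed = fix_toml_dates(table);
        assert_eq!(fixed["date"], Value::String("1979-05-27T07:32:00Z".to_string()));
    }
}
```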
@@ -1,22 +1,22 @@
 <html>
 <head>
-    <title>Gutenberg</title>
+    <title>Zola</title>
 </head>
 <body>
     <div class="container">
-        <h1>Welcome to Gutenberg!</h1>
+        <h1>Welcome to Zola!</h1>
         <p>
             You're seeing this page because we couldn't find a template to render.
         </p>
         <p>
             To modify this page, create a <b>{{filename}}</b> file in the templates directory or
-            <a href="https://www.getgutenberg.io/documentation/themes/installing-and-using-themes/" target="_blank">install a theme</a>.
+            <a href="https://www.getzola.org/documentation/themes/installing-and-using-themes/" target="_blank">install a theme</a>.
             <br>
             You can find what variables are available in this template in the <a href="{{url}}" target="_blank">documentation</a>.
         </p>
     </div>
     <footer>
-        <a href="https://www.getgutenberg.io/documentation/getting-started/cli-usage/" target="_blank">Get started with Gutenberg</a>
+        <a href="https://www.getzola.org/documentation/getting-started/cli-usage/" target="_blank">Get started with Zola</a>
     </footer>
     <style>
         html {
@@ -1,11 +1,21 @@
+use std::fs::{copy, create_dir_all, read_dir, File};
 use std::io::prelude::*;
-use std::fs::{File, create_dir_all, read_dir, copy};
 use std::path::{Path, PathBuf};
+use std::time::SystemTime;

 use walkdir::WalkDir;

 use errors::{Result, ResultExt};

+pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result<bool> {
+    let canonical_path = path
+        .canonicalize()
+        .map_err(|e| format!("Failed to canonicalize {}: {}", path.display(), e))?;
+    let canonical_parent = parent
+        .canonicalize()
+        .map_err(|e| format!("Failed to canonicalize {}: {}", parent.display(), e))?;
+
+    Ok(canonical_path.starts_with(canonical_parent))
+}

 /// Create a file with the content given
 pub fn create_file(path: &Path, content: &str) -> Result<()> {
@@ -39,6 +49,11 @@ pub fn read_file(path: &Path) -> Result<String> {
         .chain_err(|| format!("Failed to open '{:?}'", path.display()))?
         .read_to_string(&mut content)?;

+    // Remove utf-8 BOM if any.
+    if content.starts_with("\u{feff}") {
+        content.drain(..3);
+    }
+
     Ok(content)
 }
@@ -93,9 +108,24 @@ pub fn copy_directory(src: &PathBuf, dest: &PathBuf) -> Result<()> {
     Ok(())
 }

+pub fn get_file_time(path: &Path) -> Option<SystemTime> {
+    path.metadata().ok().and_then(|meta| {
+        Some(match (meta.created().ok(), meta.modified().ok()) {
+            (Some(tc), Some(tm)) => tc.max(tm),
+            (Some(tc), None) => tc,
+            (None, Some(tm)) => tm,
+            (None, None) => return None,
+        })
+    })
+}

 /// Compares source and target files' timestamps and returns true if the source file
 /// has been created _or_ updated after the target file has
-pub fn file_stale<PS, PT>(p_source: PS, p_target: PT) -> bool where PS: AsRef<Path>, PT: AsRef<Path> {
+pub fn file_stale<PS, PT>(p_source: PS, p_target: PT) -> bool
+where
+    PS: AsRef<Path>,
+    PT: AsRef<Path>,
+{
     let p_source = p_source.as_ref();
     let p_target = p_target.as_ref();
@@ -103,22 +133,12 @@ pub fn file_stale<PS, PT>(p_source: PS, p_target: PT) -> bool where PS: AsRef<Pa
         return true;
     }

-    let get_time = |path: &Path| path.metadata().ok().and_then(|meta| {
-        Some(match (meta.created().ok(), meta.modified().ok()) {
-            (Some(tc), Some(tm)) => tc.max(tm),
-            (Some(tc), None) => tc,
-            (None, Some(tm)) => tm,
-            (None, None) => return None,
-        })
-    });
-
-    let time_source = get_time(p_source);
-    let time_target = get_time(p_target);
+    let time_source = get_file_time(p_source);
+    let time_target = get_file_time(p_target);

     time_source.and_then(|ts| time_target.map(|tt| ts > tt)).unwrap_or(true)
 }

 #[cfg(test)]
 mod tests {
     use std::fs::File;
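
The new `is_path_in_directory` helper above is the kind of traversal guard `load_data` can lean on to keep `path` arguments inside the site directory. A usage sketch (illustration only; note that `canonicalize` fails if either path does not exist on disk):

```rust
use std::path::Path;

fn demo() -> Result<()> {
    let base = Path::new("content");
    // Both paths are canonicalized, so `..` segments and symlinks
    // cannot escape the parent directory.
    assert!(is_path_in_directory(base, Path::new("content/posts/hello.md"))?);
    assert!(!is_path_in_directory(base, Path::new("content/../config.toml"))?);
    Ok(())
}
```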
@@ -1,13 +1,16 @@
 #[macro_use]
 extern crate errors;
+extern crate serde;
 #[cfg(test)]
 extern crate tempfile;
 extern crate tera;
-extern crate walkdir;
+extern crate toml;
 extern crate unicode_segmentation;
+extern crate walkdir;

+pub mod de;
 pub mod fs;
+pub mod net;
 pub mod site;
 pub mod templates;
-pub mod net;
@@ -1,12 +1,10 @@
 use std::net::TcpListener;

-pub fn get_available_port() -> Option<u16> {
-    (1000..9000)
-        .find(|port| port_is_available(*port))
+pub fn get_available_port(avoid: u16) -> Option<u16> {
+    (1000..9000).find(|port| *port != avoid && port_is_available(*port))
 }

-fn port_is_available(port: u16) -> bool {
+pub fn port_is_available(port: u16) -> bool {
     match TcpListener::bind(("127.0.0.1", port)) {
         Ok(_) => true,
         Err(_) => false,
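
The extra `avoid` parameter lets a caller pick two distinct ports (presumably the dev server plus its live-reload socket). A sketch of the intended call pattern (illustration, not from the diff):

```rust
fn pick_ports() {
    // 0 is outside the scanned 1000..9000 range, so nothing is excluded yet.
    let site_port = get_available_port(0).expect("no free port in 1000..9000");
    // The second scan skips the port we just picked.
    let livereload_port = get_available_port(site_port).expect("no second free port");
    assert_ne!(site_port, livereload_port);
}
```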
@@ -14,7 +14,7 @@ pub fn get_reading_analytics(content: &str) -> (usize, usize) {

 /// Resolves an internal link (of the `./posts/something.md#hey` sort) to its absolute link
 pub fn resolve_internal_link(link: &str, permalinks: &HashMap<String, String>) -> Result<String> {
-    // First we remove the ./ since that's gutenberg specific
+    // First we remove the ./ since that's zola specific
     let clean_link = link.replacen("./", "", 1);
     // Then we remove any potential anchor
     // parts[0] will be the file path and parts[1] the anchor if present
@@ -31,12 +31,11 @@ pub fn resolve_internal_link(link: &str, permalinks: &HashMap<String, String>) -
     }
 }

 #[cfg(test)]
 mod tests {
     use std::collections::HashMap;

-    use super::{resolve_internal_link, get_reading_analytics};
+    use super::{get_reading_analytics, resolve_internal_link};

     #[test]
     fn can_resolve_valid_internal_link() {
@@ -1,54 +1,55 @@
-use tera::{Tera, Context};
+use std::collections::HashMap;
+
+use tera::{Context, Tera};

 use errors::Result;

 static DEFAULT_TPL: &str = include_str!("default_tpl.html");

 macro_rules! render_default_tpl {
-    ($filename: expr, $url: expr) => {
-        {
-            let mut context = Context::new();
-            context.add("filename", $filename);
-            context.add("url", $url);
-            Tera::one_off(DEFAULT_TPL, &context, true).map_err(|e| e.into())
-        }
-    };
+    ($filename: expr, $url: expr) => {{
+        let mut context = Context::new();
+        context.insert("filename", $filename);
+        context.insert("url", $url);
+        Tera::one_off(DEFAULT_TPL, &context, true).map_err(|e| e.into())
+    }};
 }

 /// Renders the given template with the given context, but also ensures that, if the default file
 /// is not found, it will look up for the equivalent template for the current theme if there is one.
 /// Lastly, if it's a default template (index, section or page), it will just return an empty string
 /// to avoid an error if there isn't a template with that name
-pub fn render_template(name: &str, tera: &Tera, context: &Context, theme: &Option<String>) -> Result<String> {
+pub fn render_template(
+    name: &str,
+    tera: &Tera,
+    context: &Context,
+    theme: &Option<String>,
+) -> Result<String> {
     if tera.templates.contains_key(name) {
-        return tera
-            .render(name, context)
-            .map_err(|e| e.into());
+        return tera.render(name, context).map_err(|e| e.into());
     }

     if let Some(ref t) = *theme {
-        return tera
-            .render(&format!("{}/templates/{}", t, name), context)
-            .map_err(|e| e.into());
+        return tera.render(&format!("{}/templates/{}", t, name), context).map_err(|e| e.into());
     }

     // maybe it's a default one?
     match name {
-        "index.html" | "section.html" => {
-            render_default_tpl!(name, "https://www.getgutenberg.io/documentation/templates/pages-sections/#section-variables")
-        }
-        "page.html" => {
-            render_default_tpl!(name, "https://www.getgutenberg.io/documentation/templates/pages-sections/#page-variables")
-        }
+        "index.html" | "section.html" => render_default_tpl!(
+            name,
+            "https://www.getzola.org/documentation/templates/pages-sections/#section-variables"
+        ),
+        "page.html" => render_default_tpl!(
+            name,
+            "https://www.getzola.org/documentation/templates/pages-sections/#page-variables"
+        ),
         "single.html" | "list.html" => {
-            render_default_tpl!(name, "https://www.getgutenberg.io/documentation/templates/taxonomies/")
+            render_default_tpl!(name, "https://www.getzola.org/documentation/templates/taxonomies/")
         }
-        _ => bail!("Tried to render `{}` but the template wasn't found", name)
+        _ => bail!("Tried to render `{}` but the template wasn't found", name),
     }
 }
 /// Rewrites the path from extend/macros of the theme used to ensure
 /// that they will point to the right place (theme/templates/...)
 /// Include is NOT supported as it would be a pain to add and using blocks
@@ -57,9 +58,12 @@ pub fn render_template(name: &str, tera: &Tera, context: &Context, theme: &Optio
 /// so themes shortcodes can be used.
 pub fn rewrite_theme_paths(tera: &mut Tera, theme: &str) {
     let mut shortcodes_to_move = vec![];
+    let mut templates = HashMap::new();
+    let old_templates = ::std::mem::replace(&mut tera.templates, HashMap::new());

     // We want to match the paths in the templates to the new names
-    for tpl in tera.templates.values_mut() {
+    for (key, mut tpl) in old_templates {
+        tpl.name = format!("{}/templates/{}", theme, tpl.name);
         // First the parent if there is one
         if let Some(ref p) = tpl.parent.clone() {
             tpl.parent = Some(format!("{}/templates/{}", theme, p));
@@ -74,11 +78,15 @@ pub fn rewrite_theme_paths(tera: &mut Tera, theme: &str) {
         if tpl.name.starts_with(&format!("{}/templates/shortcodes", theme)) {
             let new_name = tpl.name.replace(&format!("{}/templates/", theme), "");
-            shortcodes_to_move.push((tpl.name.clone(), new_name.clone()));
+            shortcodes_to_move.push((key, new_name.clone()));
             tpl.name = new_name;
         }
+
+        templates.insert(tpl.name.clone(), tpl);
     }

+    tera.templates = templates;
+
     // and then replace shortcodes in the Tera instance using the new names
     for (old_name, new_name) in shortcodes_to_move {
         let tpl = tera.templates.remove(&old_name).unwrap();
@@ -88,8 +96,8 @@ pub fn rewrite_theme_paths(tera: &mut Tera, theme: &str) {
 #[cfg(test)]
 mod tests {
-    use tera::Tera;
     use super::rewrite_theme_paths;
+    use tera::Tera;

     #[test]
     fn can_rewrite_all_paths_of_theme() {
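
To see what the rewritten `rewrite_theme_paths` produces, a sketch with a hypothetical theme name, in the spirit of the test above (illustration only, not part of the diff):

```rust
let mut tera = Tera::default();
tera.add_raw_template("index.html", "{% block content %}{% endblock %}").unwrap();
tera.add_raw_template("shortcodes/youtube.html", "hello").unwrap();
rewrite_theme_paths(&mut tera, "hyde");

// Regular templates are namespaced under the theme...
assert!(tera.templates.contains_key("hyde/templates/index.html"));
// ...while shortcodes keep their bare name so `{{ youtube() }}` still resolves.
assert!(tera.templates.contains_key("shortcodes/youtube.html"));
```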
@@ -0,0 +1,3 @@ | |||||
Number,Title | |||||
1,Gutenberg | |||||
2,Printing |
@@ -0,0 +1,7 @@ | |||||
{ | |||||
"key": "value", | |||||
"array": [1, 2, 3], | |||||
"subpackage": { | |||||
"subkey": 5 | |||||
} | |||||
} |
@@ -0,0 +1,3 @@ | |||||
[category] | |||||
key = "value" | |||||
date = 1979-05-27T07:32:00Z |
@@ -1,5 +1,5 @@
-base_url = "https://www.getgutenberg.io/"
-title = "Gutenberg"
+base_url = "https://www.getzola.org/"
+title = "Zola"
 description = "Everything you need to make a static site engine in one binary."

 compile_sass = true
@@ -23,10 +23,10 @@ resize_image(path, width, height, op, quality)

 ### Image processing and return value

-Gutenberg performs image processing during the build process and places the resized images in a subdirectory in the static files directory:
+Zola performs image processing during the build process and places the resized images in a subdirectory in the static files directory:

 ```
-static/_processed_images/
+static/processed_images/
 ```

 Resized images are JPEGs. Filename of each resized image is a hash of the function arguments,
@@ -17,18 +17,18 @@ if the slug already exists for that article. For example:
 ```

 ## Anchor insertion
-It is possible to have Gutenberg automatically insert anchor links next to the header, as you can see on the site you are currently
+It is possible to have Zola automatically insert anchor links next to the header, as you can see on the site you are currently
 reading if you hover a title.

 This option is set at the section level: the `insert_anchor_links` variable on the
 [Section front-matter page](./documentation/content/section.md#front-matter).

 The default template is very basic and will need CSS tweaks in your project to look decent.
 If you want to change the anchor template, it can easily be overwritten by
 creating an `anchor-link.html` file in the `templates` directory.

 ## Internal links
-Linking to other pages and their headers is so common that Gutenberg adds a
+Linking to other pages and their headers is so common that Zola adds a
 special syntax to Markdown links to handle them: start the link with `./` and point to the `.md` file you want
 to link to. The path to the file starts from the `content` directory.
@@ -4,7 +4,7 @@ weight = 10
 +++

-Gutenberg uses the folder structure to determine the site structure.
+Zola uses the folder structure to determine the site structure.
 Each folder in the `content` directory represents a [section](./documentation/content/section.md)
 that contains [pages](./documentation/content/page.md): your `.md` files.

@@ -40,7 +40,7 @@ While not shown in the example, sections can be nested indefinitely.
 ## Assets colocation

 The `content` directory is not limited to markup files though: it's natural to want to co-locate a page and some related
-assets, for instance images or spreadsheets. Gutenberg supports that pattern out of the box for both sections and pages.
+assets, for instance images or spreadsheets. Zola supports that pattern out of the box for both sections and pages.
 Any non-markdown file you add in the page/section folder will be copied alongside the generated page when building the site,
 which allows us to use a relative path to access them.

@@ -82,14 +82,14 @@ ignored_content = ["*.xlsx"]

 ## Static assets

 In addition to placing content files in the `content` directory, you may also place content
 files in the `static` directory. Any files/folders that you place in the `static` directory
 will be copied, without modification, to the public directory.

 Typically, you might put site-wide assets (such as the site favicon, site logos or site-wide
 JavaScript) in the root of the static directory. You can also place any HTML or other files that
 you wish to be included without modification (that is, without being parsed as Markdown files)
 into the static directory.

 Note that the static folder provides an _alternative_ to colocation. For example, imagine that you
 had the following directory structure (a simplified version of the structure presented above):

@@ -109,12 +109,12 @@ have three options:
   relative path from the `index.md` page. This is the approach described under **colocation**,
   above.
 * You could save the image to a `static/blog/configuration` folder and link it in exactly the
   same way as if you had colocated it. If you do this, the generated files will be identical to
   those produced by colocation; the only difference will be that all static files will be saved in the
   static folder rather than in the content folder. Depending on your organizational needs, this
   may be better or worse.
 * Or you could save the image to some arbitrary folder within the static folder. For example,
   you could save all images to `static/images`. Using this approach, you would no longer be able
   to use relative links, but could use an absolute link to `images/[filename]` to access your
   image. This might be preferable for small sites or for sites that associate images with
   multiple pages (e.g., logo images that appear on every page).
@@ -16,6 +16,10 @@ create a **page** at `[base_url]/about`).
 If the file is given any name *other* than `index.md` or `_index.md`, then it will
 create a page with that name (without the `.md`). So naming a file in the root of your
 content directory `about.md` would also create a page at `[base_url]/about`.
+Another exception to that rule is that a filename starting with a YYYY-mm-dd date followed by
+an underscore (`_`) or a dash (`-`) will use that date as the page date, unless it is already set
+in the front-matter. The page name will be anything after the `_`/`-`, so a filename like
+`2018-10-10-hello-world.md` will be available at `[base_url]/hello-world`.

 As you can see, creating an `about.md` file is exactly equivalent to creating an
 `about/index.md` file. The only difference between the two methods is that creating
@@ -24,7 +28,7 @@ the `about` folder allows you to use asset colocation, as discussed in the

 ## Front-matter

-The front-matter is a set of metadata embedded in a file. In Gutenberg,
+The front-matter is a set of metadata embedded in a file. In Zola,
 it is at the beginning of the file, surrounded by `+++` and uses TOML.

 While none of the front-matter variables are mandatory, the opening and closing `+++` are required.
@@ -42,6 +46,7 @@ description = ""
 # Do not wrap dates in quotes, the line below only indicates that there is no default date.
 # If the section variable `sort_by` is set to `date`, then any page that lacks a `date`
 # will not be rendered.
+# Setting this overrides a date set in the filename.
 date =

 # The weight as defined in the Section page
@@ -88,7 +93,7 @@ Some content

 ## Summary

-You can ask Gutenberg to create a summary if you only want to show the first
+You can ask Zola to create a summary if you only want to show the first
 paragraph of each page in a list, for example.

 To do so, add <code>&lt;!-- more --&gt;</code> in your content at the point
@@ -11,9 +11,9 @@ may be of interest:
 * The [official Sass website](http://sass-lang.com/)
 * [Why Sass?](https://alistapart.com/article/why-sass), by Dan Cederholm

-## Using Sass in Gutenberg
+## Using Sass in Zola

-Gutenberg processes any files with the `sass` or `scss` extensions in the `sass`
+Zola processes any files with the `sass` or `scss` extensions in the `sass`
 folder, and places the processed output into a `css` file with the same folder
 structure and base name into the `public` folder:

@@ -26,7 +26,7 @@ structure and base name into the `public` folder:
 ├── assets
 │   ├── fancy.scss // -> ./public/assets/fancy.css
 │   ├── same_name.scss // -> ./public/assets/same_name.css
-│   ├── same_name.sass # CONFLICT! This has the same base name as the file above, so Gutenberg will return an error.
+│   ├── same_name.sass # CONFLICT! This has the same base name as the file above, so Zola will return an error.
 │   └── _common_mixins.scss # This file won't get put into the `public` folder, but other files can @import it.
 └── secret-side-project
     └── style.scss // -> ./public/secret-side-project/style.css

@@ -38,5 +38,5 @@ folder, but can still be used as `@import` dependencies. For more information, s
 Files with the `scss` extension use ["Sassy CSS" syntax](http://sass-lang.com/documentation/#Formatting),
 while files with the `sass` extension use the ["indented" syntax](http://sass-lang.com/documentation/file.INDENTED_SYNTAX.html).
-Gutenberg will return an error if a `scss` and `sass` file exist with the same
+Zola will return an error if a `scss` and `sass` file exist with the same
 base name in the same folder to avoid confusion -- see the example above.
@@ -3,20 +3,20 @@ title = "Search"
 weight = 100
 +++

-Gutenberg can build a search index from the sections and pages content to
-be used by a JavaScript library: [elasticlunr](http://elasticlunr.com/).
+Zola can build a search index from the sections and pages content to
+be used by a JavaScript library: [elasticlunr](http://elasticlunr.com/).

-To enable it, you only need to set `build_search_index = true` in your `config.toml` and Gutenberg will
+To enable it, you only need to set `build_search_index = true` in your `config.toml` and Zola will
 generate an index for the `default_language` set for all pages not excluded from the search index.

 It is very important to set the `default_language` in your `config.toml` if you are writing a site not in
 English: the index building pipelines are very different depending on the language.

-After `gutenberg build` or `gutenberg serve`, you should see two files in your static directory:
+After `zola build` or `zola serve`, you should see two files in your static directory:

 - `search_index.${default_language}.js`: so `search_index.en.js` for a default setup
 - `elasticlunr.min.js`

-As each site will be different, Gutenberg makes no assumptions about how your search and doesn't provide
+As each site will be different, Zola makes no assumptions about how your search works and doesn't provide
 the JavaScript/CSS code to do an actual search and display results. You can, however, look at how this very site
-is implementing it to have an idea: [search.js](https://github.com/Keats/gutenberg/tree/master/docs/static/search.js).
+implements it to get an idea: [search.js](https://github.com/getzola/zola/tree/master/docs/static/search.js).
@@ -3,11 +3,11 @@ title = "Section" | |||||
weight = 20 | weight = 20 | ||||
+++ | +++ | ||||
A section is created whenever a folder (or subfolder) in the `content` section contains an | |||||
`_index.md` file. If a folder does not contain an `_index.md` file, no section will be | |||||
A section is created whenever a folder (or subfolder) in the `content` section contains an | |||||
`_index.md` file. If a folder does not contain an `_index.md` file, no section will be | |||||
created, but markdown files within that folder will still create pages (known as orphan pages). | created, but markdown files within that folder will still create pages (known as orphan pages). | ||||
The index page (i.e., the page displayed when a user browses to your `base_url`) is a section, | |||||
The index page (i.e., the page displayed when a user browses to your `base_url`) is a section, | |||||
which is created whether or not you add an `_index.md` file at the root of your `content` folder. | which is created whether or not you add an `_index.md` file at the root of your `content` folder. | ||||
If you do not create an `_index.md` file in your content directory, this main content section will | If you do not create an `_index.md` file in your content directory, this main content section will | ||||
not have any content or metadata. If you would like to add content or metadata, you can add an | not have any content or metadata. If you would like to add content or metadata, you can add an | ||||
@@ -21,7 +21,7 @@ Any non-Markdown file in the section folder is added to the `assets` collection | |||||
The `_index.md` file within a folder defines the content and metadata for that section. To set | The `_index.md` file within a folder defines the content and metadata for that section. To set | ||||
the metadata, add front matter to the file. | the metadata, add front matter to the file. | ||||
The front-matter is a set of metadata embedded in a file. In Zola,
it is at the beginning of the file, surrounded by `+++` and uses TOML. | it is at the beginning of the file, surrounded by `+++` and uses TOML. | ||||
After the closing `+++`, you can add content that will be parsed as markdown and will be available | After the closing `+++`, you can add content that will be parsed as markdown and will be available | ||||
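
A minimal, hypothetical `_index.md` putting this together (the values are illustrative):

```md
+++
title = "My blog"
sort_by = "date"
+++

Everything after the closing `+++` is rendered as markdown.
```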
@@ -49,6 +49,13 @@ weight = 0 | |||||
# Template to use to render this section page | # Template to use to render this section page | ||||
template = "section.html" | template = "section.html" | ||||
# Apply the given template to ALL pages below the section, recursively. | |||||
# If you have several nested sections each with a page_template set, the page | |||||
# will always use the closest to itself. | |||||
# However, a page's own `template` variable will always have priority.
# Not set by default | |||||
page_template = | |||||
# How many pages to be displayed per paginated page. | # How many pages to be displayed per paginated page. | ||||
# No pagination will happen if this isn't set or if the value is 0 | # No pagination will happen if this isn't set or if the value is 0 | ||||
paginate_by = 0 | paginate_by = 0 | ||||
@@ -71,11 +78,17 @@ in_search_index = true | |||||
# to be used directly | # to be used directly | ||||
render = true | render = true | ||||
# Whether to redirect when landing on that section. Defaults to not being set.
# Useful for the same reason as `render` but when you don't want a 404 when | # Useful for the same reason as `render` but when you don't want a 404 when | ||||
# landing on the root section page.
# Example: redirect_to = "documentation/content/overview" | |||||
redirect_to = "" | redirect_to = "" | ||||
# Whether the section should pass its pages on to the parent section. Defaults to `false`. | |||||
# Useful when the section shouldn't split up the parent section, like | |||||
# sections for each year under a posts section. | |||||
transparent = false | |||||
# Your own data | # Your own data | ||||
[extra] | [extra] | ||||
+++ | +++ | ||||
@@ -95,10 +108,10 @@ You can also change the pagination path (the word displayed while paginated in t | |||||
by setting the `paginate_path` variable, which defaults to `page`. | by setting the `paginate_path` variable, which defaults to `page`. | ||||
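
For instance, a sketch of the two variables together (the values are illustrative):

```toml
# In a section's _index.md front matter
paginate_by = 5
paginate_path = "page"
# The second pager page then typically lives under <section permalink>/page/2
```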
## Sorting | ## Sorting | ||||
It is very common for Zola templates to iterate over pages or sections
to display all pages/sections in a given directory. Consider a very simple
example: a `blog` directory with three files: `blog/Post_1.md`, | example: a `blog` directory with three files: `blog/Post_1.md`, | ||||
`blog/Post_2.md`, and `blog/Post_3.md`. To iterate over these posts and
create a list of links to the posts, a simple template might look like this: | create a list of links to the posts, a simple template might look like this: | ||||
```j2
{% for post in section.pages %}
  <h1><a href="{{ post.permalink }}">{{ post.title }}</a></h1>
{% endfor %}
```

This would iterate over the posts, and would do so in a specific order
based on the `sort_by` variable set in the `_index.md` page for the
containing section. The `sort_by` variable can be given three values: `date`, | containing section. The `sort_by` variable can be given three values: `date`, | ||||
`weight`, and `none`. If no `sort_by` method is set, the pages will be | `weight`, and `none`. If no `sort_by` method is set, the pages will be | ||||
sorted in the `none` order, which is not intended to be used for sorted content. | sorted in the `none` order, which is not intended to be used for sorted content. | ||||
Any page that is missing the data it needs to be sorted will be ignored and | Any page that is missing the data it needs to be sorted will be ignored and | ||||
won't be rendered. For example, if a page is missing the date variable and the
containing section sets `sort_by = "date"`, then that page will be ignored.
The terminal will warn you if this is happening. | The terminal will warn you if this is happening. | ||||
If several pages have the same date/weight/order, their permalink will be used | If several pages have the same date/weight/order, their permalink will be used | ||||
@@ -127,18 +140,18 @@ The `sort_by` front-matter variable can have the following values: | |||||
### `date` | ### `date` | ||||
This will sort all pages by their `date` field, from the most recent (at the | This will sort all pages by their `date` field, from the most recent (at the | ||||
top of the list) to the oldest (at the bottom of the list). Each page will | top of the list) to the oldest (at the bottom of the list). Each page will | ||||
get `page.earlier` and `page.later` variables that contain the pages with
earlier and later dates, respectively. | earlier and later dates, respectively. | ||||
### `weight` | ### `weight` | ||||
This will sort all pages by their `weight` field, from lightest weight
(at the top of the list) to heaviest (at the bottom of the list). Each
page gets `page.lighter` and `page.heavier` variables that contain the
pages with lighter and heavier weights, respectively. | pages with lighter and heavier weights, respectively. | ||||

When iterating through pages, you may wish to use the Tera `reverse` filter,
which reverses the order of the pages. Thus, after using the `reverse` filter,
pages sorted by weight will be sorted from heaviest (at the top) to lightest
(at the bottom); pages sorted by date will be sorted from oldest (at the top)
to newest (at the bottom). | to newest (at the bottom). | ||||
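
A short sketch of the filter in a template:

```j2
{# Heaviest pages first, assuming the section sets sort_by = "weight" #}
{% for post in section.pages | reverse %}
  <a href="{{ post.permalink }}">{{ post.title }}</a>
{% endfor %}
```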
@@ -153,8 +166,8 @@ the top of the list and the heaviest (highest `weight`) will be at the top; | |||||
the `reverse` filter reverses this order. | the `reverse` filter reverses this order. | ||||
**Note**: Unlike pages, permalinks will **not** be used to break ties between | **Note**: Unlike pages, permalinks will **not** be used to break ties between | ||||
equally weighted sections. Thus, if the `weight` variable for your section is not set (or if it
is set in a way that produces ties), then your sections will be sorted in
**random** order. Moreover, that order is determined at build time and will | **random** order. Moreover, that order is determined at build time and will | ||||
change with each site rebuild. Thus, if there is any chance that you will
iterate over your sections, you should always assign them weight. | iterate over your sections, you should always assign them weight. |
@@ -6,9 +6,10 @@ weight = 40 | |||||
While Markdown is good for writing, it isn't great when you need to write inline
HTML to add some styling, for example.

To solve this, Zola borrows the concept of [shortcodes](https://codex.wordpress.org/Shortcode_API)
from WordPress. | from WordPress. | ||||

In our case, a shortcode corresponds to a template, defined in the `templates/shortcodes` directory or built-in, that can
be used in a Markdown file. If you want to use something similar to shortcodes in your templates, try [Tera macros](https://tera.netlify.com/docs/templates/#macros).
## Writing a shortcode | ## Writing a shortcode | ||||
Let's write a shortcode to embed YouTube videos as an example. | Let's write a shortcode to embed YouTube videos as an example. | ||||
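
The template itself is elided from this hunk; it lives at `templates/shortcodes/youtube.html`, and a sketch of what it might contain (the exact markup in the docs may differ) is:

```j2
<div {% if class %}class="{{ class }}"{% endif %}>
    <iframe
        src="https://www.youtube.com/embed/{{ id }}{% if autoplay %}?autoplay=1{% endif %}"
        webkitallowfullscreen
        mozallowfullscreen
        allowfullscreen>
    </iframe>
</div>
```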
@@ -30,7 +31,7 @@ This template is very straightforward: an iframe pointing to the YouTube embed U | |||||
In terms of input, it expects at least one variable: `id`. Since the other variables | In terms of input, it expects at least one variable: `id`. Since the other variables | ||||
are in an `if` statement, we can assume they are optional.

That's it, Zola will now recognise this template as a shortcode named `youtube` (the filename minus the `.html` extension).
The markdown renderer will wrap an inline HTML node like `<a>` or `<span>` into a paragraph. If you want to disable that, | The markdown renderer will wrap an inline HTML node like `<a>` or `<span>` into a paragraph. If you want to disable that, | ||||
simply wrap your shortcode in a `div`. | simply wrap your shortcode in a `div`. | ||||
@@ -77,7 +78,7 @@ Here is a YouTube video: | |||||
An inline {{/* youtube(id="dQw4w9WgXcQ", autoplay=true, class="youtube") */}} shortcode | An inline {{/* youtube(id="dQw4w9WgXcQ", autoplay=true, class="youtube") */}} shortcode | ||||
``` | ``` | ||||

Note that if you want some content that looks like a shortcode without Zola trying to render it,
you will need to escape it by using `{{/*` and `*/}}` instead of `{{` and `}}`. | you will need to escape it by using `{{/*` and `*/}}` instead of `{{` and `}}`. | ||||
### Shortcodes with body | ### Shortcodes with body | ||||
@@ -103,14 +104,14 @@ A quote | |||||
The body of the shortcode will be automatically passed down to the rendering context as the `body` variable and needs
to be on a new line.
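
For example, with a hypothetical `quote` shortcode defined in `templates/shortcodes/quote.html` that renders `{{ body }}`, the invocation looks like this:

```md
{% quote(author="Keats") %}
A quote
{% end %}
```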

If you want some content that looks like a shortcode without Zola trying to render it,
you will need to escape it by using `{%/*` and `*/%}` instead of `{%` and `%}`. You won't need to escape | you will need to escape it by using `{%/*` and `*/%}` instead of `{%` and `%}`. You won't need to escape | ||||
anything else until the closing tag. | anything else until the closing tag. | ||||
## Built-in shortcodes | ## Built-in shortcodes | ||||
Zola comes with a few built-in shortcodes. If you want to override a default shortcode template,
simply place a `{shortcode_name}.html` file in the `templates/shortcodes` directory and Zola will
use that instead. | use that instead. | ||||
### YouTube | ### YouTube | ||||