Permalinks and sitemap

Vincent Prouillet committed 7 years ago (branch: index-subcmd)
commit 434a7fcde8
4 changed files with 125 additions and 49 deletions
  1. src/config.rs (+15, -0)
  2. src/page.rs (+81, -46)
  3. src/site.rs (+19, -3)
  4. src/templates/sitemap.xml (+10, -0)

src/config.rs (+15, -0)

@@ -61,6 +61,21 @@ impl Config {
         }
     }
 }
 
+impl Default for Config {
+    /// Exists for testing purposes
+    fn default() -> Config {
+        Config {
+            title: "".to_string(),
+            base_url: "http://a-website.com/".to_string(),
+            highlight_code: Some(true),
+            description: None,
+            language_code: Some("en".to_string()),
+            disable_rss: Some(false),
+            extra: None,
+        }
+    }
+}
+
 
 /// Get and parse the config.
 /// If it doesn't succeed, exit
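Aside (not part of the commit): the Default impl above exists so the page tests further down can build a Config without parsing a config.toml. A minimal sketch of that pattern, mirroring how those tests override base_url; the module and test names here are illustrative only:

// Illustrative sketch, not part of this diff: using the new Default impl in a test.
#[cfg(test)]
mod config_default_sketch {
    use config::Config;

    #[test]
    fn default_can_be_overridden_per_test() {
        let mut conf = Config::default();
        // Override only what the test cares about, e.g. the base URL.
        conf.base_url = "http://hello.com".to_string();
        // The remaining fields keep the defaults set above.
        assert_eq!(conf.highlight_code, Some(true));
        assert_eq!(conf.language_code, Some("en".to_string()));
    }
}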


src/page.rs (+81, -46)

@@ -42,6 +42,15 @@ pub struct Page {
     pub content: String,
     /// The front matter meta-data
     pub meta: FrontMatter,
+
+    /// The slug of that page.
+    /// First tries to find the slug in the meta and defaults to filename otherwise
+    pub slug: String,
+    /// The relative URL of the page
+    pub url: String,
+    /// The full URL for that page
+    pub permalink: String,
+
     /// The previous page, by date
     pub previous: Option<Box<Page>>,
     /// The next page, by date
@@ -57,35 +66,15 @@ impl Page {
             sections: vec![],
             raw_content: "".to_string(),
             content: "".to_string(),
+            slug: "".to_string(),
+            url: "".to_string(),
+            permalink: "".to_string(),
             meta: meta,
             previous: None,
             next: None,
         }
     }
 
-
-    /// Get the slug for the page.
-    /// First tries to find the slug in the meta and defaults to filename otherwise
-    pub fn get_slug(&self) -> String {
-        if let Some(ref slug) = self.meta.slug {
-            slug.to_string()
-        } else {
-            slugify(self.filename.clone())
-        }
-    }
-
-    /// Get the URL (without the base URL) to that page
-    pub fn get_url(&self) -> String {
-        if let Some(ref u) = self.meta.url {
-            return u.to_string();
-        }
-
-        if !self.sections.is_empty() {
-            return format!("/{}/{}", self.sections.join("/"), self.get_slug());
-        }
-
-        format!("/{}", self.get_slug())
-    }
-
     // Get word count and estimated reading time
     pub fn get_reading_analytics(&self) -> (usize, usize) {
         // Only works for latin language but good enough for a start
@@ -99,7 +88,7 @@ impl Page {
     // Parse a page given the content of the .md file
     // Files without front matter or with invalid front matter are considered
     // erroneous
-    pub fn parse(filepath: &str, content: &str) -> Result<Page> {
+    pub fn parse(filepath: &str, content: &str, config: &Config) -> Result<Page> {
         // 1. separate front matter from content
         if !PAGE_RE.is_match(content) {
             bail!("Couldn't find front matter in `{}`. Did you forget to add `+++`?", filepath);
@@ -124,23 +113,43 @@ impl Page {
             cmark::html::push_html(&mut html, parser);
             html
         };
+        let path = Path::new(filepath);
+        page.filename = path.file_stem().expect("Couldn't get filename").to_string_lossy().to_string();
+        page.slug = {
+            if let Some(ref slug) = page.meta.slug {
+                slug.to_string()
+            } else {
+                slugify(page.filename.clone())
+            }
+        };
 
         // 4. Find sections
         // Pages with custom urls exists outside of sections
-        if page.meta.url.is_none() {
-            let path = Path::new(filepath);
-            page.filename = path.file_stem().expect("Couldn't get filename").to_string_lossy().to_string();
-
+        if let Some(ref u) = page.meta.url {
+            page.url = u.to_string();
+        } else {
             // find out if we have sections
             for section in path.parent().unwrap().components() {
                 page.sections.push(section.as_ref().to_string_lossy().to_string());
             }
+
+            if !page.sections.is_empty() {
+                page.url = format!("{}/{}", page.sections.join("/"), page.slug);
+            } else {
+                page.url = format!("{}", page.slug);
+            }
         }
+        page.permalink = if config.base_url.ends_with("/") {
+            format!("{}{}", config.base_url, page.url)
+        } else {
+            format!("{}/{}", config.base_url, page.url)
+        };
+
 
         Ok(page)
     }
 
 
-    pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Page> {
+    pub fn from_file<P: AsRef<Path>>(path: P, config: &Config) -> Result<Page> {
         let path = path.as_ref();
 
         let mut content = String::new();
@@ -150,7 +159,7 @@ impl Page {
 
         // Remove the content string from name
         // Maybe get a path as an arg instead and use strip_prefix?
-        Page::parse(&path.strip_prefix("content").unwrap().to_string_lossy(), &content)
+        Page::parse(&path.strip_prefix("content").unwrap().to_string_lossy(), &content, config)
     }
 
     fn get_layout_name(&self) -> String {
@@ -174,13 +183,14 @@ impl Page {
 
 impl ser::Serialize for Page {
     fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: ser::Serializer {
-        let mut state = serializer.serialize_struct("page", 12)?;
+        let mut state = serializer.serialize_struct("page", 13)?;
         state.serialize_field("content", &self.content)?;
         state.serialize_field("title", &self.meta.title)?;
         state.serialize_field("description", &self.meta.description)?;
         state.serialize_field("date", &self.meta.date)?;
-        state.serialize_field("slug", &self.get_slug())?;
-        state.serialize_field("url", &self.get_url())?;
+        state.serialize_field("slug", &self.slug)?;
+        state.serialize_field("url", &format!("/{}", self.url))?;
+        state.serialize_field("permalink", &self.permalink)?;
         state.serialize_field("tags", &self.meta.tags)?;
         state.serialize_field("draft", &self.meta.draft)?;
         state.serialize_field("category", &self.meta.category)?;
@@ -222,6 +232,7 @@ impl PartialOrd for Page {
 #[cfg(test)]
 mod tests {
     use super::{Page};
+    use config::Config;
 
 
     #[test]
@@ -233,7 +244,7 @@ description = "hey there"
 slug = "hello-world"
 +++
 Hello world"#;
-        let res = Page::parse("post.md", content);
+        let res = Page::parse("post.md", content, &Config::default());
         assert!(res.is_ok());
         let page = res.unwrap();


@@ -252,7 +263,7 @@ description = "hey there"
 slug = "hello-world"
 +++
 Hello world"#;
-        let res = Page::parse("posts/intro.md", content);
+        let res = Page::parse("posts/intro.md", content, &Config::default());
         assert!(res.is_ok());
         let page = res.unwrap();
         assert_eq!(page.sections, vec!["posts".to_string()]);
@@ -267,7 +278,7 @@ description = "hey there"
 slug = "hello-world"
 +++
 Hello world"#;
-        let res = Page::parse("posts/intro/start.md", content);
+        let res = Page::parse("posts/intro/start.md", content, &Config::default());
         assert!(res.is_ok());
         let page = res.unwrap();
         assert_eq!(page.sections, vec!["posts".to_string(), "intro".to_string()]);
@@ -282,14 +293,36 @@ description = "hey there"
 slug = "hello-world"
 +++
 Hello world"#;
-        let res = Page::parse("posts/intro/start.md", content);
+        let mut conf = Config::default();
+        conf.base_url = "http://hello.com/".to_string();
+        let res = Page::parse("posts/intro/start.md", content, &conf);
+        assert!(res.is_ok());
+        let page = res.unwrap();
+        assert_eq!(page.url, "posts/intro/hello-world");
+        assert_eq!(page.permalink, "http://hello.com/posts/intro/hello-world");
+    }
+
+    #[test]
+    fn test_can_make_permalink_with_non_trailing_slash_base_url() {
+        let content = r#"
++++
+title = "Hello"
+description = "hey there"
+slug = "hello-world"
++++
+Hello world"#;
+        let mut conf = Config::default();
+        conf.base_url = "http://hello.com".to_string();
+        let res = Page::parse("posts/intro/start.md", content, &conf);
         assert!(res.is_ok());
         let page = res.unwrap();
-        assert_eq!(page.get_url(), "/posts/intro/hello-world");
+        assert_eq!(page.url, "posts/intro/hello-world");
+        println!("{}", page.permalink);
+        assert_eq!(page.permalink, format!("{}{}", conf.base_url, "/posts/intro/hello-world"));
     }
 
     #[test]
-    fn test_can_make_url_from_sections_and_slug_root() {
+    fn test_can_make_url_from_slug_only() {
         let content = r#"
 +++
 title = "Hello"
@@ -297,10 +330,11 @@ description = "hey there"
 slug = "hello-world"
 +++
 Hello world"#;
-        let res = Page::parse("start.md", content);
+        let res = Page::parse("start.md", content, &Config::default());
         assert!(res.is_ok());
         let page = res.unwrap();
-        assert_eq!(page.get_url(), "/hello-world");
+        assert_eq!(page.url, "hello-world");
+        assert_eq!(page.permalink, format!("{}{}", Config::default().base_url, "hello-world"));
     }
 
     #[test]
@@ -311,7 +345,7 @@ description = "hey there"
 slug = "hello-world"
 +++
 Hello world"#;
-        let res = Page::parse("start.md", content);
+        let res = Page::parse("start.md", content, &Config::default());
         assert!(res.is_err());
     }


@@ -323,10 +357,11 @@ title = "Hello"
 description = "hey there"
 +++
 Hello world"#;
-        let res = Page::parse("file with space.md", content);
+        let res = Page::parse("file with space.md", content, &Config::default());
         assert!(res.is_ok());
         let page = res.unwrap();
-        assert_eq!(page.get_slug(), "file-with-space");
+        assert_eq!(page.slug, "file-with-space");
+        assert_eq!(page.permalink, format!("{}{}", Config::default().base_url, "file-with-space"));
     }
 
     #[test]
@@ -337,7 +372,7 @@ title = "Hello"
 description = "hey there"
 +++
 Hello world"#;
-        let res = Page::parse("file with space.md", content);
+        let res = Page::parse("file with space.md", content, &Config::default());
         assert!(res.is_ok());
         let page = res.unwrap();
         let (word_count, reading_time) = page.get_reading_analytics();
@@ -356,7 +391,7 @@ Hello world"#.to_string();
         for _ in 0..1000 {
             content.push_str(" Hello world");
         }
-        let res = Page::parse("hello.md", &content);
+        let res = Page::parse("hello.md", &content, &Config::default());
         assert!(res.is_ok());
         let page = res.unwrap();
         let (word_count, reading_time) = page.get_reading_analytics();
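Aside (not part of the commit): the heart of the page.rs change is that slug, url and permalink are now computed once inside Page::parse instead of on demand through get_slug/get_url. A condensed, standalone sketch of that derivation; derive_urls is a hypothetical helper name, and slugify is the same slug-crate function the diff calls:

// Condensed sketch of the derivation now done inside Page::parse (illustrative only).
use slug::slugify;

fn derive_urls(
    filename: &str,
    sections: &[String],
    meta_slug: Option<&str>,
    meta_url: Option<&str>,
    base_url: &str,
) -> (String, String, String) {
    // Slug: the front-matter `slug` wins, otherwise slugify the filename.
    let slug = match meta_slug {
        Some(s) => s.to_string(),
        None => slugify(filename),
    };

    // URL: a custom front-matter `url` bypasses sections entirely.
    let url = match meta_url {
        Some(u) => u.to_string(),
        None if !sections.is_empty() => format!("{}/{}", sections.join("/"), slug),
        None => slug.clone(),
    };

    // Permalink: join base_url and url without producing a double slash.
    let permalink = if base_url.ends_with('/') {
        format!("{}{}", base_url, url)
    } else {
        format!("{}/{}", base_url, url)
    };

    (slug, url, permalink)
}

With base_url "http://hello.com" and sections ["posts", "intro"], this yields the "posts/intro/hello-world" url and "http://hello.com/posts/intro/hello-world" permalink asserted in the tests above.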


src/site.rs (+19, -3)

@@ -41,13 +41,13 @@ impl Site {
         // First step: do all the articles and group article by sections
         // hardcoded pattern so can't error
         for entry in glob("content/**/*.md").unwrap().filter_map(|e| e.ok()) {
-            let page = Page::from_file(&entry.as_path())?;
+            let page = Page::from_file(&entry.as_path(), &self.config)?;
 
             for section in &page.sections {
-                self.sections.entry(section.clone()).or_insert(vec![]).push(page.get_slug());
+                self.sections.entry(section.clone()).or_insert(vec![]).push(page.slug.clone());
             }
 
-            self.pages.insert(page.get_slug(), page);
+            self.pages.insert(page.slug.clone(), page);
         }
 
         Ok(())
@@ -65,6 +65,8 @@ impl Site {
         html
     }
 
+    /// Re-parse and re-generate the site
+    /// Very dumb for now, ideally it would only rebuild what changed
     pub fn rebuild(&mut self) -> Result<()> {
         self.parse_site()?;
         self.build()
@@ -124,6 +126,20 @@ impl Site {
         let index = self.templates.render("index.html", &context)?;
         create_file(public.join("index.html"), &self.inject_livereload(index))?;
 
+        self.render_sitemap()?;
+
+        Ok(())
+    }
+
+    pub fn render_sitemap(&self) -> Result<()> {
+        let tpl = String::from_utf8(include_bytes!("templates/sitemap.xml").to_vec()).unwrap();
+        let mut context = Context::new();
+        context.add("pages", &self.pages.values().collect::<Vec<&Page>>());
+        let sitemap = Tera::one_off(&tpl, &context, false)?;
+
+        let public = Path::new("public");
+        create_file(public.join("sitemap.xml"), &sitemap)?;
+
         Ok(())
     }
 }

src/templates/sitemap.xml (+10, -0)

@@ -0,0 +1,10 @@
+<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
+    {% for page in pages %}
+    <url>
+        <loc>{{ page.permalink }}</loc>
+        {% if page.date %}
+        <lastmod>{{ page.date }}</lastmod>
+        {% endif %}
+    </url>
+    {% endfor %}
+</urlset>
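Aside (not part of the commit): this template is embedded into the binary via include_bytes! and rendered by the one-off Tera call in render_sitemap above, producing one <url> entry per page, with <loc> taken from page.permalink and an optional <lastmod> from page.date. A rough sketch of that rendering path in isolation, returning the XML string instead of writing public/sitemap.xml; the function name is hypothetical, and Page, Result and the tera items are assumed to be the same ones used in src/site.rs:

// Illustrative sketch mirroring render_sitemap above, minus the file write.
// `Page` and `Result` are the crate's own types, as used in src/site.rs.
use tera::{Context, Tera};

fn sitemap_string(pages: Vec<&Page>) -> Result<String> {
    // The template ships inside the binary, so there is no file lookup at runtime.
    let tpl = String::from_utf8(include_bytes!("templates/sitemap.xml").to_vec()).unwrap();
    let mut context = Context::new();
    context.add("pages", &pages);
    // One-off render: the sitemap template is not registered in the Tera instance.
    let sitemap = Tera::one_off(&tpl, &context, false)?;
    Ok(sitemap)
}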
