@@ -46,6 +46,9 @@ pub struct FileInfo {
     /// For example a file at content/kb/solutions/blabla.md will have 2 components:
     /// `kb` and `solutions`
     pub components: Vec<String>,
+    /// This is `parent` + `name`, used to find content referring to the same content but in
+    /// various languages.
+    pub canonical: PathBuf,
 }
 impl FileInfo {
@@ -74,6 +77,7 @@ impl FileInfo {
             path: file_path,
             // We don't care about grand parent for pages
             grand_parent: None,
+            canonical: parent.join(&name),
             parent,
             name,
             components,
@@ -96,6 +100,7 @@ impl FileInfo {
         FileInfo {
             filename: file_path.file_name().unwrap().to_string_lossy().to_string(),
             path: file_path,
+            canonical: parent.join(&name),
             parent,
             grand_parent,
             name,
@@ -128,6 +133,7 @@ impl FileInfo {
         }
         self.name = parts.swap_remove(0);
+        self.canonical = self.parent.join(&self.name);
         let lang = parts.swap_remove(0);
         Ok(Some(lang))
@@ -145,6 +151,7 @@ impl Default for FileInfo {
             name: String::new(),
             components: vec![],
             relative: String::new(),
+            canonical: PathBuf::new(),
         }
     }
 }

@@ -7,6 +7,38 @@ use content::{Page, Section};
 use library::Library;
 use rendering::Header;
+#[derive(Clone, Debug, PartialEq, Serialize)]
+pub struct TranslatedContent<'a> {
+    lang: &'a Option<String>,
+    permalink: &'a str,
+    title: &'a Option<String>,
+}
+impl<'a> TranslatedContent<'a> {
+    // copypaste eh, not worth creating an enum imo
+    pub fn find_all_sections(section: &'a Section, library: &'a Library) -> Vec<Self> {
+        let mut translations = vec![];
+        for key in &section.translations {
+            let other = library.get_section_by_key(*key);
+            translations.push(TranslatedContent { lang: &other.lang, permalink: &other.permalink, title: &other.meta.title });
+        }
+        translations
+    }
+    pub fn find_all_pages(page: &'a Page, library: &'a Library) -> Vec<Self> {
+        let mut translations = vec![];
+        for key in &page.translations {
+            let other = library.get_page_by_key(*key);
+            translations.push(TranslatedContent { lang: &other.lang, permalink: &other.permalink, title: &other.meta.title });
+        }
+        translations
+    }
+}
 #[derive(Clone, Debug, PartialEq, Serialize)]
 pub struct SerializingPage<'a> {
     relative_path: &'a str,
@@ -35,6 +67,7 @@ pub struct SerializingPage<'a> {
     heavier: Option<Box<SerializingPage<'a>>>,
     earlier: Option<Box<SerializingPage<'a>>>,
     later: Option<Box<SerializingPage<'a>>>,
+    translations: Vec<TranslatedContent<'a>>,
 }
 impl<'a> SerializingPage<'a> {
@@ -67,6 +100,8 @@ impl<'a> SerializingPage<'a> {
             .map(|k| library.get_section_by_key(*k).file.relative.clone())
             .collect();
+        let translations = TranslatedContent::find_all_pages(page, library);
         SerializingPage {
             relative_path: &page.file.relative,
             ancestors,
@@ -94,6 +129,7 @@ impl<'a> SerializingPage<'a> {
             heavier,
             earlier,
             later,
+            translations,
         }
     }
@@ -116,6 +152,12 @@ impl<'a> SerializingPage<'a> {
             vec![]
         };
+        let translations = if let Some(ref lib) = library {
+            TranslatedContent::find_all_pages(page, lib)
+        } else {
+            vec![]
+        };
         SerializingPage {
             relative_path: &page.file.relative,
             ancestors,
@@ -143,6 +185,7 @@ impl<'a> SerializingPage<'a> {
             heavier: None,
             earlier: None,
             later: None,
+            translations,
         }
     }
 }
@@ -165,6 +208,7 @@ pub struct SerializingSection<'a> {
     assets: &'a [String],
     pages: Vec<SerializingPage<'a>>,
     subsections: Vec<&'a str>,
+    translations: Vec<TranslatedContent<'a>>,
 }
 impl<'a> SerializingSection<'a> {
@@ -185,6 +229,7 @@ impl<'a> SerializingSection<'a> {
             .iter()
             .map(|k| library.get_section_by_key(*k).file.relative.clone())
             .collect();
+        let translations = TranslatedContent::find_all_sections(section, library);
         SerializingSection {
             relative_path: &section.file.relative,
@@ -203,6 +248,7 @@ impl<'a> SerializingSection<'a> {
             lang: &section.lang,
             pages,
             subsections,
+            translations,
         }
     }
@@ -218,6 +264,12 @@ impl<'a> SerializingSection<'a> {
             vec![]
         };
+        let translations = if let Some(ref lib) = library {
+            TranslatedContent::find_all_sections(section, lib)
+        } else {
+            vec![]
+        };
         SerializingSection {
             relative_path: &section.file.relative,
             ancestors,
@@ -235,6 +287,7 @@ impl<'a> SerializingSection<'a> {
             lang: &section.lang,
             pages: vec![],
             subsections: vec![],
+            translations,
         }
     }
 }

@@ -22,18 +22,21 @@ pub struct Library {
     /// All the sections of the site
     sections: DenseSlotMap<Section>,
     /// A mapping path -> key for pages so we can easily get their key
-    paths_to_pages: HashMap<PathBuf, Key>,
+    pub paths_to_pages: HashMap<PathBuf, Key>,
     /// A mapping path -> key for sections so we can easily get their key
     pub paths_to_sections: HashMap<PathBuf, Key>,
+    /// Whether we need to look for translations
+    is_multilingual: bool,
 }
 impl Library {
-    pub fn new(cap_pages: usize, cap_sections: usize) -> Self {
+    pub fn new(cap_pages: usize, cap_sections: usize, is_multilingual: bool) -> Self {
         Library {
             pages: DenseSlotMap::with_capacity(cap_pages),
             sections: DenseSlotMap::with_capacity(cap_sections),
             paths_to_pages: HashMap::with_capacity(cap_pages),
             paths_to_sections: HashMap::with_capacity(cap_sections),
+            is_multilingual,
         }
     }
@@ -116,10 +119,10 @@ impl Library {
                 continue;
             }
             if let Some(section_key) =
-                self.paths_to_sections.get(&path.join(&section.file.filename))
-            {
-                parents.push(*section_key);
-            }
+                self.paths_to_sections.get(&path.join(&section.file.filename))
+            {
+                parents.push(*section_key);
+            }
         }
         ancestors.insert(section.file.path.clone(), parents);
     }
@@ -169,6 +172,7 @@ impl Library {
             }
         }
+        self.populate_translations();
         self.sort_sections_pages();
         let sections = self.paths_to_sections.clone();
@@ -188,7 +192,8 @@ impl Library {
         }
     }
-    /// Sort all sections pages
+    /// Sort all sections pages according to sorting method given
+    /// Pages that cannot be sorted are set to the section.ignored_pages instead
     pub fn sort_sections_pages(&mut self) {
         let mut updates = HashMap::new();
         for (key, section) in &self.sections {
@@ -268,6 +273,52 @@ impl Library {
         }
     }
+    /// Finds all the translations for each section/page and set the `translations`
+    /// field of each as needed
+    /// A no-op for sites without multiple languages
+    fn populate_translations(&mut self) {
+        if !self.is_multilingual {
+            return;
+        }
+        // Sections first
+        let mut sections_translations = HashMap::new();
+        for (key, section) in &self.sections {
+            sections_translations
+                .entry(section.file.canonical.clone()) // TODO: avoid this clone
+                .or_insert_with(Vec::new)
+                .push(key);
+        }
+        for (key, section) in self.sections.iter_mut() {
+            let translations = &sections_translations[&section.file.canonical];
+            if translations.len() == 1 {
+                section.translations = vec![];
+                continue;
+            }
+            section.translations = translations.iter().filter(|k| **k != key).cloned().collect();
+        }
+        // Same thing for pages
+        let mut pages_translations = HashMap::new();
+        for (key, page) in &self.pages {
+            pages_translations
+                .entry(page.file.canonical.clone()) // TODO: avoid this clone
+                .or_insert_with(Vec::new)
+                .push(key);
+        }
+        for (key, page) in self.pages.iter_mut() {
+            let translations = &pages_translations[&page.file.canonical];
+            if translations.len() == 1 {
+                page.translations = vec![];
+                continue;
+            }
+            page.translations = translations.iter().filter(|k| **k != key).cloned().collect();
+        }
+    }
     /// Find all the orphan pages: pages that are in a folder without an `_index.md`
     pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
         let pages_in_sections =

@@ -254,7 +254,7 @@ mod tests {
     }
     fn create_library(is_index: bool) -> (Section, Library) {
-        let mut library = Library::new(3, 0);
+        let mut library = Library::new(3, 0, false);
         library.insert_page(Page::default());
         library.insert_page(Page::default());
         library.insert_page(Page::default());

@@ -227,7 +227,7 @@ mod tests {
     #[test]
     fn can_make_taxonomies() {
         let mut config = Config::default();
-        let mut library = Library::new(2, 0);
+        let mut library = Library::new(2, 0, false);
         config.taxonomies = vec![
             TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
@@ -307,7 +307,7 @@ mod tests {
     #[test]
     fn errors_on_unknown_taxonomy() {
         let mut config = Config::default();
-        let mut library = Library::new(2, 0);
+        let mut library = Library::new(2, 0, false);
         config.taxonomies =
             vec![TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }];

@@ -141,7 +141,7 @@ impl Site {
             taxonomies: Vec::new(),
             permalinks: HashMap::new(),
             // We will allocate it properly later on
-            library: Library::new(0, 0),
+            library: Library::new(0, 0, false),
         };
         Ok(site)
@@ -173,7 +173,7 @@ impl Site {
     }
     pub fn set_base_url(&mut self, base_url: String) {
-        let mut imageproc = self.imageproc.lock().unwrap();
+        let mut imageproc = self.imageproc.lock().expect("Couldn't lock imageproc (set_base_url)");
         imageproc.set_base_url(&base_url);
         self.config.base_url = base_url;
     }
@@ -189,14 +189,14 @@ impl Site {
         let content_glob = format!("{}/{}", base_path, "content/**/*.md");
         let (section_entries, page_entries): (Vec<_>, Vec<_>) = glob(&content_glob)
-            .unwrap()
+            .expect("Invalid glob")
            .filter_map(|e| e.ok())
            .filter(|e| !e.as_path().file_name().unwrap().to_str().unwrap().starts_with('.'))
            .partition(|entry| {
                entry.as_path().file_name().unwrap().to_str().unwrap().starts_with("_index.")
            });
-        self.library = Library::new(page_entries.len(), section_entries.len());
+        self.library = Library::new(page_entries.len(), section_entries.len(), self.config.is_multilingual());
         let sections = {
             let config = &self.config;
@@ -452,12 +452,12 @@ impl Site {
     }
     pub fn num_img_ops(&self) -> usize {
-        let imageproc = self.imageproc.lock().unwrap();
+        let imageproc = self.imageproc.lock().expect("Couldn't lock imageproc (num_img_ops)");
         imageproc.num_img_ops()
     }
     pub fn process_images(&self) -> Result<()> {
-        let mut imageproc = self.imageproc.lock().unwrap();
+        let mut imageproc = self.imageproc.lock().expect("Couldn't lock imageproc (process_images)");
         imageproc.prune()?;
         imageproc.do_process()
     }
@@ -497,7 +497,7 @@ impl Site {
         // Copy any asset we found previously into the same directory as the index.html
         for asset in &page.assets {
             let asset_path = asset.as_path();
-            copy(&asset_path, &current_path.join(asset_path.file_name().unwrap()))?;
+            copy(&asset_path, &current_path.join(asset_path.file_name().expect("Couldn't get filename from page asset")))?;
         }
         Ok(())
@@ -626,7 +626,7 @@ impl Site {
     ) -> Result<Vec<(PathBuf, PathBuf)>> {
         let glob_string = format!("{}/**/*.{}", sass_path.display(), extension);
         let files = glob(&glob_string)
-            .unwrap()
+            .expect("Invalid glob for sass")
            .filter_map(|e| e.ok())
            .filter(|entry| {
                !entry.as_path().file_name().unwrap().to_string_lossy().starts_with('_')
@@ -920,7 +920,7 @@ impl Site {
         // Copy any asset we found previously into the same directory as the index.html
         for asset in &section.assets {
             let asset_path = asset.as_path();
-            copy(&asset_path, &output_path.join(asset_path.file_name().expect("Failed to get asset filename for section")))?;
+            copy(&asset_path, &output_path.join(asset_path.file_name().expect("Failed to get asset filename for section")))?;
         }
         if render_pages {
@@ -957,7 +957,7 @@ impl Site {
     /// Used only on reload
     pub fn render_index(&self) -> Result<()> {
         self.render_section(
-            &self.library.get_section(&self.content_path.join("_index.md")).unwrap(),
+            &self.library.get_section(&self.content_path.join("_index.md")).expect("Failed to get index section"),
             false,
         )
     }

@@ -27,10 +27,10 @@ macro_rules! file_contains {
         for component in $path.split("/") {
             path = path.join(component);
         }
-        let mut file = std::fs::File::open(&path).unwrap();
+        let mut file = std::fs::File::open(&path).expect(&format!("Failed to open {:?}", $path));
         let mut s = String::new();
         file.read_to_string(&mut s).unwrap();
-        // println!("{}", s);
+        println!("{}", s);
         s.contains($text)
     }};
 }
@@ -45,7 +45,7 @@ pub fn build_site(name: &str) -> (Site, TempDir, PathBuf) {
     let tmp_dir = tempdir().expect("create temp dir");
     let public = &tmp_dir.path().join("public");
     site.set_output_path(&public);
-    site.build().unwrap();
+    site.build().expect("Couldn't build the site");
     (site, tmp_dir, public.clone())
 }
@@ -64,6 +64,6 @@ where
     let tmp_dir = tempdir().expect("create temp dir");
     let public = &tmp_dir.path().join("public");
     site.set_output_path(&public);
-    site.build().unwrap();
+    site.build().expect("Couldn't build the site");
     (site, tmp_dir, public.clone())
 }

@@ -70,10 +70,21 @@ fn can_build_multilingual_site() {
     assert!(file_exists!(public, "base/index.html"));
     assert!(file_exists!(public, "fr/base/index.html"));
-    // Sections are there as well
+    // Sections are there as well, with translations info
     assert!(file_exists!(public, "blog/index.html"));
+    assert!(file_contains!(public, "blog/index.html", "Translated in fr: Mon blog https://example.com/fr/blog/"));
+    assert!(file_contains!(public, "blog/index.html", "Translated in it: Il mio blog https://example.com/it/blog/"));
     assert!(file_exists!(public, "fr/blog/index.html"));
     assert!(file_contains!(public, "fr/blog/index.html", "Language: fr"));
+    assert!(file_contains!(public, "fr/blog/index.html", "Translated in : My blog https://example.com/blog/"));
+    assert!(file_contains!(public, "fr/blog/index.html", "Translated in it: Il mio blog https://example.com/it/blog/"));
+    // Normal pages are there with the translations
+    assert!(file_exists!(public, "blog/something/index.html"));
+    assert!(file_contains!(public, "blog/something/index.html", "Translated in fr: Quelque chose https://example.com/fr/blog/something/"));
+    assert!(file_exists!(public, "fr/blog/something/index.html"));
+    assert!(file_contains!(public, "fr/blog/something/index.html", "Language: fr"));
+    assert!(file_contains!(public, "fr/blog/something/index.html", "Translated in : Something https://example.com/blog/something/"));
     // sitemap contains all languages
     assert!(file_exists!(public, "sitemap.xml"));

@@ -296,7 +296,7 @@ mod tests {
     #[test]
     fn can_get_taxonomy() {
         let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
-        let library = Library::new(0, 0);
+        let library = Library::new(0, 0, false);
         let tag = TaxonomyItem::new("Programming", "tags", &Config::default(), vec![], &library);
         let tags = Taxonomy { kind: taxo_config, items: vec![tag] };
@@ -335,7 +335,7 @@ mod tests {
     #[test]
     fn can_get_taxonomy_url() {
         let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
-        let library = Library::new(0, 0);
+        let library = Library::new(0, 0, false);
         let tag = TaxonomyItem::new("Programming", "tags", &Config::default(), vec![], &library);
         let tags = Taxonomy { kind: taxo_config, items: vec![tag] };

@@ -52,7 +52,9 @@ ancestors: Array<String>;
 // The relative path from the `content` directory to the markdown file
 relative_path: String;
 // The language for the page if there is one
-lang: String?
+lang: String?;
+// Information about all the available languages for that content
+translations: Array<TranslatedContent>;
 ```
 ## Section variables
@@ -96,7 +98,9 @@ ancestors: Array<String>;
 // The relative path from the `content` directory to the markdown file
 relative_path: String;
 // The language for the section if there is one
-lang: String?
+lang: String?;
+// Information about all the available languages for that content
+translations: Array<TranslatedContent>;
 ```
 ## Table of contents
@@ -116,3 +120,19 @@ permalink: String;
 // All lower level headers below this header
 children: Array<Header>;
 ```
+
+## Translated content
+
+Both page and section have a `translations` field which corresponds to an array of `TranslatedContent`. If your site is not using multiple languages,
+this will always be an empty array.
+
+A `TranslatedContent` has the following fields:
+
+```ts
+// The language code for that content, empty if it is the default language
+lang: String?;
+// The title of that content if there is one
+title: String?;
+// A permalink to that content
+permalink: String;
+```
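
For illustration, a theme template could surface these fields roughly the way the test templates in this change do. This is a minimal sketch, not prescribed markup: the `<ul>` wrapper is an assumption, while `page.translations`, `t.lang`, `t.title` and `t.permalink` are the fields documented above, and the `default` filter on `lang` simply mirrors the test templates.

```jinja2
{# Link to every other language this page is available in.
   `t.lang` is empty for the default-language version; the test templates
   in this change pipe it through default(value=config.default_language). #}
<ul>
{% for t in page.translations %}
  <li>
    <a href="{{ t.permalink | safe }}">
      {{ t.title }} ({{ t.lang | default(value=config.default_language) }})
    </a>
  </li>
{% endfor %}
</ul>
```
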
@@ -1,4 +1,5 @@
 +++
+title = "Mon blog"
 sort_by = "date"
 insert_anchors = "right"
 +++

@@ -1,4 +1,5 @@
 +++
+title = "Il mio blog"
 sort_by = "date"
 insert_anchors = "right"
 +++

@@ -1,4 +1,5 @@
 +++
+title = "My blog"
 sort_by = "date"
 insert_anchors = "left"
 +++

@@ -1,2 +1,8 @@
 {{page.title}}
 {{page.content | safe}}
+Language: {{lang}}
+{% for t in page.translations %}
+Translated in {{t.lang|default(value=config.default_language)}}: {{t.title}} {{t.permalink|safe}}
+{% endfor %}

@@ -2,3 +2,7 @@
 {{page.title}}
 {% endfor %}
 Language: {{lang}}
+{% for t in section.translations %}
+Translated in {{t.lang|default(value=config.default_language)}}: {{t.title}} {{t.permalink|safe}}
+{% endfor %}