@@ -195,9 +195,7 @@ impl Config {
             format!("{}/", self.base_url)
         } else if self.base_url.ends_with('/') && path.starts_with('/') {
             format!("{}{}{}", self.base_url, &path[1..], trailing_bit)
-        } else if self.base_url.ends_with('/') {
-            format!("{}{}{}", self.base_url, path, trailing_bit)
-        } else if path.starts_with('/') {
+        } else if self.base_url.ends_with('/') || path.starts_with('/') {
             format!("{}{}{}", self.base_url, path, trailing_bit)
         } else {
             format!("{}/{}{}", self.base_url, path, trailing_bit)
@@ -103,16 +103,14 @@ impl Page {
         page.slug = {
             if let Some(ref slug) = page.meta.slug {
                 slug.trim().to_string()
-            } else {
-                if page.file.name == "index" {
-                    if let Some(parent) = page.file.path.parent() {
-                        slugify(parent.file_name().unwrap().to_str().unwrap())
-                    } else {
-                        slugify(page.file.name.clone())
-                    }
+            } else if page.file.name == "index" {
+                if let Some(parent) = page.file.path.parent() {
+                    slugify(parent.file_name().unwrap().to_str().unwrap())
                 } else {
                     slugify(page.file.name.clone())
                 }
+            } else {
+                slugify(page.file.name.clone())
+            }
             }
         };
@@ -212,8 +212,8 @@ impl Section {
             subsections,
             pages: vec![],
             ignored_pages: vec![],
-            word_count: self.word_count.clone(),
-            reading_time: self.reading_time.clone(),
+            word_count: self.word_count,
+            reading_time: self.reading_time,
         }
     }
 }
@@ -138,12 +138,10 @@ impl<'a> Paginator<'a> {
             let pager_path = if self.is_index {
                 page_path
+            } else if self.path.ends_with('/') {
+                format!("{}{}", self.path, page_path)
             } else {
-                if self.path.ends_with("/") {
-                    format!("{}{}", self.path, page_path)
-                } else {
-                    format!("{}/{}", self.path, page_path)
-                }
+                format!("{}/{}", self.path, page_path)
             };
             pagers.push(Pager::new(
@@ -107,27 +107,25 @@ fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()>
             site.permalinks.remove(&s.file.relative);
             site.populate_sections();
         }
-    } else {
-        if let Some(p) = site.pages.remove(path) {
-            site.permalinks.remove(&p.file.relative);
+    } else if let Some(p) = site.pages.remove(path) {
+        site.permalinks.remove(&p.file.relative);

-            if !p.meta.taxonomies.is_empty() {
-                site.populate_taxonomies()?;
-            }
+        if !p.meta.taxonomies.is_empty() {
+            site.populate_taxonomies()?;
+        }

-            // if there is a parent section, we will need to re-render it
-            // most likely
-            if find_parent_section(site, &p).is_some() {
-                site.populate_sections();
-            }
-        };
+        // if there is a parent section, we will need to re-render it
+        // most likely
+        if find_parent_section(site, &p).is_some() {
+            site.populate_sections();
+        }
     }

     // Ensure we have our fn updated so it doesn't contain the permalink(s)/section/page deleted
     site.register_tera_global_fns();
     // Deletion is something that doesn't happen all the time so we
     // don't need to optimise it too much
-    return site.build();
+    site.build()
 }

 /// Handles a `_index.md` (a section) being edited in some ways
@@ -164,24 +162,21 @@ fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> {
                     }
                 };
             }
-            return Ok(());
+            Ok(())
         }
         // New section, only render that one
         None => {
             site.populate_sections();
             site.register_tera_global_fns();
-            return site.render_section(&site.sections[path], true);
+            site.render_section(&site.sections[path], true)
         }
-    };
+    }
 }

 macro_rules! render_parent_section {
     ($site: expr, $path: expr) => {
-        match find_parent_section($site, &$site.pages[$path]) {
-            Some(s) => {
-                $site.render_section(s, false)?;
-            },
-            None => (),
+        if let Some(s) = find_parent_section($site, &$site.pages[$path]) {
+            $site.render_section(s, false)?;
         };
     }
 }
@@ -293,12 +288,10 @@ pub fn after_content_change(site: &mut Site, path: &Path) -> Result<()> {
         } else {
             handle_page_editing(site, path)
         }
+    } else if index.exists() {
+        handle_page_editing(site, &index)
     } else {
-        if index.exists() {
-            handle_page_editing(site, &index)
-        } else {
-            Ok(())
-        }
+        Ok(())
     }
 }
@@ -325,7 +318,7 @@ pub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> {
             // because we have no clue which one needs rebuilding
             // TODO: look if there the shortcode is used in the markdown instead of re-rendering
             // everything
-            if path.components().collect::<Vec<_>>().contains(&Component::Normal("shortcodes".as_ref())) {
+            if path.components().any(|x| x == Component::Normal("shortcodes".as_ref())) {
                 site.render_markdown()?;
             }
             site.populate_sections();
@@ -20,7 +20,7 @@ const CONTINUE_READING: &str = "<p><a name=\"continue-reading\"></a></p>\n";
 pub struct Rendered {
     pub body: String,
     pub summary_len: Option<usize>,
-    pub toc: Vec<Header>
+    pub toc: Vec<Header>,
 }

 // We might have cases where the slug is already present in our list of anchor
@@ -41,7 +41,7 @@ fn find_anchor(anchors: &[String], name: String, level: u8) -> String {
 }

 fn is_colocated_asset_link(link: &str) -> bool {
-    !link.contains("/") // http://, ftp://, ../ etc
+    !link.contains('/') // http://, ftp://, ../ etc
         && !link.starts_with("mailto:")
 }
@@ -159,22 +159,20 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
                         }
                     } else if is_colocated_asset_link(&link) {
                         format!("{}{}", context.current_page_permalink, link)
-                    } else {
-                        if context.config.check_external_links
-                            && !link.starts_with('#')
-                            && !link.starts_with("mailto:") {
-                            let res = check_url(&link);
-                            if res.is_valid() {
-                                link.to_string()
-                            } else {
-                                error = Some(
-                                    format!("Link {} is not valid: {}", link, res.message()).into()
-                                );
-                                String::new()
-                            }
-                        } else {
+                    } else if context.config.check_external_links
+                        && !link.starts_with('#')
+                        && !link.starts_with("mailto:") {
+                        let res = check_url(&link);
+                        if res.is_valid() {
+                            link.to_string()
+                        } else {
+                            error = Some(
+                                format!("Link {} is not valid: {}", link, res.message()).into()
+                            );
+                            String::new()
+                        }
+                    } else {
                         link.to_string()
-                        }
                     };

                     if in_header {
@@ -237,12 +235,12 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
     }

     if let Some(e) = error {
-        return Err(e)
+        return Err(e);
     } else {
         Ok(Rendered {
             summary_len: if has_summary { html.find(CONTINUE_READING) } else { None },
             body: html,
-            toc: make_table_of_contents(&headers)
+            toc: make_table_of_contents(&headers),
         })
     }
 }
@@ -84,7 +84,7 @@ fn parse_shortcode_call(pair: Pair<Rule>) -> (String, Map<String, Value>) {
 }

-fn render_shortcode(name: String, args: Map<String, Value>, context: &RenderContext, body: Option<&str>) -> Result<String> {
+fn render_shortcode(name: &str, args: &Map<String, Value>, context: &RenderContext, body: Option<&str>) -> Result<String> {
     let mut tera_context = Context::new();
     for (key, value) in args.iter() {
         tera_context.insert(key, value);
@@ -138,7 +138,7 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
             Rule::text | Rule::text_in_ignored_body_sc | Rule::text_in_body_sc => res.push_str(p.into_span().as_str()),
             Rule::inline_shortcode => {
                 let (name, args) = parse_shortcode_call(p);
-                res.push_str(&render_shortcode(name, args, context, None)?);
+                res.push_str(&render_shortcode(&name, &args, context, None)?);
             }
             Rule::shortcode_with_body => {
                 let mut inner = p.into_inner();
@@ -146,7 +146,7 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
                 // we don't care about the closing tag
                 let (name, args) = parse_shortcode_call(inner.next().unwrap());
                 let body = inner.next().unwrap().into_span().as_str();
-                res.push_str(&render_shortcode(name, args, context, Some(body))?);
+                res.push_str(&render_shortcode(&name, &args, context, Some(body))?);
             }
             Rule::ignored_inline_shortcode => {
                 res.push_str(
@@ -15,7 +15,7 @@ use content::Section;
 use errors::Result;

-pub const ELASTICLUNR_JS: &'static str = include_str!("elasticlunr.min.js");
+pub const ELASTICLUNR_JS: &str = include_str!("elasticlunr.min.js");

 lazy_static! {
     static ref AMMONIA: ammonia::Builder<'static> = {
@@ -63,7 +63,7 @@ fn add_section_to_index(index: &mut Index, section: &Section) {
     if section.meta.redirect_to.is_none() {
         index.add_doc(
             &section.permalink,
-            &[&section.meta.title.clone().unwrap_or(String::new()), &AMMONIA.clean(&section.content).to_string()],
+            &[&section.meta.title.clone().unwrap_or_default(), &AMMONIA.clean(&section.content).to_string()],
         );
     }
@@ -74,7 +74,7 @@ fn add_section_to_index(index: &mut Index, section: &Section) {
         index.add_doc(
             &page.permalink,
-            &[&page.meta.title.clone().unwrap_or(String::new()), &AMMONIA.clean(&page.content).to_string()],
+            &[&page.meta.title.clone().unwrap_or_default(), &AMMONIA.clean(&page.content).to_string()],
         );
     }
 }
@@ -188,7 +188,7 @@ impl Site {
         let (section_entries, page_entries): (Vec<_>, Vec<_>) = glob(&content_glob)
             .unwrap()
             .filter_map(|e| e.ok())
-            .filter(|e| !e.as_path().file_name().unwrap().to_str().unwrap().starts_with("."))
+            .filter(|e| !e.as_path().file_name().unwrap().to_str().unwrap().starts_with('.'))
             .partition(|entry| entry.as_path().file_name().unwrap() == "_index.md");

         let sections = {
@@ -271,7 +271,7 @@ impl Site {
         // This is needed in the first place because of silly borrow checker
         let mut pages_insert_anchors = HashMap::new();
-        for (_, p) in &self.pages {
+        for p in self.pages.values() {
             pages_insert_anchors.insert(p.file.path.clone(), self.find_parent_section_insert_anchor(&p.file.parent.clone()));
         }
@@ -579,10 +579,10 @@ impl Site {
         let mut options = SassOptions::default();
         options.output_style = OutputStyle::Compressed;
-        let mut compiled_paths = self.compile_sass_glob(&sass_path, "scss", options.clone())?;
+        let mut compiled_paths = self.compile_sass_glob(&sass_path, "scss", &options.clone())?;

         options.indented_syntax = true;
-        compiled_paths.extend(self.compile_sass_glob(&sass_path, "sass", options)?);
+        compiled_paths.extend(self.compile_sass_glob(&sass_path, "sass", &options)?);

         compiled_paths.sort();
         for window in compiled_paths.windows(2) {
@@ -599,7 +599,7 @@ impl Site {
         Ok(())
     }

-    fn compile_sass_glob(&self, sass_path: &Path, extension: &str, options: SassOptions) -> Result<Vec<(PathBuf, PathBuf)>> {
+    fn compile_sass_glob(&self, sass_path: &Path, extension: &str, options: &SassOptions) -> Result<Vec<(PathBuf, PathBuf)>> {
         let glob_string = format!("{}/**/*.{}", sass_path.display(), extension);
         let files = glob(&glob_string)
             .unwrap()
@@ -127,7 +127,7 @@ pub fn find_taxonomies(config: &Config, all_pages: Vec<&Page>) -> Result<Vec<Tax
             if taxonomies_def.contains_key(name) {
                 all_taxonomies
                     .entry(name)
-                    .or_insert_with(|| HashMap::new());
+                    .or_insert_with(HashMap::new);

                 for v in val {
                     all_taxonomies.get_mut(name)
@@ -46,7 +46,7 @@ pub fn make_trans(config: Config) -> GlobalFn {
             String,
             args.get("lang"),
             "`trans`: `lang` must be a string."
-        ).unwrap_or(default_lang.clone());
+        ).unwrap_or_else(|| default_lang.clone());
         let translations = &translations_config[lang.as_str()];
         Ok(to_value(&translations[key.as_str()]).unwrap())
     })
@@ -122,7 +122,7 @@ pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> Glob
         } else {
             // anything else
             let mut permalink = config.make_permalink(&path);
-            if !trailing_slash && permalink.ends_with("/") {
+            if !trailing_slash && permalink.ends_with('/') {
                 permalink.pop(); // Removes the slash
             }
@@ -153,7 +153,7 @@ pub fn make_get_taxonomy(all_taxonomies: Vec<Taxonomy>) -> GlobalFn {
             ),
         };

-        return Ok(to_value(container).unwrap());
+        Ok(to_value(container).unwrap())
     })
 }
@@ -217,7 +217,7 @@ pub fn make_resize_image(imageproc: Arc<Mutex<imageproc::Processor>>) -> GlobalF
             String,
             args.get("op"),
             "`resize_image`: `op` must be a string"
-        ).unwrap_or(DEFAULT_OP.to_string());
+        ).unwrap_or_else(|| DEFAULT_OP.to_string());
         let quality = optional_arg!(
             u8,
             args.get("quality"),
@@ -8,7 +8,7 @@ use prompt::{ask_bool, ask_url};
 use console;

-const CONFIG: &'static str = r#"
+const CONFIG: &str = r#"
 # The URL the site will be built for
 base_url = "%BASE_URL%"
@@ -56,7 +56,7 @@ enum ChangeKind {
 // Also, commenting out the lines 330-340 (containing `e instanceof ProtocolError`) was needed
 // as it seems their build didn't work well and didn't include ProtocolError so it would error on
 // errors
-const LIVE_RELOAD: &'static str = include_str!("livereload.js");
+const LIVE_RELOAD: &str = include_str!("livereload.js");

 struct NotFoundHandler {
     rendered_template: PathBuf,
@@ -276,7 +276,7 @@ pub fn serve(interface: &str, port: &str, output_dir: &str, base_url: &str, conf
                         rebuild_done_handling(&broadcaster, site.compile_sass(&site.base_path), &p.to_string_lossy());
                     },
                     (ChangeKind::Config, _) => {
-                        console::info(&format!("-> Config changed. The whole site will be reloaded. The browser needs to be refreshed to make the changes visible."));
+                        console::info("-> Config changed. The whole site will be reloaded. The browser needs to be refreshed to make the changes visible.");
                         site = create_new_site(interface, port, output_dir, base_url, config_file).unwrap().0;
                     }
                 };
@@ -99,8 +99,8 @@ pub fn unravel_errors(message: &str, error: &Error) {
 /// Check whether to output colors
 fn has_color() -> bool {
-    let use_colors = env::var("CLICOLOR").unwrap_or("1".to_string()) != "0" && !env::var("NO_COLOR").is_ok();
-    let force_colors = env::var("CLICOLOR_FORCE").unwrap_or("0".to_string()) != "0";
+    let use_colors = env::var("CLICOLOR").unwrap_or_else(|_| "1".to_string()) != "0" && env::var("NO_COLOR").is_err();
+    let force_colors = env::var("CLICOLOR_FORCE").unwrap_or_else(|_|"0".to_string()) != "0";

     force_colors || use_colors && atty::is(atty::Stream::Stdout)
 }