This allows using base_url in robots.txt, to reference a sitemap for example.
@@ -670,9 +670,11 @@ impl Site { | |||||
/// Renders robots.txt | /// Renders robots.txt | ||||
pub fn render_robots(&self) -> Result<()> { | pub fn render_robots(&self) -> Result<()> { | ||||
ensure_directory_exists(&self.output_path)?; | ensure_directory_exists(&self.output_path)?; | ||||
let mut context = Context::new(); | |||||
context.insert("config", &self.config); | |||||
create_file( | create_file( | ||||
&self.output_path.join("robots.txt"), | &self.output_path.join("robots.txt"), | ||||
&render_template("robots.txt", &self.tera, &Context::new(), &self.config.theme)?, | |||||
&render_template("robots.txt", &self.tera, &context, &self.config.theme)?, | |||||
) | ) | ||||
} | } | ||||
@@ -168,6 +168,7 @@ fn can_build_site_without_live_reload() { | |||||
// robots.txt has been rendered from the template | // robots.txt has been rendered from the template | ||||
assert!(file_contains!(public, "robots.txt", "User-agent: gutenberg")); | assert!(file_contains!(public, "robots.txt", "User-agent: gutenberg")); | ||||
assert!(file_contains!(public, "robots.txt", "Sitemap: https://replace-this-with-your-url.com/sitemap.xml")); | |||||
} | } | ||||
#[test] | #[test] | ||||
@@ -6,8 +6,8 @@ weight = 70 | |||||
Gutenberg will look for a `robots.txt` file in the `templates` directory or | Gutenberg will look for a `robots.txt` file in the `templates` directory or | ||||
use the built-in one. | use the built-in one. | ||||
Robots.txt is the simplest of all templates: it doesn't take any variables | |||||
and the default is what most site want. | |||||
Robots.txt is the simplest of all templates: it only gets the config | |||||
and the default is what most sites want: | |||||
```jinja2 | ```jinja2 | ||||
User-agent: * | User-agent: * | ||||
@@ -1,2 +1,3 @@ | |||||
User-agent: gutenberg | User-agent: gutenberg | ||||
Allow: / | Allow: / | ||||
Sitemap: {{config.base_url}}/sitemap.xml |