Fix robots.txt not being loaded in Tera

Closes #443
This commit is contained in:
Vincent Prouillet 2018-09-30 19:05:56 +02:00
parent 330c4fca4d
commit a11f8232de
4 changed files with 18 additions and 3 deletions

View file

@@ -10,12 +10,13 @@
- Load table and footnote markdown extensions in `markdown` filter - Load table and footnote markdown extensions in `markdown` filter
- `get_url` now defaults to not adding a trailing slash - `get_url` now defaults to not adding a trailing slash
- Fix `--base-url` not overriding processed images URLs - Fix `--base-url` not overriding processed images URLs
- Many many times faster (x10-x20) for sites with thousands of pages - Many many times faster (x10-x40) for sites with thousands of pages
- Add more Emacs temp file to the ignored patterns in `gutenberg serve` - Add more Emacs temp file to the ignored patterns in `gutenberg serve`
- Files starting with `.` are not considered pages anymore even if they end with `.md` - Files starting with `.` are not considered pages anymore even if they end with `.md`
- `_processed_images` folder for image processing has been renamed `processed_images` to avoid issues with GitHub Pages - `_processed_images` folder for image processing has been renamed `processed_images` to avoid issues with GitHub Pages
- Syntax highlighting default was mistakenly `true`, it has been set to `false` - Syntax highlighting default was mistakenly `true`, it has been set to `false`
- Add NO_COLOR and CLICOLOR support for having colours or not in CLI output - Add NO_COLOR and CLICOLOR support for having colours or not in CLI output
- Fix `robots.txt` template not being used
## 0.4.2 (2018-09-03) ## 0.4.2 (2018-09-03)

View file

@@ -91,7 +91,6 @@ impl Site {
// Only parsing as we might be extending templates from themes and that would error // Only parsing as we might be extending templates from themes and that would error
// as we haven't loaded them yet // as we haven't loaded them yet
let mut tera = Tera::parse(&tpl_glob).chain_err(|| "Error parsing templates")?; let mut tera = Tera::parse(&tpl_glob).chain_err(|| "Error parsing templates")?;
if let Some(theme) = config.theme.clone() { if let Some(theme) = config.theme.clone() {
// Grab data from the extra section of the theme // Grab data from the extra section of the theme
config.merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"))?; config.merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"))?;
@@ -105,10 +104,15 @@ impl Site {
let theme_tpl_glob = format!( let theme_tpl_glob = format!(
"{}/{}", "{}/{}",
path.to_string_lossy().replace("\\", "/"), path.to_string_lossy().replace("\\", "/"),
format!("themes/{}/templates/**/*.html", theme) format!("themes/{}/templates/**/*.*ml", theme)
); );
let mut tera_theme = Tera::parse(&theme_tpl_glob).chain_err(|| "Error parsing templates from themes")?; let mut tera_theme = Tera::parse(&theme_tpl_glob).chain_err(|| "Error parsing templates from themes")?;
rewrite_theme_paths(&mut tera_theme, &theme); rewrite_theme_paths(&mut tera_theme, &theme);
// TODO: same as above
if theme_path.join("templates").join("robots.txt").exists() {
tera_theme.add_template_file(theme_path.join("templates").join("robots.txt"), None)?;
}
println!("{:?}", tera_theme.templates.keys().collect::<Vec<_>>());
tera_theme.build_inheritance_chains()?; tera_theme.build_inheritance_chains()?;
tera.extend(&tera_theme)?; tera.extend(&tera_theme)?;
} }
@@ -116,6 +120,12 @@ impl Site {
// the `extend` above already does it but hey // the `extend` above already does it but hey
tera.build_inheritance_chains()?; tera.build_inheritance_chains()?;
// TODO: Tera doesn't use globset right now so we can load the robots.txt as part
// of the glob above, therefore we load it manually if it exists.
if path.join("templates").join("robots.txt").exists() {
tera.add_template_file(path.join("templates").join("robots.txt"), None)?;
}
let content_path = path.join("content"); let content_path = path.join("content");
let static_path = path.join("static"); let static_path = path.join("static");
let imageproc = imageproc::Processor::new(content_path.clone(), &static_path, &config.base_url); let imageproc = imageproc::Processor::new(content_path.clone(), &static_path, &config.base_url);

View file

@@ -165,6 +165,9 @@ fn can_build_site_without_live_reload() {
assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/posts/</loc>")); assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/posts/</loc>"));
// Drafts are not in the sitemap // Drafts are not in the sitemap
assert!(!file_contains!(public, "sitemap.xml", "draft")); assert!(!file_contains!(public, "sitemap.xml", "draft"));
// robots.txt has been rendered from the template
assert!(!file_contains!(public, "robots.txt", "Hello"));
} }
#[test] #[test]

View file

@@ -0,0 +1 @@
Hello