From a11f8232decf54f8e6b445a6e914fb2d13fcb53e Mon Sep 17 00:00:00 2001
From: Vincent Prouillet
Date: Sun, 30 Sep 2018 19:05:56 +0200
Subject: [PATCH] Fix robots.txt not being loaded in Tera

Closes #443
---
 CHANGELOG.md                   |  3 ++-
 components/site/src/lib.rs     | 14 ++++++++++++--
 components/site/tests/site.rs  |  3 +++
 test_site/templates/robots.txt |  1 +
 4 files changed, 18 insertions(+), 3 deletions(-)
 create mode 100644 test_site/templates/robots.txt

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 28ef41af..b2e189f7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,12 +10,13 @@
 - Load table and footnote markdown extensions in `markdown` filter
 - `get_url` now defaults to not adding a trailing slash
 - Fix `--base-url` not overriding processed images URLs
-- Many many times faster (x10-x20) for sites with thousands of pages
+- Many many times faster (x10-x40) for sites with thousands of pages
 - Add more Emacs temp file to the ignored patterns in `gutenberg serve`
 - Files starting with `.` are not considered pages anymore even if they end with `.md`
 - `_processed_images` folder for image processing has been renamed `processed_images` to avoid issues with GitHub Pages
 - Syntax highlighting default was mistakenly `true`, it has been set to `false`
 - Add NO_COLOR and CLICOLOR support for having colours or not in CLI output
+- Fix `robots.txt` template not being used
 
 ## 0.4.2 (2018-09-03)
 
diff --git a/components/site/src/lib.rs b/components/site/src/lib.rs
index afdf12db..4d3d8ba4 100644
--- a/components/site/src/lib.rs
+++ b/components/site/src/lib.rs
@@ -91,7 +91,6 @@ impl Site {
         // Only parsing as we might be extending templates from themes and that would error
         // as we haven't loaded them yet
         let mut tera = Tera::parse(&tpl_glob).chain_err(|| "Error parsing templates")?;
-
         if let Some(theme) = config.theme.clone() {
             // Grab data from the extra section of the theme
             config.merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"))?;
@@ -105,10 +104,15 @@ impl Site {
             let theme_tpl_glob = format!(
                 "{}/{}",
                 path.to_string_lossy().replace("\\", "/"),
-                format!("themes/{}/templates/**/*.html", theme)
+                format!("themes/{}/templates/**/*.*ml", theme)
             );
             let mut tera_theme = Tera::parse(&theme_tpl_glob).chain_err(|| "Error parsing templates from themes")?;
             rewrite_theme_paths(&mut tera_theme, &theme);
+            // TODO: same as below
+            if theme_path.join("templates").join("robots.txt").exists() {
+                tera_theme.add_template_file(theme_path.join("templates").join("robots.txt"), None)?;
+            }
+            println!("{:?}", tera_theme.templates.keys().collect::<Vec<_>>());
             tera_theme.build_inheritance_chains()?;
             tera.extend(&tera_theme)?;
         }
@@ -116,6 +120,12 @@ impl Site {
         // the `extend` above already does it but hey
         tera.build_inheritance_chains()?;
 
+        // TODO: Tera doesn't use globset right now so we can't load the robots.txt as part
+        // of the glob above, therefore we load it manually if it exists.
+        if path.join("templates").join("robots.txt").exists() {
+            tera.add_template_file(path.join("templates").join("robots.txt"), None)?;
+        }
+
         let content_path = path.join("content");
         let static_path = path.join("static");
         let imageproc = imageproc::Processor::new(content_path.clone(), &static_path, &config.base_url);
diff --git a/components/site/tests/site.rs b/components/site/tests/site.rs
index a1c79735..0c6a3078 100644
--- a/components/site/tests/site.rs
+++ b/components/site/tests/site.rs
@@ -165,6 +165,9 @@ fn can_build_site_without_live_reload() {
     assert!(file_contains!(public, "sitemap.xml", "https://replace-this-with-your-url.com/posts/"));
     // Drafts are not in the sitemap
    assert!(!file_contains!(public, "sitemap.xml", "draft"));
+
+    // robots.txt has been rendered from the template
+    assert!(file_contains!(public, "robots.txt", "Hello"));
 }
 
 #[test]
diff --git a/test_site/templates/robots.txt b/test_site/templates/robots.txt
new file mode 100644
index 00000000..e965047a
--- /dev/null
+++ b/test_site/templates/robots.txt
@@ -0,0 +1 @@
+Hello
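
Note (not part of the patch): Tera's glob loader only picks up `*.html`/`*.*ml` templates, which is why `robots.txt` has to be registered by hand in the hunks above. The following is a minimal standalone sketch of that workaround against the current Tera API; the `load_templates` function, the `templates/` layout and the explicit template name are illustrative assumptions, not code from this repository.

use std::path::Path;

use tera::{Context, Tera};

fn load_templates(root: &Path) -> tera::Result<Tera> {
    // Glob-load the regular HTML templates first.
    let glob = format!("{}/templates/**/*.html", root.to_string_lossy().replace("\\", "/"));
    let mut tera = Tera::parse(&glob)?;

    // robots.txt does not match the glob, so add it manually if it exists.
    let robots = root.join("templates").join("robots.txt");
    if robots.exists() {
        tera.add_template_file(&robots, Some("robots.txt"))?;
    }

    tera.build_inheritance_chains()?;
    Ok(tera)
}

fn main() -> tera::Result<()> {
    let tera = load_templates(Path::new("."))?;
    // The manually registered template renders like any other one.
    println!("{}", tera.render("robots.txt", &Context::new())?);
    Ok(())
}

Passing an explicit name to `add_template_file` keeps the render call independent of how Tera derives a template name from the path when `None` is given, which is one place this kind of manual registration can trip up.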