Fix colocated dates + rustfmt

Closes #607
This commit is contained in:
Vincent Prouillet 2019-02-09 19:54:46 +01:00
parent a42e6dfec4
commit 9bc675f2a7
22 changed files with 331 additions and 212 deletions

13
Cargo.lock generated
View file

@ -229,9 +229,10 @@ dependencies = [
[[package]]
name = "bincode"
version = "1.0.1"
version = "1.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.87 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -1372,7 +1373,7 @@ dependencies = [
[[package]]
name = "notify"
version = "4.0.8"
version = "4.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2260,7 +2261,7 @@ name = "syntect"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"bincode 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"flate2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -3040,7 +3041,7 @@ dependencies = [
"errors 0.1.0",
"front_matter 0.1.0",
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"notify 4.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
"notify 4.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
"rebuild 0.1.0",
"site 0.1.0",
"termcolor 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
@ -3069,7 +3070,7 @@ dependencies = [
"checksum backtrace-sys 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "797c830ac25ccc92a7f8a7b9862bde440715531514594a6154e3d4a54dd769b6"
"checksum base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0b25d992356d2eb0ed82172f5248873db5560c4721f564b13cb5193bda5e668e"
"checksum base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "489d6c0ed21b11d038c31b6ceccca973e65d73ba3bd8ecb9a2babf5546164643"
"checksum bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9f2fb9e29e72fd6bc12071533d5dc7664cb01480c59406f656d7ac25c7bd8ff7"
"checksum bincode 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "58470ad6460f0b0e89b0df5f17b8bd77ebae26af69dca0bd9ddc8b9e38abb2ff"
"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
"checksum bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "228047a76f468627ca71776ecdebd732a3423081fcf5125585bcd7c49886ce12"
"checksum block-buffer 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a076c298b9ecdb530ed9d967e74a6027d6a7478924520acddcddc24c1c8ab3ab"
@ -3191,7 +3192,7 @@ dependencies = [
"checksum nix 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d37e713a259ff641624b6cb20e3b12b2952313ba36b6823c0f16e6cfd9e5de17"
"checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945"
"checksum nom 4.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b30adc557058ce00c9d0d7cb3c6e0b5bc6f36e2e2eabe74b0ba726d194abd588"
"checksum notify 4.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c9b605e417814e88bb051c88a84f83655d6ad4fa32fc36d9a96296d86087692d"
"checksum notify 4.0.9 (registry+https://github.com/rust-lang/crates.io-index)" = "9cc7ed2bd4b7edad3ee93b659c38e53dabb619f7274e127a0fab054ad2bb998d"
"checksum num-derive 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d9fe8fcafd1b86a37ce8a1cfa15ae504817e0c8c2e7ad42767371461ac1d316d"
"checksum num-integer 0.1.39 (registry+https://github.com/rust-lang/crates.io-index)" = "e83d528d2677f0518c570baf2b7abdcf0cd2d248860b68507bdcb3e91d4c0cea"
"checksum num-iter 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "af3fdbbc3291a5464dc57b03860ec37ca6bf915ed6ee385e7c6c052c422b2124"

View file

@ -32,10 +32,8 @@ impl StdError for Error {
let mut source = self.source.as_ref().map(|c| &**c);
if source.is_none() {
match self.kind {
ErrorKind::Tera(ref err) => {
source = err.source()
},
_ => ()
ErrorKind::Tera(ref err) => source = err.source(),
_ => (),
};
}
@ -68,7 +66,6 @@ impl Error {
}
}
impl From<&str> for Error {
fn from(e: &str) -> Self {
Self::msg(e)

View file

@ -12,7 +12,7 @@ extern crate toml;
extern crate errors;
extern crate utils;
use errors::{Result, Error};
use errors::{Error, Result};
use regex::Regex;
use std::path::Path;
@ -72,7 +72,10 @@ pub fn split_section_content(
) -> Result<(SectionFrontMatter, String)> {
let (front_matter, content) = split_content(file_path, content)?;
let meta = SectionFrontMatter::parse(&front_matter).map_err(|e| {
Error::chain(format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()), e)
Error::chain(
format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()),
e,
)
})?;
Ok((meta, content))
}
@ -82,7 +85,10 @@ pub fn split_section_content(
/// Splits the raw text of a page file into its parsed front matter and its
/// Markdown body.
///
/// `file_path` is only used to build a helpful error message when the front
/// matter fails to parse; the actual text to split is `content`.
///
/// Returns the parsed `PageFrontMatter` together with the remaining content,
/// or an error chained onto the front-matter parse failure.
pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontMatter, String)> {
    let (front_matter, content) = split_content(file_path, content)?;
    let meta = PageFrontMatter::parse(&front_matter).map_err(|e| {
        Error::chain(
            format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()),
            e,
        )
    })?;
    Ok((meta, content))
}

View file

@ -20,7 +20,7 @@ use image::{FilterType, GenericImageView};
use rayon::prelude::*;
use regex::Regex;
use errors::{Result, Error};
use errors::{Error, Result};
use utils::fs as ufs;
static RESIZED_SUBDIR: &'static str = "processed_images";

View file

@ -8,7 +8,7 @@ use slug::slugify;
use tera::{Context as TeraContext, Tera};
use config::Config;
use errors::{Result, Error};
use errors::{Error, Result};
use front_matter::{split_page_content, InsertAnchor, PageFrontMatter};
use library::Library;
use rendering::{render_content, Header, RenderContext};
@ -126,7 +126,16 @@ impl Page {
page.reading_time = Some(reading_time);
let mut slug_from_dated_filename = None;
if let Some(ref caps) = RFC3339_DATE.captures(&page.file.name.replace(".md", "")) {
let file_path = if page.file.name == "index" {
if let Some(parent) = page.file.path.parent() {
parent.file_name().unwrap().to_str().unwrap().to_string()
} else {
page.file.name.replace(".md", "")
}
} else {
page.file.name.replace(".md", "")
};
if let Some(ref caps) = RFC3339_DATE.captures(&file_path) {
slug_from_dated_filename = Some(caps.name("slug").unwrap().as_str().to_string());
if page.meta.date.is_none() {
page.meta.date = Some(caps.name("datetime").unwrap().as_str().to_string());
@ -139,7 +148,11 @@ impl Page {
slug.trim().to_string()
} else if page.file.name == "index" {
if let Some(parent) = page.file.path.parent() {
if let Some(slug) = slug_from_dated_filename {
slugify(&slug)
} else {
slugify(parent.file_name().unwrap().to_str().unwrap())
}
} else {
slugify(&page.file.name)
}
@ -233,8 +246,9 @@ impl Page {
context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None));
let res = render_content(&self.raw_content, &context)
.map_err(|e| Error::chain(format!("Failed to render content of {}", self.file.path.display()), e))?;
let res = render_content(&self.raw_content, &context).map_err(|e| {
Error::chain(format!("Failed to render content of {}", self.file.path.display()), e)
})?;
self.summary = res.summary_len.map(|l| res.body[0..l].to_owned());
self.content = res.body;
@ -257,8 +271,9 @@ impl Page {
context.insert("page", &self.to_serialized(library));
context.insert("lang", &self.lang);
render_template(&tpl_name, tera, context, &config.theme)
.map_err(|e| Error::chain(format!("Failed to render page '{}'", self.file.path.display()), e))
render_template(&tpl_name, tera, context, &config.theme).map_err(|e| {
Error::chain(format!("Failed to render page '{}'", self.file.path.display()), e)
})
}
/// Creates a vectors of asset URLs.
@ -499,6 +514,31 @@ Hello world
assert_eq!(page.permalink, "http://a-website.com/posts/hey/");
}
// Regression test for https://github.com/getzola/zola/issues/607:
// a colocated page (`index.md` plus assets) living in a directory whose
// name starts with an RFC 3339 date must take its date and slug from the
// directory name, not from the literal file name "index".
#[test]
fn page_with_assets_and_date_in_folder_name() {
    let tmp_dir = tempdir().expect("create temp dir");
    let content_dir = tmp_dir.path().join("content");
    create_dir(&content_dir).expect("create content temp dir");
    let posts_dir = content_dir.join("posts");
    create_dir(&posts_dir).expect("create posts temp dir");
    let page_dir = posts_dir.join("2013-06-02_with-assets");
    create_dir(&page_dir).expect("create nested temp dir");

    let mut index_file = File::create(page_dir.join("index.md")).unwrap();
    index_file.write_all(b"+++\n\n+++\n").unwrap();
    // Colocated assets that must be picked up alongside the page.
    for asset in &["example.js", "graph.jpg", "fail.png"] {
        File::create(page_dir.join(asset)).unwrap();
    }

    let res = Page::from_file(page_dir.join("index.md").as_path(), &Config::default());
    assert!(res.is_ok());
    let page = res.unwrap();
    assert_eq!(page.file.parent, posts_dir);
    assert_eq!(page.slug, "with-assets");
    assert_eq!(page.meta.date, Some("2013-06-02".to_string()));
    assert_eq!(page.assets.len(), 3);
    assert_eq!(page.permalink, "http://a-website.com/posts/with-assets/");
}
#[test]
fn page_with_ignored_assets_filters_out_correct_files() {
let tmp_dir = tempdir().expect("create temp dir");

View file

@ -5,7 +5,7 @@ use slotmap::Key;
use tera::{Context as TeraContext, Tera};
use config::Config;
use errors::{Result, Error};
use errors::{Error, Result};
use front_matter::{split_section_content, SectionFrontMatter};
use rendering::{render_content, Header, RenderContext};
use utils::fs::{find_related_assets, read_file};
@ -171,8 +171,9 @@ impl Section {
context.tera_context.insert("section", &SerializingSection::from_section_basic(self, None));
let res = render_content(&self.raw_content, &context)
.map_err(|e| Error::chain(format!("Failed to render content of {}", self.file.path.display()), e))?;
let res = render_content(&self.raw_content, &context).map_err(|e| {
Error::chain(format!("Failed to render content of {}", self.file.path.display()), e)
})?;
self.content = res.body;
self.toc = res.toc;
Ok(())
@ -189,8 +190,9 @@ impl Section {
context.insert("section", &self.to_serialized(library));
context.insert("lang", &self.lang);
render_template(tpl_name, tera, context, &config.theme)
.map_err(|e| Error::chain(format!("Failed to render section '{}'", self.file.path.display()), e))
render_template(tpl_name, tera, context, &config.theme).map_err(|e| {
Error::chain(format!("Failed to render section '{}'", self.file.path.display()), e)
})
}
/// Is this the index section?

View file

@ -5,9 +5,9 @@ use slotmap::{DenseSlotMap, Key};
use front_matter::SortBy;
use config::Config;
use content::{Page, Section};
use sorting::{find_siblings, sort_pages_by_date, sort_pages_by_weight};
use config::Config;
/// Houses everything about pages and sections
/// Think of it as a database where each page and section has an id (Key here)

View file

@ -4,7 +4,7 @@ use slotmap::Key;
use tera::{to_value, Context, Tera, Value};
use config::Config;
use errors::{Result, Error};
use errors::{Error, Result};
use utils::templates::render_template;
use content::{Section, SerializingPage, SerializingSection};

View file

@ -5,7 +5,7 @@ use slug::slugify;
use tera::{Context, Tera};
use config::{Config, Taxonomy as TaxonomyConfig};
use errors::{Result, Error};
use errors::{Error, Result};
use utils::templates::render_template;
use content::SerializingPage;
@ -48,7 +48,13 @@ pub struct TaxonomyItem {
}
impl TaxonomyItem {
pub fn new(name: &str, taxonomy: &TaxonomyConfig, config: &Config, keys: Vec<Key>, library: &Library) -> Self {
pub fn new(
name: &str,
taxonomy: &TaxonomyConfig,
config: &Config,
keys: Vec<Key>,
library: &Library,
) -> Self {
// Taxonomy are almost always used for blogs so we filter by dates
// and it's not like we can sort things across sections by anything other
// than dates
@ -145,7 +151,9 @@ impl Taxonomy {
context.insert("current_path", &format!("/{}/{}", self.kind.name, item.slug));
render_template(&format!("{}/single.html", self.kind.name), tera, context, &config.theme)
.map_err(|e| Error::chain(format!("Failed to render single term {} page.", self.kind.name), e))
.map_err(|e| {
Error::chain(format!("Failed to render single term {} page.", self.kind.name), e)
})
}
pub fn render_all_terms(
@ -164,7 +172,9 @@ impl Taxonomy {
context.insert("current_path", &self.kind.name);
render_template(&format!("{}/list.html", self.kind.name), tera, context, &config.theme)
.map_err(|e| Error::chain(format!("Failed to render a list of {} page.", self.kind.name), e))
.map_err(|e| {
Error::chain(format!("Failed to render a list of {} page.", self.kind.name), e)
})
}
pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> {
@ -232,7 +242,7 @@ mod tests {
use super::*;
use std::collections::HashMap;
use config::{Config, Taxonomy as TaxonomyConfig, Language};
use config::{Config, Language, Taxonomy as TaxonomyConfig};
use content::Page;
use library::Library;
@ -242,9 +252,21 @@ mod tests {
let mut library = Library::new(2, 0, false);
config.taxonomies = vec![
TaxonomyConfig { name: "categories".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() },
TaxonomyConfig { name: "tags".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() },
TaxonomyConfig { name: "authors".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() },
TaxonomyConfig {
name: "categories".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
},
TaxonomyConfig {
name: "tags".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
},
TaxonomyConfig {
name: "authors".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
},
];
let mut page1 = Page::default();
@ -324,8 +346,11 @@ mod tests {
let mut config = Config::default();
let mut library = Library::new(2, 0, false);
config.taxonomies =
vec![TaxonomyConfig { name: "authors".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() }];
config.taxonomies = vec![TaxonomyConfig {
name: "authors".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
}];
let mut page1 = Page::default();
let mut taxo_page1 = HashMap::new();
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
@ -346,13 +371,25 @@ mod tests {
#[test]
fn can_make_taxonomies_in_multiple_languages() {
let mut config = Config::default();
config.languages.push(Language {rss: false, code: "fr".to_string()});
config.languages.push(Language { rss: false, code: "fr".to_string() });
let mut library = Library::new(2, 0, true);
config.taxonomies = vec![
TaxonomyConfig { name: "categories".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() },
TaxonomyConfig { name: "tags".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() },
TaxonomyConfig { name: "auteurs".to_string(), lang: "fr".to_string(), ..TaxonomyConfig::default() },
TaxonomyConfig {
name: "categories".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
},
TaxonomyConfig {
name: "tags".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
},
TaxonomyConfig {
name: "auteurs".to_string(),
lang: "fr".to_string(),
..TaxonomyConfig::default()
},
];
let mut page1 = Page::default();
@ -410,7 +447,10 @@ mod tests {
assert_eq!(authors.items[0].name, "Vincent Prouillet");
assert_eq!(authors.items[0].slug, "vincent-prouillet");
assert_eq!(authors.items[0].permalink, "http://a-website.com/fr/auteurs/vincent-prouillet/");
assert_eq!(
authors.items[0].permalink,
"http://a-website.com/fr/auteurs/vincent-prouillet/"
);
assert_eq!(authors.items[0].pages.len(), 1);
assert_eq!(categories.items[0].name, "Other");
@ -430,7 +470,7 @@ mod tests {
#[test]
fn errors_on_taxonomy_of_different_language() {
let mut config = Config::default();
config.languages.push(Language {rss: false, code: "fr".to_string()});
config.languages.push(Language { rss: false, code: "fr".to_string() });
let mut library = Library::new(2, 0, false);
config.taxonomies =

View file

@ -155,12 +155,14 @@ fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> {
SectionChangesNeeded::Sort => {
site.register_tera_global_fns();
}
SectionChangesNeeded::Render => {
site.render_section(&site.library.read().unwrap().get_section(&pathbuf).unwrap(), false)?
}
SectionChangesNeeded::RenderWithPages => {
site.render_section(&site.library.read().unwrap().get_section(&pathbuf).unwrap(), true)?
}
SectionChangesNeeded::Render => site.render_section(
&site.library.read().unwrap().get_section(&pathbuf).unwrap(),
false,
)?,
SectionChangesNeeded::RenderWithPages => site.render_section(
&site.library.read().unwrap().get_section(&pathbuf).unwrap(),
true,
)?,
// not a common enough operation to make it worth optimizing
SectionChangesNeeded::Delete | SectionChangesNeeded::Transparent => {
site.build()?;
@ -182,7 +184,7 @@ macro_rules! render_parent_sections {
($site: expr, $path: expr) => {
for s in $site.library.read().unwrap().find_parent_sections($path) {
$site.render_section(s, false)?;
};
}
};
}
@ -230,7 +232,9 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> {
}
PageChangesNeeded::Render => {
render_parent_sections!(site, path);
site.render_page(&site.library.read().unwrap().get_page(&path.to_path_buf()).unwrap())?;
site.render_page(
&site.library.read().unwrap().get_page(&path.to_path_buf()).unwrap(),
)?;
}
};
}

View file

@ -4,7 +4,7 @@ use pulldown_cmark as cmark;
use slug::slugify;
use syntect::easy::HighlightLines;
use syntect::html::{
IncludeBackground, start_highlighted_html_snippet, styled_line_to_highlighted_html,
start_highlighted_html_snippet, styled_line_to_highlighted_html, IncludeBackground,
};
use config::highlighting::{get_highlighter, SYNTAX_SET, THEME_SET};
@ -12,13 +12,14 @@ use context::RenderContext;
use errors::{Error, Result};
use front_matter::InsertAnchor;
use link_checker::check_url;
use table_of_contents::{Header, make_table_of_contents};
use table_of_contents::{make_table_of_contents, Header};
use utils::site::resolve_internal_link;
use utils::vec::InsertMany;
use self::cmark::{Event, Options, Parser, Tag};
const CONTINUE_READING: &str = "<p id=\"zola-continue-reading\"><a name=\"continue-reading\"></a></p>\n";
const CONTINUE_READING: &str =
"<p id=\"zola-continue-reading\"><a name=\"continue-reading\"></a></p>\n";
const ANCHOR_LINK_TEMPLATE: &str = "anchor-link.html";
#[derive(Debug)]
@ -88,9 +89,7 @@ fn fix_link(link: &str, context: &RenderContext) -> Result<String> {
if res.is_valid() {
link.to_string()
} else {
return Err(
format!("Link {} is not valid: {}", link, res.message()).into(),
);
return Err(format!("Link {} is not valid: {}", link, res.message()).into());
}
} else {
link.to_string()
@ -148,7 +147,8 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
opts.insert(Options::ENABLE_FOOTNOTES);
{
let mut events = Parser::new_ext(content, opts).map(|event| {
let mut events = Parser::new_ext(content, opts)
.map(|event| {
match event {
Event::Text(text) => {
// if we are in the middle of a code block
@ -157,7 +157,9 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
if let Some(ref extra) = context.config.extra_syntax_set {
highlighter.highlight(&text, &extra)
} else {
unreachable!("Got a highlighter from extra syntaxes but no extra?");
unreachable!(
"Got a highlighter from extra syntaxes but no extra?"
);
}
} else {
highlighter.highlight(&text, &SYNTAX_SET)
@ -178,8 +180,10 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
let theme = &THEME_SET.themes[&context.config.highlight_theme];
highlighter = Some(get_highlighter(info, &context.config));
// This selects the background color the same way that start_coloured_html_snippet does
let color =
theme.settings.background.unwrap_or(::syntect::highlighting::Color::WHITE);
let color = theme
.settings
.background
.unwrap_or(::syntect::highlighting::Color::WHITE);
background = IncludeBackground::IfDifferent(color);
let snippet = start_highlighted_html_snippet(theme);
Event::Html(Owned(snippet.0))
@ -207,7 +211,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
Ok(fixed_link) => fixed_link,
Err(err) => {
error = Some(err);
return Event::Html(Borrowed(""))
return Event::Html(Borrowed(""));
}
};
@ -219,7 +223,8 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
}
_ => event,
}
}).collect::<Vec<_>>(); // We need to collect the events to make a second pass
})
.collect::<Vec<_>>(); // We need to collect the events to make a second pass
let header_refs = get_header_refs(&events);
@ -228,7 +233,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
for header_ref in header_refs {
let start_idx = header_ref.start_idx;
let end_idx = header_ref.end_idx;
let title = get_text(&events[start_idx + 1 .. end_idx]);
let title = get_text(&events[start_idx + 1..end_idx]);
let id = find_anchor(&inserted_anchors, slugify(&title), 0);
inserted_anchors.push(id.clone());
@ -246,7 +251,12 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
let mut c = tera::Context::new();
c.insert("id", &id);
let anchor_link = utils::templates::render_template(&ANCHOR_LINK_TEMPLATE, context.tera, c, &None)
let anchor_link = utils::templates::render_template(
&ANCHOR_LINK_TEMPLATE,
context.tera,
c,
&None,
)
.map_err(|e| Error::chain("Failed to render anchor link template", e))?;
anchors_to_insert.push((anchor_idx, Event::Html(Owned(anchor_link))));
}

View file

@ -114,8 +114,7 @@ fn render_shortcode(
let template_name = format!("shortcodes/{}.html", name);
let res =
utils::templates::render_template(&template_name, &context.tera, tera_context, &None)
let res = utils::templates::render_template(&template_name, &context.tera, tera_context, &None)
.map_err(|e| Error::chain(format!("Failed to render {} shortcode", name), e))?;
// Small hack to avoid having multiple blank lines because of Tera tags for example

View file

@ -30,7 +30,7 @@ use sass_rs::{compile_file, Options as SassOptions, OutputStyle};
use tera::{Context, Tera};
use config::{get_config, Config};
use errors::{Result, Error};
use errors::{Error, Result};
use front_matter::InsertAnchor;
use library::{
find_taxonomies, sort_actual_pages_by_date, Library, Page, Paginator, Section, Taxonomy,
@ -87,7 +87,8 @@ impl Site {
format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "templates/**/*.*ml");
// Only parsing as we might be extending templates from themes and that would error
// as we haven't loaded them yet
let mut tera = Tera::parse(&tpl_glob).map_err(|e| Error::chain("Error parsing templates", e))?;
let mut tera =
Tera::parse(&tpl_glob).map_err(|e| Error::chain("Error parsing templates", e))?;
if let Some(theme) = config.theme.clone() {
// Grab data from the extra section of the theme
config.merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"))?;
@ -103,8 +104,8 @@ impl Site {
path.to_string_lossy().replace("\\", "/"),
format!("themes/{}/templates/**/*.*ml", theme)
);
let mut tera_theme =
Tera::parse(&theme_tpl_glob).map_err(|e| Error::chain("Error parsing templates from themes", e))?;
let mut tera_theme = Tera::parse(&theme_tpl_glob)
.map_err(|e| Error::chain("Error parsing templates from themes", e))?;
rewrite_theme_paths(&mut tera_theme, &theme);
// TODO: we do that twice, make it dry?
if theme_path.join("templates").join("robots.txt").exists() {
@ -196,8 +197,11 @@ impl Site {
entry.as_path().file_name().unwrap().to_str().unwrap().starts_with("_index.")
});
self.library =
Arc::new(RwLock::new(Library::new(page_entries.len(), section_entries.len(), self.config.is_multilingual())));
self.library = Arc::new(RwLock::new(Library::new(
page_entries.len(),
section_entries.len(),
self.config.is_multilingual(),
)));
let sections = {
let config = &self.config;
@ -349,8 +353,14 @@ impl Site {
}
pub fn register_tera_global_fns(&mut self) {
self.tera.register_function("get_page", global_fns::GetPage::new(self.base_path.clone(), self.library.clone()));
self.tera.register_function("get_section", global_fns::GetSection::new(self.base_path.clone(), self.library.clone()));
self.tera.register_function(
"get_page",
global_fns::GetPage::new(self.base_path.clone(), self.library.clone()),
);
self.tera.register_function(
"get_section",
global_fns::GetSection::new(self.base_path.clone(), self.library.clone()),
);
self.tera.register_function(
"get_taxonomy",
global_fns::GetTaxonomy::new(self.taxonomies.clone(), self.library.clone()),
@ -475,7 +485,8 @@ impl Site {
pub fn clean(&self) -> Result<()> {
if self.output_path.exists() {
// Delete current `public` directory so we can start fresh
remove_dir_all(&self.output_path).map_err(|e| Error::chain("Couldn't delete output directory", e))?;
remove_dir_all(&self.output_path)
.map_err(|e| Error::chain("Couldn't delete output directory", e))?;
}
Ok(())
@ -544,12 +555,8 @@ impl Site {
if !lang.rss {
continue;
}
let pages = library
.pages_values()
.iter()
.filter(|p| p.lang == lang.code)
.map(|p| *p)
.collect();
let pages =
library.pages_values().iter().filter(|p| p.lang == lang.code).map(|p| *p).collect();
self.render_rss_feed(pages, Some(&PathBuf::from(lang.code.clone())))?;
}
@ -735,7 +742,8 @@ impl Site {
} else {
self.output_path.join(&taxonomy.kind.name)
};
let list_output = taxonomy.render_all_terms(&self.tera, &self.config, &self.library.read().unwrap())?;
let list_output =
taxonomy.render_all_terms(&self.tera, &self.config, &self.library.read().unwrap())?;
create_directory(&output_path)?;
create_file(&output_path.join("index.html"), &self.inject_livereload(list_output))?;
let library = self.library.read().unwrap();
@ -794,14 +802,20 @@ impl Site {
let mut sections = self
.library
.read().unwrap()
.read()
.unwrap()
.sections_values()
.iter()
.filter(|s| s.meta.render)
.map(|s| SitemapEntry::new(s.permalink.clone(), None))
.collect::<Vec<_>>();
for section in
self.library.read().unwrap().sections_values().iter().filter(|s| s.meta.paginate_by.is_some())
for section in self
.library
.read()
.unwrap()
.sections_values()
.iter()
.filter(|s| s.meta.paginate_by.is_some())
{
let number_pagers = (section.pages.len() as f64
/ section.meta.paginate_by.unwrap() as f64)
@ -971,9 +985,13 @@ impl Site {
}
if section.meta.is_paginated() {
self.render_paginated(&output_path, &Paginator::from_section(&section, &self.library.read().unwrap()))?;
self.render_paginated(
&output_path,
&Paginator::from_section(&section, &self.library.read().unwrap()),
)?;
} else {
let output = section.render_html(&self.tera, &self.config, &self.library.read().unwrap())?;
let output =
section.render_html(&self.tera, &self.config, &self.library.read().unwrap())?;
create_file(&output_path.join("index.html"), &self.inject_livereload(output))?;
}
@ -985,7 +1003,8 @@ impl Site {
self.render_section(
&self
.library
.read().unwrap()
.read()
.unwrap()
.get_section(&self.content_path.join("_index.md"))
.expect("Failed to get index section"),
false,
@ -995,7 +1014,8 @@ impl Site {
/// Renders all sections
pub fn render_sections(&self) -> Result<()> {
self.library
.read().unwrap()
.read()
.unwrap()
.sections_values()
.into_par_iter()
.map(|s| self.render_section(s, true))
@ -1026,8 +1046,12 @@ impl Site {
.map(|pager| {
let page_path = folder_path.join(&format!("{}", pager.index));
create_directory(&page_path)?;
let output =
paginator.render_pager(pager, &self.config, &self.tera, &self.library.read().unwrap())?;
let output = paginator.render_pager(
pager,
&self.config,
&self.tera,
&self.library.read().unwrap(),
)?;
if pager.index > 1 {
create_file(&page_path.join("index.html"), &self.inject_livereload(output))?;
} else {

View file

@ -631,9 +631,8 @@ fn can_apply_page_templates() {
assert_eq!(changed_recursively.meta.title, Some("Changed recursively".into()));
// But it should not have override a children page_template
let yet_another_section = library
.get_section(&template_path.join("yet_another_section").join("_index.md"))
.unwrap();
let yet_another_section =
library.get_section(&template_path.join("yet_another_section").join("_index.md")).unwrap();
assert_eq!(yet_another_section.subsections.len(), 0);
assert_eq!(yet_another_section.pages.len(), 1);

View file

@ -23,8 +23,7 @@ fn can_parse_multilingual_site() {
assert_eq!(default_index_section.pages.len(), 1);
assert!(default_index_section.ancestors.is_empty());
let fr_index_section =
library.get_section(&path.join("content").join("_index.fr.md")).unwrap();
let fr_index_section = library.get_section(&path.join("content").join("_index.fr.md")).unwrap();
assert_eq!(fr_index_section.pages.len(), 1);
assert!(fr_index_section.ancestors.is_empty());
@ -139,5 +138,4 @@ fn can_build_multilingual_site() {
assert!(!file_contains!(public, "fr/auteurs/index.html", "Queen"));
assert!(file_contains!(public, "fr/auteurs/index.html", "Vincent"));
assert!(!file_exists!(public, "fr/auteurs/vincent-prouillet/rss.xml"));
}

View file

@ -183,7 +183,7 @@ impl LoadData {
/// Creates the `load_data` Tera function.
///
/// Builds one shared HTTP client and one shared result cache up front so
/// that every template invocation reuses them (both are wrapped in
/// `Arc<Mutex<..>>` for shared mutable access).
///
/// Panics if the reqwest client cannot be built, which indicates a broken
/// TLS/runtime environment rather than a recoverable error.
pub fn new(content_path: PathBuf, base_path: PathBuf) -> Self {
    let client = Arc::new(Mutex::new(Client::builder().build().expect("reqwest client build")));
    let result_cache = Arc::new(Mutex::new(HashMap::new()));
    Self { content_path, base_path, client, result_cache }
}
}
@ -310,7 +310,7 @@ fn load_csv(csv_data: String) -> Result<Value> {
#[cfg(test)]
mod tests {
use super::{LoadData, DataSource, OutputFormat};
use super::{DataSource, LoadData, OutputFormat};
use std::collections::HashMap;
use std::path::PathBuf;

View file

@ -15,7 +15,7 @@ mod macros;
mod load_data;
pub use self::load_data::LoadData;
pub use self::load_data::LoadData;
#[derive(Debug)]
pub struct Trans {
@ -23,7 +23,7 @@ pub struct Trans {
}
impl Trans {
    /// Creates the `trans` Tera function from the site configuration,
    /// which it stores for later lookups.
    pub fn new(config: Config) -> Self {
        Self { config }
    }
}
impl TeraFn for Trans {
@ -43,7 +43,7 @@ pub struct GetUrl {
}
impl GetUrl {
    /// Creates the `get_url` Tera function from the site configuration and
    /// the map of internal links to their permalinks.
    pub fn new(config: Config, permalinks: HashMap<String, String>) -> Self {
        Self { config, permalinks }
    }
}
impl TeraFn for GetUrl {
@ -88,7 +88,7 @@ pub struct ResizeImage {
}
impl ResizeImage {
    /// Creates the `resize_image` Tera function holding a shared handle to
    /// the image processor (shared because template rendering is concurrent).
    pub fn new(imageproc: Arc<Mutex<imageproc::Processor>>) -> Self {
        Self { imageproc }
    }
}
@ -154,7 +154,7 @@ impl GetTaxonomyUrl {
}
taxonomies.insert(taxonomy.kind.name.clone(), items);
}
Self {taxonomies}
Self { taxonomies }
}
}
impl TeraFn for GetTaxonomyUrl {
@ -188,7 +188,6 @@ impl TeraFn for GetTaxonomyUrl {
}
}
#[derive(Debug)]
pub struct GetPage {
base_path: PathBuf,
@ -196,7 +195,7 @@ pub struct GetPage {
}
impl GetPage {
    /// Creates the `get_page` Tera function.
    ///
    /// `base_path` is the site root; the `content` subdirectory is appended
    /// here so lookups can resolve page paths relative to it. The library is
    /// shared behind an `RwLock` with the rest of the site.
    pub fn new(base_path: PathBuf, library: Arc<RwLock<Library>>) -> Self {
        Self { base_path: base_path.join("content"), library }
    }
}
impl TeraFn for GetPage {
@ -209,9 +208,7 @@ impl TeraFn for GetPage {
let full_path = self.base_path.join(&path);
let library = self.library.read().unwrap();
match library.get_page(&full_path) {
Some(p) => {
Ok(to_value(p.to_serialized(&library)).unwrap())
},
Some(p) => Ok(to_value(p.to_serialized(&library)).unwrap()),
None => Err(format!("Page `{}` not found.", path).into()),
}
}
@ -224,7 +221,7 @@ pub struct GetSection {
}
impl GetSection {
pub fn new(base_path: PathBuf, library: Arc<RwLock<Library>>) -> Self {
Self {base_path: base_path.join("content"), library}
Self { base_path: base_path.join("content"), library }
}
}
impl TeraFn for GetSection {
@ -249,13 +246,12 @@ impl TeraFn for GetSection {
} else {
Ok(to_value(s.to_serialized(&library)).unwrap())
}
},
}
None => Err(format!("Section `{}` not found.", path).into()),
}
}
}
#[derive(Debug)]
pub struct GetTaxonomy {
library: Arc<RwLock<Library>>,
@ -267,7 +263,7 @@ impl GetTaxonomy {
for taxo in all_taxonomies {
taxonomies.insert(taxo.kind.name.clone(), taxo);
}
Self {taxonomies, library}
Self { taxonomies, library }
}
}
impl TeraFn for GetTaxonomy {
@ -279,15 +275,9 @@ impl TeraFn for GetTaxonomy {
);
match self.taxonomies.get(&kind) {
Some(t) => {
Ok(to_value(t.to_serialized(&self.library.read().unwrap())).unwrap())
},
Some(t) => Ok(to_value(t.to_serialized(&self.library.read().unwrap())).unwrap()),
None => {
Err(format!(
"`get_taxonomy` received an unknown taxonomy as kind: {}",
kind
)
.into())
Err(format!("`get_taxonomy` received an unknown taxonomy as kind: {}", kind).into())
}
}
}
@ -298,9 +288,9 @@ mod tests {
use super::{GetTaxonomy, GetTaxonomyUrl, GetUrl, Trans};
use std::collections::HashMap;
use std::sync::{RwLock, Arc};
use std::sync::{Arc, RwLock};
use tera::{to_value, Value, Function};
use tera::{to_value, Function, Value};
use config::{Config, Taxonomy as TaxonomyConfig};
use library::{Library, Taxonomy, TaxonomyItem};
@ -348,9 +338,19 @@ mod tests {
#[test]
fn can_get_taxonomy() {
let config = Config::default();
let taxo_config = TaxonomyConfig { name: "tags".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() };
let taxo_config = TaxonomyConfig {
name: "tags".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
};
let library = Arc::new(RwLock::new(Library::new(0, 0, false)));
let tag = TaxonomyItem::new("Programming", &taxo_config, &config, vec![], &library.read().unwrap());
let tag = TaxonomyItem::new(
"Programming",
&taxo_config,
&config,
vec![],
&library.read().unwrap(),
);
let tags = Taxonomy { kind: taxo_config, items: vec![tag] };
let taxonomies = vec![tags.clone()];
@ -388,7 +388,11 @@ mod tests {
#[test]
fn can_get_taxonomy_url() {
let config = Config::default();
let taxo_config = TaxonomyConfig { name: "tags".to_string(), lang: config.default_language.clone(), ..TaxonomyConfig::default() };
let taxo_config = TaxonomyConfig {
name: "tags".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
};
let library = Library::new(0, 0, false);
let tag = TaxonomyItem::new("Programming", &taxo_config, &config, vec![], &library);
let tags = Taxonomy { kind: taxo_config, items: vec![tag] };

View file

@ -25,7 +25,7 @@ pub mod global_fns;
use tera::{Context, Tera};
use errors::{Result, Error};
use errors::{Error, Result};
lazy_static! {
pub static ref ZOLA_TERA: Tera = {

View file

@ -4,7 +4,7 @@ use std::path::{Path, PathBuf};
use std::time::SystemTime;
use walkdir::WalkDir;
use errors::{Result, Error};
use errors::{Error, Result};
pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result<bool> {
let canonical_path = path
@ -19,8 +19,8 @@ pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result<bool> {
/// Create a file with the content given
pub fn create_file(path: &Path, content: &str) -> Result<()> {
let mut file = File::create(&path)
.map_err(|e| Error::chain(format!("Failed to create {:?}", path), e))?;
let mut file =
File::create(&path).map_err(|e| Error::chain(format!("Failed to create {:?}", path), e))?;
file.write_all(content.as_bytes())?;
Ok(())
}
@ -37,8 +37,9 @@ pub fn ensure_directory_exists(path: &Path) -> Result<()> {
/// exists before creating it
pub fn create_directory(path: &Path) -> Result<()> {
if !path.exists() {
create_dir_all(path)
.map_err(|e| Error::chain(format!("Was not able to create folder {}", path.display()), e))?;
create_dir_all(path).map_err(|e| {
Error::chain(format!("Was not able to create folder {}", path.display()), e)
})?;
}
Ok(())
}

View file

@ -16,7 +16,7 @@ impl<T> InsertMany for Vec<T> {
for (idx, elem) in elem_to_insert.into_iter() {
let head_len = idx - last_idx;
inserted.extend(self.splice(0 .. head_len, std::iter::empty()));
inserted.extend(self.splice(0..head_len, std::iter::empty()));
inserted.push(elem);
last_idx = idx;
}

View file

@ -36,7 +36,7 @@ use ctrlc;
use notify::{watcher, RecursiveMode, Watcher};
use ws::{Message, Sender, WebSocket};
use errors::{Result, Error as ZolaError};
use errors::{Error as ZolaError, Result};
use site::Site;
use utils::fs::copy_file;
@ -296,11 +296,7 @@ pub fn serve(
};
console::info(&msg);
// Force refresh
rebuild_done_handling(
&broadcaster,
rebuild::after_template_change(site, &path),
"/x.js",
);
rebuild_done_handling(&broadcaster, rebuild::after_template_change(site, &path), "/x.js");
};
let reload_sass = |site: &Site, path: &Path, partial_path: &Path| {

View file

@ -7,8 +7,8 @@ use atty;
use chrono::Duration;
use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
use site::Site;
use errors::Error;
use site::Site;
lazy_static! {
/// Termcolor color choice.
@ -64,9 +64,7 @@ pub fn warn_about_ignored_pages(site: &Site) {
let ignored_pages: Vec<_> = library
.sections_values()
.iter()
.flat_map(|s| {
s.ignored_pages.iter().map(|k| library.get_page_by_key(*k).file.path.clone())
})
.flat_map(|s| s.ignored_pages.iter().map(|k| library.get_page_by_key(*k).file.path.clone()))
.collect();
if !ignored_pages.is_empty() {