diff --git a/build.rs b/build.rs index 8912a1b8..91d124f5 100644 --- a/build.rs +++ b/build.rs @@ -7,9 +7,9 @@ include!("src/cli.rs"); fn main() { // disabled below as it fails in CI -// let mut app = build_cli(); -// app.gen_completions("zola", Shell::Bash, "completions/"); -// app.gen_completions("zola", Shell::Fish, "completions/"); -// app.gen_completions("zola", Shell::Zsh, "completions/"); -// app.gen_completions("zola", Shell::PowerShell, "completions/"); + // let mut app = build_cli(); + // app.gen_completions("zola", Shell::Bash, "completions/"); + // app.gen_completions("zola", Shell::Fish, "completions/"); + // app.gen_completions("zola", Shell::Zsh, "completions/"); + // app.gen_completions("zola", Shell::PowerShell, "completions/"); } diff --git a/components/config/examples/generate_sublime.rs b/components/config/examples/generate_sublime.rs index 2015fa84..981e3998 100644 --- a/components/config/examples/generate_sublime.rs +++ b/components/config/examples/generate_sublime.rs @@ -3,10 +3,10 @@ //! Although it is a valid example for serializing syntaxes, you probably won't need //! to do this yourself unless you want to cache your own compiled grammars. extern crate syntect; -use syntect::parsing::SyntaxSetBuilder; -use syntect::highlighting::ThemeSet; -use syntect::dumps::*; use std::env; +use syntect::dumps::*; +use syntect::highlighting::ThemeSet; +use syntect::parsing::SyntaxSetBuilder; fn usage_and_exit() -> ! { println!("USAGE: cargo run --example generate_sublime synpack source-dir newlines.packdump nonewlines.packdump\n @@ -32,7 +32,7 @@ fn main() { println!("- {} -> {:?}", s.name, s.file_extensions); } } - }, + } (Some(ref cmd), Some(ref theme_dir), Some(ref packpath)) if cmd == "themepack" => { let ts = ThemeSet::load_from_folder(theme_dir).unwrap(); for path in ts.themes.keys() { diff --git a/components/config/src/config.rs b/components/config/src/config.rs index 0564dcc4..7687c360 100644 --- a/components/config/src/config.rs +++ b/components/config/src/config.rs @@ -41,12 +41,7 @@ impl Taxonomy { impl Default for Taxonomy { fn default() -> Taxonomy { - Taxonomy { - name: String::new(), - paginate_by: None, - paginate_path: None, - rss: false, - } + Taxonomy { name: String::new(), paginate_by: None, paginate_path: None, rss: false } } } @@ -137,19 +132,12 @@ impl Config { for pat in &config.ignored_content { let glob = match Glob::new(pat) { Ok(g) => g, - Err(e) => bail!( - "Invalid ignored_content glob pattern: {}, error = {}", - pat, - e - ), + Err(e) => bail!("Invalid ignored_content glob pattern: {}, error = {}", pat, e), }; glob_set_builder.add(glob); } - config.ignored_content_globset = Some( - glob_set_builder - .build() - .expect("Bad ignored_content in config file."), - ); + config.ignored_content_globset = + Some(glob_set_builder.build().expect("Bad ignored_content in config file.")); } Ok(config) @@ -162,10 +150,7 @@ impl Config { let file_name = path.file_name().unwrap(); File::open(path) .chain_err(|| { - format!( - "No `{:?}` file found. Are you in the right directory?", - file_name - ) + format!("No `{:?}` file found. Are you in the right directory?", file_name) })? .read_to_string(&mut content)?; @@ -217,16 +202,12 @@ impl Config { let original = self.extra.clone(); // 2. inject theme extra values for (key, val) in &theme.extra { - self.extra - .entry(key.to_string()) - .or_insert_with(|| val.clone()); + self.extra.entry(key.to_string()).or_insert_with(|| val.clone()); } // 3. 
overwrite with original config for (key, val) in &original { - self.extra - .entry(key.to_string()) - .or_insert_with(|| val.clone()); + self.extra.entry(key.to_string()).or_insert_with(|| val.clone()); } Ok(()) } @@ -316,16 +297,7 @@ hello = "world" let config = Config::parse(config); assert!(config.is_ok()); - assert_eq!( - config - .unwrap() - .extra - .get("hello") - .unwrap() - .as_str() - .unwrap(), - "world" - ); + assert_eq!(config.unwrap().extra.get("hello").unwrap().as_str().unwrap(), "world"); } #[test] @@ -360,10 +332,7 @@ hello = "world" fn can_make_url_with_localhost() { let mut config = Config::default(); config.base_url = "http://127.0.0.1:1111".to_string(); - assert_eq!( - config.make_permalink("/tags/rust"), - "http://127.0.0.1:1111/tags/rust/" - ); + assert_eq!(config.make_permalink("/tags/rust"), "http://127.0.0.1:1111/tags/rust/"); } // https://github.com/Keats/gutenberg/issues/486 diff --git a/components/config/src/highlighting.rs b/components/config/src/highlighting.rs index 0c35d812..413a700c 100644 --- a/components/config/src/highlighting.rs +++ b/components/config/src/highlighting.rs @@ -1,18 +1,18 @@ use syntect::dumps::from_binary; -use syntect::parsing::SyntaxSet; -use syntect::highlighting::ThemeSet; use syntect::easy::HighlightLines; +use syntect::highlighting::ThemeSet; +use syntect::parsing::SyntaxSet; use Config; - lazy_static! { pub static ref SYNTAX_SET: SyntaxSet = { - let ss: SyntaxSet = from_binary(include_bytes!("../../../sublime_syntaxes/newlines.packdump")); + let ss: SyntaxSet = + from_binary(include_bytes!("../../../sublime_syntaxes/newlines.packdump")); ss }; - - pub static ref THEME_SET: ThemeSet = from_binary(include_bytes!("../../../sublime_themes/all.themedump")); + pub static ref THEME_SET: ThemeSet = + from_binary(include_bytes!("../../../sublime_themes/all.themedump")); } /// Returns the highlighter and whether it was found in the extra or not @@ -21,7 +21,8 @@ pub fn get_highlighter<'a>(info: &str, config: &Config) -> (HighlightLines<'a>, let mut in_extra = false; if let Some(ref lang) = info.split(' ').next() { - let syntax = SYNTAX_SET.find_syntax_by_token(lang) + let syntax = SYNTAX_SET + .find_syntax_by_token(lang) .or_else(|| { if let Some(ref extra) = config.extra_syntax_set { let s = extra.find_syntax_by_token(lang); diff --git a/components/config/src/lib.rs b/components/config/src/lib.rs index 52bd813e..621e3aa4 100644 --- a/components/config/src/lib.rs +++ b/components/config/src/lib.rs @@ -9,10 +9,9 @@ extern crate globset; extern crate lazy_static; extern crate syntect; - mod config; -mod theme; pub mod highlighting; +mod theme; pub use config::{Config, Taxonomy}; use std::path::Path; diff --git a/components/config/src/theme.rs b/components/config/src/theme.rs index 5e95deee..1bce6bf9 100644 --- a/components/config/src/theme.rs +++ b/components/config/src/theme.rs @@ -7,7 +7,6 @@ use toml::Value as Toml; use errors::{Result, ResultExt}; - /// Holds the data from a `theme.toml` file. /// There are other fields than `extra` in it but Zola /// itself doesn't care about them. @@ -36,7 +35,6 @@ impl Theme { bail!("Expected the `theme.toml` to be a TOML table") } - Ok(Theme { extra }) } @@ -44,11 +42,11 @@ impl Theme { pub fn from_file(path: &PathBuf) -> Result<Theme> { let mut content = String::new(); File::open(path) - .chain_err(|| + .chain_err(|| { "No `theme.toml` file found. \ - Is the `theme` defined in your `config.toml present in the `themes` directory \ - and does it have a `theme.toml` inside?" - )? 
+ Is the `theme` defined in your `config.toml present in the `themes` directory \ + and does it have a `theme.toml` inside?" + })? .read_to_string(&mut content)?; Theme::parse(&content) diff --git a/components/errors/src/lib.rs b/components/errors/src/lib.rs index 4471651a..0d0a32dc 100755 --- a/components/errors/src/lib.rs +++ b/components/errors/src/lib.rs @@ -2,10 +2,10 @@ #[macro_use] extern crate error_chain; -extern crate tera; -extern crate toml; extern crate image; extern crate syntect; +extern crate tera; +extern crate toml; error_chain! { errors {} diff --git a/components/front_matter/src/lib.rs b/components/front_matter/src/lib.rs index fd2739c3..ec6d7ae7 100644 --- a/components/front_matter/src/lib.rs +++ b/components/front_matter/src/lib.rs @@ -2,18 +2,18 @@ extern crate lazy_static; #[macro_use] extern crate serde_derive; -extern crate serde; -extern crate toml; -extern crate regex; -extern crate tera; extern crate chrono; +extern crate regex; +extern crate serde; +extern crate tera; +extern crate toml; #[macro_use] extern crate errors; -use std::path::Path; -use regex::Regex; use errors::{Result, ResultExt}; +use regex::Regex; +use std::path::Path; mod page; mod section; @@ -22,7 +22,8 @@ pub use page::PageFrontMatter; pub use section::SectionFrontMatter; lazy_static! { - static ref PAGE_RE: Regex = Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap(); + static ref PAGE_RE: Regex = + Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap(); } #[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)] @@ -44,12 +45,14 @@ pub enum InsertAnchor { None, } - /// Split a file between the front matter and its content /// Will return an error if the front matter wasn't found fn split_content(file_path: &Path, content: &str) -> Result<(String, String)> { if !PAGE_RE.is_match(content) { - bail!("Couldn't find front matter in `{}`. Did you forget to add `+++`?", file_path.to_string_lossy()); + bail!( + "Couldn't find front matter in `{}`. Did you forget to add `+++`?", + file_path.to_string_lossy() + ); } // 2. extract the front matter and the content @@ -62,10 +65,14 @@ fn split_content(file_path: &Path, content: &str) -> Result<(String, String)> { /// Split a file between the front matter and its content. 
/// Returns a parsed `SectionFrontMatter` and the rest of the content -pub fn split_section_content(file_path: &Path, content: &str) -> Result<(SectionFrontMatter, String)> { +pub fn split_section_content( + file_path: &Path, + content: &str, +) -> Result<(SectionFrontMatter, String)> { let (front_matter, content) = split_content(file_path, content)?; - let meta = SectionFrontMatter::parse(&front_matter) - .chain_err(|| format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()))?; + let meta = SectionFrontMatter::parse(&front_matter).chain_err(|| { + format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()) + })?; Ok((meta, content)) } @@ -73,8 +80,9 @@ /// Returns a parsed `PageFrontMatter` and the rest of the content pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontMatter, String)> { let (front_matter, content) = split_content(file_path, content)?; - let meta = PageFrontMatter::parse(&front_matter) - .chain_err(|| format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()))?; + let meta = PageFrontMatter::parse(&front_matter).chain_err(|| { + format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()) + })?; Ok((meta, content)) } @@ -82,7 +90,7 @@ mod tests { use std::path::Path; - use super::{split_section_content, split_page_content}; + use super::{split_page_content, split_section_content}; #[test] fn can_split_page_content_valid() { diff --git a/components/front_matter/src/page.rs b/components/front_matter/src/page.rs index cb6c0814..2f3f99f3 100644 --- a/components/front_matter/src/page.rs +++ b/components/front_matter/src/page.rs @@ -2,19 +2,17 @@ use std::collections::HashMap; use std::result::Result as StdResult; use chrono::prelude::*; -use tera::{Map, Value}; use serde::{Deserialize, Deserializer}; +use tera::{Map, Value}; use toml; use errors::Result; - fn from_toml_datetime<'de, D>(deserializer: D) -> StdResult<Option<String>, D::Error> - where - D: Deserializer<'de>, +where + D: Deserializer<'de>, { - toml::value::Datetime::deserialize(deserializer) - .map(|s| Some(s.to_string())) + toml::value::Datetime::deserialize(deserializer).map(|s| Some(s.to_string())) } /// Returns key/value for a converted date from TOML. 
@@ -36,7 +34,9 @@ fn convert_toml_date(table: Map<String, Value>) -> Value { } new.insert(k, convert_toml_date(o)); } - _ => { new.insert(k, v); } + _ => { + new.insert(k, v); + } } } @@ -53,14 +53,15 @@ fn fix_toml_dates(table: Map<String, Value>) -> Value { Value::Object(mut o) => { new.insert(key, convert_toml_date(o)); } - _ => { new.insert(key, value); } + _ => { + new.insert(key, value); + } } } Value::Object(new) } - /// The front matter of every page #[derive(Debug, Clone, PartialEq, Deserialize)] #[serde(default)] @@ -143,7 +144,9 @@ impl PageFrontMatter { if d.contains('T') { DateTime::parse_from_rfc3339(&d).ok().and_then(|s| Some(s.naive_local())) } else { - NaiveDate::parse_from_str(&d, "%Y-%m-%d").ok().and_then(|s| Some(s.and_hms(0, 0, 0))) + NaiveDate::parse_from_str(&d, "%Y-%m-%d") + .ok() + .and_then(|s| Some(s.and_hms(0, 0, 0))) } } else { None @@ -187,11 +190,10 @@ impl Default for PageFrontMatter { } } - #[cfg(test)] mod tests { - use tera::to_value; use super::PageFrontMatter; + use tera::to_value; #[test] fn can_have_empty_front_matter() { @@ -213,7 +215,6 @@ mod tests { assert_eq!(res.description.unwrap(), "hey there".to_string()) } - #[test] fn errors_with_invalid_front_matter() { let content = r#"title = 1\n"#; diff --git a/components/front_matter/src/section.rs b/components/front_matter/src/section.rs index 64eb27dc..17fca5f5 100644 --- a/components/front_matter/src/section.rs +++ b/components/front_matter/src/section.rs @@ -5,11 +5,10 @@ use toml; use errors::Result; -use super::{SortBy, InsertAnchor}; +use super::{InsertAnchor, SortBy}; static DEFAULT_PAGINATE_PATH: &'static str = "page"; - /// The front matter of every section #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] #[serde(default)] @@ -69,7 +68,7 @@ impl SectionFrontMatter { pub fn is_paginated(&self) -> bool { match self.paginate_by { Some(v) => v > 0, - None => false + None => false, } } } diff --git a/components/imageproc/src/lib.rs b/components/imageproc/src/lib.rs index ea62b9e2..b73a7b53 100644 --- a/components/imageproc/src/lib.rs +++ b/components/imageproc/src/lib.rs @@ -1,32 +1,32 @@ #[macro_use] extern crate lazy_static; -extern crate regex; extern crate image; extern crate rayon; +extern crate regex; -extern crate utils; extern crate errors; +extern crate utils; -use std::path::{Path, PathBuf}; -use std::hash::{Hash, Hasher}; -use std::collections::HashMap; -use std::collections::hash_map::Entry as HEntry; use std::collections::hash_map::DefaultHasher; +use std::collections::hash_map::Entry as HEntry; +use std::collections::HashMap; use std::fs::{self, File}; +use std::hash::{Hash, Hasher}; +use std::path::{Path, PathBuf}; -use regex::Regex; -use image::{FilterType, GenericImageView}; use image::jpeg::JPEGEncoder; +use image::{FilterType, GenericImageView}; use rayon::prelude::*; +use regex::Regex; -use utils::fs as ufs; use errors::{Result, ResultExt}; - +use utils::fs as ufs; static RESIZED_SUBDIR: &'static str = "processed_images"; lazy_static! 
{ - pub static ref RESIZED_FILENAME: Regex = Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.]jpg"#).unwrap(); + pub static ref RESIZED_FILENAME: Regex = + Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.]jpg"#).unwrap(); } /// Describes the precise kind of a resize operation @@ -57,16 +57,22 @@ impl ResizeOp { // Validate args: match op { - "fit_width" => if width.is_none() { - return Err("op=\"fit_width\" requires a `width` argument".to_string().into()); - }, - "fit_height" => if height.is_none() { - return Err("op=\"fit_height\" requires a `height` argument".to_string().into()); - }, - "scale" | "fit" | "fill" => if width.is_none() || height.is_none() { - return Err(format!("op={} requires a `width` and `height` argument", op).into()); - }, - _ => return Err(format!("Invalid image resize operation: {}", op).into()) + "fit_width" => { + if width.is_none() { + return Err("op=\"fit_width\" requires a `width` argument".to_string().into()); + } + } + "fit_height" => { + if height.is_none() { + return Err("op=\"fit_height\" requires a `height` argument".to_string().into()); + } + } + "scale" | "fit" | "fill" => { + if width.is_none() || height.is_none() { + return Err(format!("op={} requires a `width` and `height` argument", op).into()); + } + } + _ => return Err(format!("Invalid image resize operation: {}", op).into()), }; Ok(match op { @@ -121,8 +127,12 @@ impl From<ResizeOp> for u8 { impl Hash for ResizeOp { fn hash<H: Hasher>(&self, hasher: &mut H) { hasher.write_u8(u8::from(*self)); - if let Some(w) = self.width() { hasher.write_u32(w); } - if let Some(h) = self.height() { hasher.write_u32(h); } + if let Some(w) = self.width() { + hasher.write_u32(w); + } + if let Some(h) = self.height() { + hasher.write_u32(h); + } } } @@ -207,8 +217,7 @@ impl ImageOp { ((img_w - crop_w) / 2, 0) }; - img.crop(offset_w, offset_h, crop_w, crop_h) - .resize_exact(w, h, RESIZE_FILTER) + img.crop(offset_w, offset_h, crop_w, crop_h).resize_exact(w, h, RESIZE_FILTER) } } }; @@ -221,7 +230,6 @@ impl ImageOp { } } - /// A strcture into which image operations can be enqueued and then performed. /// All output is written in a subdirectory in `static_path`, /// taking care of file stale status based on timestamps and possible hash collisions. 
@@ -271,7 +279,11 @@ impl Processor { fn insert_with_collisions(&mut self, mut img_op: ImageOp) -> u32 { match self.img_ops.entry(img_op.hash) { - HEntry::Occupied(entry) => if *entry.get() == img_op { return 0; }, + HEntry::Occupied(entry) => { + if *entry.get() == img_op { + return 0; + } + } HEntry::Vacant(entry) => { entry.insert(img_op); return 0; @@ -341,9 +353,8 @@ impl Processor { let filename = entry_path.file_name().unwrap().to_string_lossy(); if let Some(capts) = RESIZED_FILENAME.captures(filename.as_ref()) { let hash = u64::from_str_radix(capts.get(1).unwrap().as_str(), 16).unwrap(); - let collision_id = u32::from_str_radix( - capts.get(2).unwrap().as_str(), 16, - ).unwrap(); + let collision_id = + u32::from_str_radix(capts.get(2).unwrap().as_str(), 16).unwrap(); if collision_id > 0 || !self.img_ops.contains_key(&hash) { fs::remove_file(&entry_path)?; @@ -359,24 +370,28 @@ impl Processor { ufs::ensure_directory_exists(&self.resized_path)?; } - self.img_ops.par_iter().map(|(hash, op)| { - let target = self.resized_path.join(Self::op_filename(*hash, op.collision_id)); - op.perform(&self.content_path, &target) - .chain_err(|| format!("Failed to process image: {}", op.source)) - }).collect::<Result<()>>() + self.img_ops + .par_iter() + .map(|(hash, op)| { + let target = self.resized_path.join(Self::op_filename(*hash, op.collision_id)); + op.perform(&self.content_path, &target) + .chain_err(|| format!("Failed to process image: {}", op.source)) + }) + .collect::<Result<()>>() } } - /// Looks at file's extension and returns whether it's a supported image format pub fn file_is_img<P: AsRef<Path>>(p: P) -> bool { - p.as_ref().extension().and_then(|s| s.to_str()).map(|ext| { - match ext.to_lowercase().as_str() { + p.as_ref() + .extension() + .and_then(|s| s.to_str()) + .map(|ext| match ext.to_lowercase().as_str() { "jpg" | "jpeg" => true, "png" => true, "gif" => true, "bmp" => true, _ => false, - } - }).unwrap_or(false) + }) + .unwrap_or(false) } diff --git a/components/library/src/content/file_info.rs b/components/library/src/content/file_info.rs index b098dc64..73ffaa57 100644 --- a/components/library/src/content/file_info.rs +++ b/components/library/src/content/file_info.rs @@ -114,7 +114,8 @@ mod tests { #[test] fn can_find_content_components() { - let res = find_content_components("/home/vincent/code/site/content/posts/tutorials/python.md"); + let res = + find_content_components("/home/vincent/code/site/content/posts/tutorials/python.md"); assert_eq!(res, ["posts".to_string(), "tutorials".to_string()]); } } diff --git a/components/library/src/content/page.rs b/components/library/src/content/page.rs index 831f3aae..d0895177 100644 --- a/components/library/src/content/page.rs +++ b/components/library/src/content/page.rs @@ -2,19 +2,19 @@ use std::collections::HashMap; use std::path::{Path, PathBuf}; -use tera::{Tera, Context as TeraContext}; -use slug::slugify; -use slotmap::{Key}; use regex::Regex; +use slotmap::Key; +use slug::slugify; +use tera::{Context as TeraContext, Tera}; -use errors::{Result, ResultExt}; use config::Config; -use utils::fs::{read_file, find_related_assets}; +use errors::{Result, ResultExt}; +use front_matter::{split_page_content, InsertAnchor, PageFrontMatter}; +use library::Library; +use rendering::{render_content, Header, RenderContext}; +use utils::fs::{find_related_assets, read_file}; use utils::site::get_reading_analytics; use utils::templates::render_template; -use front_matter::{PageFrontMatter, InsertAnchor, split_page_content}; -use rendering::{RenderContext, Header, render_content}; 
-use library::Library; use content::file_info::FileInfo; use content::ser::SerializingPage; @@ -24,7 +24,6 @@ lazy_static! { static ref DATE_IN_FILENAME: Regex = Regex::new(r"^^([12]\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\d|3[01]))(_|-)").unwrap(); } - #[derive(Clone, Debug, PartialEq)] pub struct Page { /// All info about the actual file @@ -71,7 +70,6 @@ pub struct Page { pub reading_time: Option<usize>, } - impl Page { pub fn new<P: AsRef<Path>>(file_path: P, meta: PageFrontMatter) -> Page { let file_path = file_path.as_ref(); @@ -155,7 +153,9 @@ impl Page { page.path = format!("{}/", page.path); } - page.components = page.path.split('/') + page.components = page + .path + .split('/') .map(|p| p.to_string()) .filter(|p| !p.is_empty()) .collect::<Vec<_>>(); @@ -182,13 +182,13 @@ impl Page { // against the remaining path. Note that the current behaviour effectively means that // the `ignored_content` setting in the config file is limited to single-file glob // patterns (no "**" patterns). - page.assets = assets.into_iter() - .filter(|path| - match path.file_name() { - None => true, - Some(file) => !globset.is_match(file) - } - ).collect(); + page.assets = assets + .into_iter() + .filter(|path| match path.file_name() { + None => true, + Some(file) => !globset.is_match(file), + }) + .collect(); } else { page.assets = assets; } @@ -210,13 +210,8 @@ impl Page { config: &Config, anchor_insert: InsertAnchor, ) -> Result<()> { - let mut context = RenderContext::new( - tera, - config, - &self.permalink, - permalinks, - anchor_insert, - ); + let mut context = + RenderContext::new(tera, config, &self.permalink, permalinks, anchor_insert); context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None)); @@ -234,7 +229,7 @@ impl Page { pub fn render_html(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> { let tpl_name = match self.meta.template { Some(ref l) => l.to_string(), - None => "page.html".to_string() + None => "page.html".to_string(), }; let mut context = TeraContext::new(); @@ -249,7 +244,8 @@ impl Page { /// Creates a vectors of asset URLs. fn serialize_assets(&self) -> Vec<String> { - self.assets.iter() + self.assets + .iter() .filter_map(|asset| asset.file_name()) .filter_map(|filename| filename.to_str()) .map(|filename| self.path.clone() + filename) .collect() } @@ -294,19 +290,18 @@ impl Default for Page { #[cfg(test)] mod tests { use std::collections::HashMap; + use std::fs::{create_dir, File}; use std::io::Write; - use std::fs::{File, create_dir}; use std::path::Path; - use tera::Tera; - use tempfile::tempdir; use globset::{Glob, GlobSetBuilder}; + use tempfile::tempdir; + use tera::Tera; - use config::Config; use super::Page; + use config::Config; use front_matter::InsertAnchor; - #[test] fn test_can_parse_a_valid_page() { let content = r#" +++ title = "Hello" slug = "hello-world" +++ Hello world"#; &Tera::default(), &Config::default(), InsertAnchor::None, - ).unwrap(); + ) + .unwrap(); assert_eq!(page.meta.title.unwrap(), "Hello".to_string()); assert_eq!(page.meta.slug.unwrap(), "hello-world".to_string()); @@ -426,16 +422,13 @@ +++ +++ Hello world <!-- more --> -"#.to_string(); +"# + .to_string(); let res = Page::parse(Path::new("hello.md"), &content, &config); assert!(res.is_ok()); let mut page = res.unwrap(); - page.render_markdown( - &HashMap::default(), - &Tera::default(), - &config, - InsertAnchor::None, - ).unwrap(); + page.render_markdown(&HashMap::default(), &Tera::default(), &config, InsertAnchor::None) .unwrap(); assert_eq!(page.summary, Some("
<p>Hello world</p>
\n".to_string())); } @@ -453,10 +446,7 @@ Hello world File::create(nested_path.join("graph.jpg")).unwrap(); File::create(nested_path.join("fail.png")).unwrap(); - let res = Page::from_file( - nested_path.join("index.md").as_path(), - &Config::default(), - ); + let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default()); assert!(res.is_ok()); let page = res.unwrap(); assert_eq!(page.file.parent, path.join("content").join("posts")); @@ -479,10 +469,7 @@ Hello world File::create(nested_path.join("graph.jpg")).unwrap(); File::create(nested_path.join("fail.png")).unwrap(); - let res = Page::from_file( - nested_path.join("index.md").as_path(), - &Config::default(), - ); + let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default()); assert!(res.is_ok()); let page = res.unwrap(); assert_eq!(page.file.parent, path.join("content").join("posts")); @@ -510,10 +497,7 @@ Hello world let mut config = Config::default(); config.ignored_content_globset = Some(gsb.build().unwrap()); - let res = Page::from_file( - nested_path.join("index.md").as_path(), - &config, - ); + let res = Page::from_file(nested_path.join("index.md").as_path(), &config); assert!(res.is_ok()); let page = res.unwrap(); @@ -528,7 +512,8 @@ Hello world +++ +++ Hello world -"#.to_string(); +"# + .to_string(); let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config); assert!(res.is_ok()); let page = res.unwrap(); @@ -539,14 +524,14 @@ #[test] fn frontmatter_date_override_filename_date() { - let config = Config::default(); let content = r#" +++ date = 2018-09-09 +++ Hello world -"#.to_string(); +"# + .to_string(); let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config); assert!(res.is_ok()); let page = res.unwrap(); diff --git a/components/library/src/content/section.rs b/components/library/src/content/section.rs index 1b504fed..82c54ee2 100644 --- a/components/library/src/content/section.rs +++ b/components/library/src/content/section.rs @@ -1,22 +1,21 @@ use std::collections::HashMap; use std::path::{Path, PathBuf}; -use tera::{Tera, Context as TeraContext}; use slotmap::Key; +use tera::{Context as TeraContext, Tera}; use config::Config; -use front_matter::{SectionFrontMatter, split_section_content}; use errors::{Result, ResultExt}; -use utils::fs::{read_file, find_related_assets}; -use utils::templates::render_template; +use front_matter::{split_section_content, SectionFrontMatter}; +use rendering::{render_content, Header, RenderContext}; +use utils::fs::{find_related_assets, read_file}; use utils::site::get_reading_analytics; -use rendering::{RenderContext, Header, render_content}; +use utils::templates::render_template; use content::file_info::FileInfo; use content::ser::SerializingSection; use library::Library; - #[derive(Clone, Debug, PartialEq)] pub struct Section { /// All info about the actual file @@ -86,7 +85,9 @@ impl Section { section.word_count = Some(word_count); section.reading_time = Some(reading_time); section.path = format!("{}/", section.file.components.join("/")); - section.components = section.path.split('/') + section.components = section + .path + .split('/') .map(|p| p.to_string()) .filter(|p| !p.is_empty()) .collect::<Vec<_>>(); @@ -111,13 +112,13 @@ impl Section { // against the remaining path. Note that the current behaviour effectively means that // the `ignored_content` setting in the config file is limited to single-file glob // patterns (no "**" patterns). 
- section.assets = assets.into_iter() - .filter(|path| - match path.file_name() { - None => true, - Some(file) => !globset.is_match(file) - } - ).collect(); + section.assets = assets + .into_iter() + .filter(|path| match path.file_name() { + None => true, + Some(file) => !globset.is_match(file), + }) + .collect(); } else { section.assets = assets; } @@ -185,7 +186,8 @@ impl Section { /// Creates a vectors of asset URLs. fn serialize_assets(&self) -> Vec<String> { - self.assets.iter() + self.assets + .iter() .filter_map(|asset| asset.file_name()) .filter_map(|filename| filename.to_str()) .map(|filename| self.path.clone() + filename) .collect() } @@ -227,14 +229,14 @@ impl Default for Section { #[cfg(test)] mod tests { + use std::fs::{create_dir, File}; use std::io::Write; - use std::fs::{File, create_dir}; - use tempfile::tempdir; use globset::{Glob, GlobSetBuilder}; + use tempfile::tempdir; - use config::Config; use super::Section; + use config::Config; #[test] fn section_with_assets_gets_right_info() { @@ -250,10 +252,7 @@ mod tests { File::create(nested_path.join("graph.jpg")).unwrap(); File::create(nested_path.join("fail.png")).unwrap(); - let res = Section::from_file( - nested_path.join("_index.md").as_path(), - &Config::default(), - ); + let res = Section::from_file(nested_path.join("_index.md").as_path(), &Config::default()); assert!(res.is_ok()); let section = res.unwrap(); assert_eq!(section.assets.len(), 3); @@ -279,10 +278,7 @@ mod tests { let mut config = Config::default(); config.ignored_content_globset = Some(gsb.build().unwrap()); - let res = Section::from_file( - nested_path.join("_index.md").as_path(), - &config, - ); + let res = Section::from_file(nested_path.join("_index.md").as_path(), &config); assert!(res.is_ok()); let page = res.unwrap(); diff --git a/components/library/src/content/ser.rs b/components/library/src/content/ser.rs index cf7d97ec..6fbe51b2 100644 --- a/components/library/src/content/ser.rs +++ b/components/library/src/content/ser.rs @@ -1,13 +1,12 @@ //! 
What we are sending to the templates when rendering them use std::collections::HashMap; -use tera::{Value, Map}; +use tera::{Map, Value}; -use library::Library; use content::{Page, Section}; +use library::Library; use rendering::Header; - #[derive(Clone, Debug, PartialEq, Serialize)] pub struct SerializingPage<'a> { relative_path: &'a str, @@ -49,11 +48,23 @@ impl<'a> SerializingPage<'a> { day = Some(d.2); } let pages = library.pages(); - let lighter = page.lighter.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let heavier = page.heavier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let earlier = page.earlier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let later = page.later.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); - let ancestors = page.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect(); + let lighter = page + .lighter + .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); + let heavier = page + .heavier + .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); + let earlier = page + .earlier + .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); + let later = page + .later + .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library)))); + let ancestors = page + .ancestors + .iter() + .map(|k| library.get_section_by_key(*k).file.relative.clone()) + .collect(); SerializingPage { relative_path: &page.file.relative, @@ -95,7 +106,10 @@ impl<'a> SerializingPage<'a> { day = Some(d.2); } let ancestors = if let Some(ref lib) = library { - page.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect() + page.ancestors + .iter() + .map(|k| lib.get_section_by_key(*k).file.relative.clone()) + .collect() } else { vec![] }; @@ -130,7 +144,6 @@ impl<'a> SerializingPage<'a> { } } - #[derive(Clone, Debug, PartialEq, Serialize)] pub struct SerializingSection<'a> { relative_path: &'a str, @@ -145,7 +158,7 @@ pub struct SerializingSection<'a> { word_count: Option<usize>, reading_time: Option<usize>, toc: &'a [Header], - assets: &'a [String], + assets: &'a [String], pages: Vec<SerializingPage<'a>>, subsections: Vec<&'a str>, } @@ -163,7 +176,11 @@ impl<'a> SerializingSection<'a> { subsections.push(library.get_section_path_by_key(*k)); } - let ancestors = section.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect(); + let ancestors = section + .ancestors + .iter() + .map(|k| library.get_section_by_key(*k).file.relative.clone()) + .collect(); SerializingSection { relative_path: &section.file.relative, @@ -187,7 +204,11 @@ impl<'a> SerializingSection<'a> { /// Same as from_section but doesn't fetch pages and sections pub fn from_section_basic(section: &'a Section, library: Option<&'a Library>) -> Self { let ancestors = if let Some(ref lib) = library { - section.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect() + section + .ancestors + .iter() + .map(|k| lib.get_section_by_key(*k).file.relative.clone()) + .collect() } else { vec![] }; diff --git a/components/library/src/lib.rs b/components/library/src/lib.rs index 7a0e3284..9f851e05 100644 --- a/components/library/src/lib.rs +++ b/components/library/src/lib.rs @@ -1,39 +1,39 @@ -extern crate tera; -extern crate slug; extern crate serde; +extern crate slug; +extern crate tera; #[macro_use] extern crate serde_derive; extern 
crate chrono; -extern crate slotmap; extern crate rayon; +extern crate slotmap; #[macro_use] extern crate lazy_static; extern crate regex; +#[cfg(test)] +extern crate globset; #[cfg(test)] extern crate tempfile; #[cfg(test)] extern crate toml; -#[cfg(test)] -extern crate globset; -extern crate front_matter; extern crate config; -extern crate utils; +extern crate front_matter; extern crate rendering; +extern crate utils; #[macro_use] extern crate errors; mod content; -mod taxonomies; +mod library; mod pagination; mod sorting; -mod library; +mod taxonomies; -pub use slotmap::{Key, DenseSlotMap}; +pub use slotmap::{DenseSlotMap, Key}; -pub use sorting::sort_actual_pages_by_date; -pub use content::{Page, SerializingPage, Section, SerializingSection}; +pub use content::{Page, Section, SerializingPage, SerializingSection}; pub use library::Library; -pub use taxonomies::{Taxonomy, TaxonomyItem, find_taxonomies}; pub use pagination::Paginator; +pub use sorting::sort_actual_pages_by_date; +pub use taxonomies::{find_taxonomies, Taxonomy, TaxonomyItem}; diff --git a/components/library/src/library.rs b/components/library/src/library.rs index 2219054f..13f750ad 100644 --- a/components/library/src/library.rs +++ b/components/library/src/library.rs @@ -5,9 +5,8 @@ use slotmap::{DenseSlotMap, Key}; use front_matter::SortBy; -use sorting::{find_siblings, sort_pages_by_weight, sort_pages_by_date}; use content::{Page, Section}; - +use sorting::{find_siblings, sort_pages_by_date, sort_pages_by_weight}; /// Houses everything about pages and sections /// Think of it as a database where each page and section has an id (Key here) @@ -81,12 +80,13 @@ impl Library { /// Find out the direct subsections of each subsection if there are some /// as well as the pages for each section pub fn populate_sections(&mut self) { - let (root_path, index_path) = self.sections + let (root_path, index_path) = self + .sections .values() .find(|s| s.is_index()) .map(|s| (s.file.parent.clone(), s.file.path.clone())) .unwrap(); - let root_key = self.paths_to_sections[&index_path]; + let root_key = self.paths_to_sections[&index_path]; // We are going to get both the ancestors and grandparents for each section in one go let mut ancestors: HashMap<PathBuf, Vec<Key>> = HashMap::new(); @@ -130,7 +130,8 @@ impl Library { let parent_section_path = page.file.parent.join("_index.md"); if let Some(section_key) = self.paths_to_sections.get(&parent_section_path) { self.sections.get_mut(*section_key).unwrap().pages.push(key); - page.ancestors = ancestors.get(&parent_section_path).cloned().unwrap_or_else(|| vec![]); + page.ancestors = + ancestors.get(&parent_section_path).cloned().unwrap_or_else(|| vec![]); // Don't forget to push the actual parent page.ancestors.push(*section_key); } @@ -150,7 +151,8 @@ impl Library { children.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b])); section.subsections = children; } - section.ancestors = ancestors.get(&section.file.path).cloned().unwrap_or_else(|| vec![]); + section.ancestors = + ancestors.get(&section.file.path).cloned().unwrap_or_else(|| vec![]); } } @@ -161,7 +163,8 @@ impl Library { let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by { SortBy::None => continue, SortBy::Date => { - let data = section.pages + let data = section + .pages .iter() .map(|k| { if let Some(page) = self.pages.get(*k) { @@ -173,9 +176,10 @@ .collect(); sort_pages_by_date(data) - }, + } SortBy::Weight => { - let data = section.pages + let data = section + .pages .iter() .map(|k| { if let Some(page) = 
self.pages.get(*k) { @@ -194,13 +198,18 @@ for (key, (sorted, cannot_be_sorted, sort_by)) in updates { // Find sibling between sorted pages first - let with_siblings = find_siblings(sorted.iter().map(|k| { - if let Some(page) = self.pages.get(*k) { - (k, page.is_draft()) - } else { - unreachable!("Sorting got an unknown page") - } - }).collect()); + let with_siblings = find_siblings( + sorted + .iter() + .map(|k| { + if let Some(page) = self.pages.get(*k) { + (k, page.is_draft()) + } else { + unreachable!("Sorting got an unknown page") + } + }) + .collect(), + ); for (k2, val1, val2) in with_siblings { if let Some(page) = self.pages.get_mut(k2) { @@ -208,12 +217,12 @@ SortBy::Date => { page.earlier = val2; page.later = val1; - }, + } SortBy::Weight => { page.lighter = val1; page.heavier = val2; - }, - SortBy::None => unreachable!("Impossible to find siblings in SortBy::None") + } + SortBy::None => unreachable!("Impossible to find siblings in SortBy::None"), } } else { unreachable!("Sorting got an unknown page") @@ -229,10 +238,8 @@ /// Find all the orphan pages: pages that are in a folder without an `_index.md` pub fn get_all_orphan_pages(&self) -> Vec<&Page> { - let pages_in_sections = self.sections - .values() - .flat_map(|s| &s.pages) - .collect::<HashSet<_>>(); + let pages_in_sections = + self.sections.values().flat_map(|s| &s.pages).collect::<HashSet<_>>(); self.pages .iter() @@ -245,7 +252,7 @@ let page_key = self.paths_to_pages[path]; for s in self.sections.values() { if s.pages.contains(&page_key) { - return Some(s) + return Some(s); } } diff --git a/components/library/src/pagination/mod.rs b/components/library/src/pagination/mod.rs index cebefb6e..47a660e1 100644 --- a/components/library/src/pagination/mod.rs +++ b/components/library/src/pagination/mod.rs @@ -1,16 +1,15 @@ use std::collections::HashMap; -use tera::{Tera, Context, to_value, Value}; -use slotmap::{Key}; +use slotmap::Key; +use tera::{to_value, Context, Tera, Value}; -use errors::{Result, ResultExt}; use config::Config; +use errors::{Result, ResultExt}; use utils::templates::render_template; -use content::{Section, SerializingSection, SerializingPage}; -use taxonomies::{TaxonomyItem, Taxonomy}; +use content::{Section, SerializingPage, SerializingSection}; use library::Library; - +use taxonomies::{Taxonomy, TaxonomyItem}; #[derive(Clone, Debug, PartialEq)] enum PaginationRoot<'a> { @@ -18,7 +17,6 @@ Taxonomy(&'a Taxonomy), } - /// A list of all the pages in the paginator with their index and links #[derive(Clone, Debug, PartialEq, Serialize)] pub struct Pager<'a> { @@ -33,13 +31,13 @@ } impl<'a> Pager<'a> { - fn new(index: usize, pages: Vec<SerializingPage<'a>>, permalink: String, path: String) -> Pager<'a> { - Pager { - index, - permalink, - path, - pages, - } + fn new( + index: usize, + pages: Vec<SerializingPage<'a>>, + permalink: String, + path: String, + ) -> Pager<'a> { + Pager { index, permalink, path, pages } } } @@ -83,7 +81,11 @@ impl<'a> Paginator<'a> { /// Create a new paginator from a taxonomy /// It will always at least create one pager (the first) even if there are not enough pages to paginate - pub fn from_taxonomy(taxonomy: &'a Taxonomy, item: &'a TaxonomyItem, library: &'a Library) -> Paginator<'a> { + pub fn from_taxonomy( + taxonomy: &'a Taxonomy, + item: &'a TaxonomyItem, + library: &'a Library, + ) -> Paginator<'a> { let paginate_by = taxonomy.kind.paginate_by.unwrap(); let mut paginator = Paginator { all_pages: &item.pages, @@ -92,7 +94,11 @@ 
impl<'a> Paginator<'a> { root: PaginationRoot::Taxonomy(taxonomy), permalink: item.permalink.clone(), path: format!("{}/{}", taxonomy.kind.name, item.slug), - paginate_path: taxonomy.kind.paginate_path.clone().unwrap_or_else(|| "pages".to_string()), + paginate_path: taxonomy + .kind + .paginate_path + .clone() + .unwrap_or_else(|| "pages".to_string()), is_index: false, }; @@ -142,12 +148,7 @@ impl<'a> Paginator<'a> { format!("{}/{}", self.path, page_path) }; - pagers.push(Pager::new( - index + 1, - page, - permalink, - pager_path, - )); + pagers.push(Pager::new(index + 1, page, permalink, pager_path)); } // We always have the index one at least @@ -184,19 +185,29 @@ impl<'a> Paginator<'a> { paginator.insert("next", Value::Null); } paginator.insert("number_pagers", to_value(&self.pagers.len()).unwrap()); - paginator.insert("base_url", to_value(&format!("{}{}/", self.permalink, self.paginate_path)).unwrap()); + paginator.insert( + "base_url", + to_value(&format!("{}{}/", self.permalink, self.paginate_path)).unwrap(), + ); paginator.insert("pages", to_value(&current_pager.pages).unwrap()); paginator.insert("current_index", to_value(current_pager.index).unwrap()); paginator } - pub fn render_pager(&self, pager: &Pager, config: &Config, tera: &Tera, library: &Library) -> Result<String> { + pub fn render_pager( + &self, + pager: &Pager, + config: &Config, + tera: &Tera, + library: &Library, + ) -> Result<String> { let mut context = Context::new(); context.insert("config", &config); let template_name = match self.root { PaginationRoot::Section(s) => { - context.insert("section", &SerializingSection::from_section_basic(s, Some(library))); + context + .insert("section", &SerializingSection::from_section_basic(s, Some(library))); s.get_template_name() } PaginationRoot::Taxonomy(t) => { @@ -217,11 +228,11 @@ impl<'a> Paginator<'a> { mod tests { use tera::to_value; - use front_matter::SectionFrontMatter; - use content::{Page, Section}; use config::Taxonomy as TaxonomyConfig; - use taxonomies::{Taxonomy, TaxonomyItem}; + use content::{Page, Section}; + use front_matter::SectionFrontMatter; use library::Library; + use taxonomies::{Taxonomy, TaxonomyItem}; use super::Paginator; diff --git a/components/library/src/sorting.rs b/components/library/src/sorting.rs index cfcf053c..20844d83 100644 --- a/components/library/src/sorting.rs +++ b/components/library/src/sorting.rs @@ -1,8 +1,8 @@ use std::cmp::Ordering; +use chrono::NaiveDateTime; use rayon::prelude::*; use slotmap::Key; -use chrono::NaiveDateTime; use content::Page; @@ -21,19 +21,17 @@ pub fn sort_actual_pages_by_date(a: &&Page, b: &&Page) -> Ordering { /// Pages without date will be put in the unsortable bucket /// The permalink is used to break ties pub fn sort_pages_by_date(pages: Vec<(&Key, Option<NaiveDateTime>, &str)>) -> (Vec<Key>, Vec<Key>) { - let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages - .into_par_iter() - .partition(|page| page.1.is_some()); + let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = + pages.into_par_iter().partition(|page| page.1.is_some()); - can_be_sorted - .par_sort_unstable_by(|a, b| { - let ord = b.1.unwrap().cmp(&a.1.unwrap()); - if ord == Ordering::Equal { - a.2.cmp(&b.2) - } else { - ord - } - }); + can_be_sorted.par_sort_unstable_by(|a, b| { + let ord = b.1.unwrap().cmp(&a.1.unwrap()); + if ord == Ordering::Equal { + a.2.cmp(&b.2) + } else { + ord + } + }); (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect()) } @@ -42,19 +40,17 @@ pub fn sort_pages_by_date(pages: Vec<(&Key, Option<NaiveDateTime>, 
&str)>) -> (V /// Pages without weight will be put in the unsortable bucket /// The permalink is used to break ties pub fn sort_pages_by_weight(pages: Vec<(&Key, Option<usize>, &str)>) -> (Vec<Key>, Vec<Key>) { - let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = pages - .into_par_iter() - .partition(|page| page.1.is_some()); + let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) = + pages.into_par_iter().partition(|page| page.1.is_some()); - can_be_sorted - .par_sort_unstable_by(|a, b| { - let ord = a.1.unwrap().cmp(&b.1.unwrap()); - if ord == Ordering::Equal { - a.2.cmp(&b.2) - } else { - ord - } - }); + can_be_sorted.par_sort_unstable_by(|a, b| { + let ord = a.1.unwrap().cmp(&b.1.unwrap()); + if ord == Ordering::Equal { + a.2.cmp(&b.2) + } else { + ord + } + }); (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect()) } @@ -118,9 +114,9 @@ pub fn find_siblings(sorted: Vec<(&Key, bool)>) -> Vec<(Key, Option<Key>, Option mod tests { use slotmap::DenseSlotMap; - use front_matter::{PageFrontMatter}; + use super::{find_siblings, sort_pages_by_date, sort_pages_by_weight}; use content::Page; - use super::{sort_pages_by_date, sort_pages_by_weight, find_siblings}; + use front_matter::PageFrontMatter; fn create_page_with_date(date: &str) -> Page { let mut front_matter = PageFrontMatter::default(); @@ -179,7 +175,6 @@ mod tests { assert_eq!(pages[2], key2); } - #[test] fn ignore_page_with_missing_field() { let mut dense = DenseSlotMap::new(); let page1 = create_page_with_weight(2); let key1 = dense.insert(page1.clone()); let page2 = create_page_with_weight(3); let key2 = dense.insert(page2.clone()); let page3 = create_page_with_date("2019-01-01"); let key3 = dense.insert(page3.clone()); let input = vec![ (&key1, page1.meta.weight, page1.permalink.as_ref()), (&key2, page2.meta.weight, page2.permalink.as_ref()), (&key3, page3.meta.weight, page3.permalink.as_ref()), ]; - let (pages,unsorted) = sort_pages_by_weight(input); + let (pages, unsorted) = sort_pages_by_weight(input); assert_eq!(pages.len(), 2); assert_eq!(unsorted.len(), 1); } @@ -211,11 +206,8 @@ let page3 = create_page_with_weight(3); let key3 = dense.insert(page3.clone()); - let input = vec![ - (&key1, page1.is_draft()), - (&key2, page2.is_draft()), - (&key3, page3.is_draft()), - ]; + let input = + vec![(&key1, page1.is_draft()), (&key2, page2.is_draft()), (&key3, page3.is_draft())]; let pages = find_siblings(input); diff --git a/components/library/src/taxonomies/mod.rs b/components/library/src/taxonomies/mod.rs index 0be07117..f245cd33 100644 --- a/components/library/src/taxonomies/mod.rs +++ b/components/library/src/taxonomies/mod.rs @@ -1,16 +1,16 @@ use std::collections::HashMap; +use slotmap::Key; use slug::slugify; use tera::{Context, Tera}; -use slotmap::{Key}; use config::{Config, Taxonomy as TaxonomyConfig}; use errors::{Result, ResultExt}; use utils::templates::render_template; use content::SerializingPage; -use sorting::sort_pages_by_date; use library::Library; +use sorting::sort_pages_by_date; #[derive(Debug, Clone, PartialEq, Serialize)] struct SerializedTaxonomyItem<'a> { @@ -34,7 +34,6 @@ impl<'a> SerializedTaxonomyItem<'a> { slug: &item.slug, permalink: &item.permalink, pages, - } } } @@ -70,12 +69,7 @@ impl TaxonomyItem { // We still append pages without dates at the end pages.extend(ignored_pages); - TaxonomyItem { - name: name.to_string(), - permalink, - slug, - pages, - } + TaxonomyItem { name: name.to_string(), permalink, slug, pages } } } @@ -87,11 +81,9 @@ pub struct SerializedTaxonomy<'a> { impl<'a> SerializedTaxonomy<'a> { pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self { - let items: Vec<SerializedTaxonomyItem> = taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); - SerializedTaxonomy { - kind: &taxonomy.kind, - items, - } + let items: Vec<SerializedTaxonomyItem> = + 
taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); + SerializedTaxonomy { kind: &taxonomy.kind, items } } } @@ -104,19 +96,19 @@ pub struct Taxonomy { } impl Taxonomy { - fn new(kind: TaxonomyConfig, config: &Config, items: HashMap<String, Vec<Key>>, library: &Library) -> Taxonomy { + fn new( + kind: TaxonomyConfig, + config: &Config, + items: HashMap<String, Vec<Key>>, + library: &Library, + ) -> Taxonomy { let mut sorted_items = vec![]; for (name, pages) in items { - sorted_items.push( - TaxonomyItem::new(&name, &kind.name, config, pages, library) - ); + sorted_items.push(TaxonomyItem::new(&name, &kind.name, config, pages, library)); } sorted_items.sort_by(|a, b| a.name.cmp(&b.name)); - Taxonomy { - kind, - items: sorted_items, - } + Taxonomy { kind, items: sorted_items } } pub fn len(&self) -> usize { @@ -127,22 +119,37 @@ impl Taxonomy { self.len() == 0 } - pub fn render_term(&self, item: &TaxonomyItem, tera: &Tera, config: &Config, library: &Library) -> Result<String> { + pub fn render_term( + &self, + item: &TaxonomyItem, + tera: &Tera, + config: &Config, + library: &Library, + ) -> Result<String> { let mut context = Context::new(); context.insert("config", config); context.insert("term", &SerializedTaxonomyItem::from_item(item, library)); context.insert("taxonomy", &self.kind); - context.insert("current_url", &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug))); + context.insert( + "current_url", + &config.make_permalink(&format!("{}/{}", self.kind.name, item.slug)), + ); context.insert("current_path", &format!("/{}/{}", self.kind.name, item.slug)); render_template(&format!("{}/single.html", self.kind.name), tera, &context, &config.theme) .chain_err(|| format!("Failed to render single term {} page.", self.kind.name)) } - pub fn render_all_terms(&self, tera: &Tera, config: &Config, library: &Library) -> Result<String> { + pub fn render_all_terms( + &self, + tera: &Tera, + config: &Config, + library: &Library, + ) -> Result<String> { let mut context = Context::new(); context.insert("config", config); - let terms: Vec<SerializedTaxonomyItem> = self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); + let terms: Vec<SerializedTaxonomyItem> = + self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect(); context.insert("terms", &terms); context.insert("taxonomy", &self.kind); context.insert("current_url", &config.make_permalink(&self.kind.name)); @@ -175,19 +182,22 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result Result LinkResult { // Need to actually do the link checking let res = match client.get(url).headers(headers).send() { - Ok(response) => LinkResult { - code: Some(response.status()), - error: None, - }, - Err(e) => LinkResult { - code: None, - error: Some(e.description().to_string()), - }, + Ok(response) => LinkResult { code: Some(response.status()), error: None }, + Err(e) => LinkResult { code: None, error: Some(e.description().to_string()) }, }; LINKS.write().unwrap().insert(url.to_string(), res.clone()); diff --git a/components/rebuild/src/lib.rs b/components/rebuild/src/lib.rs index b21b173a..3a5cc4c6 100644 --- a/components/rebuild/src/lib.rs +++ b/components/rebuild/src/lib.rs @@ -1,16 +1,15 @@ extern crate site; #[macro_use] extern crate errors; -extern crate library; extern crate front_matter; +extern crate library; -use std::path::{Path, Component}; +use std::path::{Component, Path}; use errors::Result; -use site::Site; -use library::{Page, Section}; use front_matter::{PageFrontMatter, SectionFrontMatter}; - +use library::{Page, Section}; +use site::Site; 
#[derive(Debug, Clone, Copy, PartialEq)] pub enum PageChangesNeeded { @@ -37,7 +36,10 @@ pub enum SectionChangesNeeded { /// Evaluates all the params in the front matter that changed so we can do the smallest /// delta in the serve command /// Order matters as the actions will be done in insertion order -fn find_section_front_matter_changes(current: &SectionFrontMatter, new: &SectionFrontMatter) -> Vec<SectionChangesNeeded> { +fn find_section_front_matter_changes( + current: &SectionFrontMatter, + new: &SectionFrontMatter, +) -> Vec<SectionChangesNeeded> { let mut changes_needed = vec![]; if current.sort_by != new.sort_by { @@ -54,7 +56,8 @@ if current.paginate_by != new.paginate_by || current.paginate_path != new.paginate_path - || current.insert_anchor_links != new.insert_anchor_links { + || current.insert_anchor_links != new.insert_anchor_links + { changes_needed.push(SectionChangesNeeded::RenderWithPages); // Nothing else we can do return changes_needed; @@ -68,14 +71,18 @@ /// Evaluates all the params in the front matter that changed so we can do the smallest /// delta in the serve command /// Order matters as the actions will be done in insertion order -fn find_page_front_matter_changes(current: &PageFrontMatter, other: &PageFrontMatter) -> Vec<PageChangesNeeded> { +fn find_page_front_matter_changes( + current: &PageFrontMatter, + other: &PageFrontMatter, +) -> Vec<PageChangesNeeded> { let mut changes_needed = vec![]; if current.taxonomies != other.taxonomies { changes_needed.push(PageChangesNeeded::Taxonomies); } - if current.date != other.date || current.order != other.order || current.weight != other.weight { + if current.date != other.date || current.order != other.order || current.weight != other.weight + { changes_needed.push(PageChangesNeeded::Sort); } @@ -86,7 +93,9 @@ fn find_page_front_matter_changes(current: &PageFrontMatter, other: &PageFrontMa /// Handles a path deletion: could be a page, a section, a folder fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> { // Ignore the event if this path was not known - if !site.library.contains_section(&path.to_path_buf()) && !site.library.contains_page(&path.to_path_buf()) { + if !site.library.contains_section(&path.to_path_buf()) + && !site.library.contains_page(&path.to_path_buf()) + { return Ok(()); } @@ -127,14 +136,21 @@ fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> { } // Front matter changed - for changes in find_section_front_matter_changes(&site.library.get_section(&pathbuf).unwrap().meta, &prev.meta) { + for changes in find_section_front_matter_changes( + &site.library.get_section(&pathbuf).unwrap().meta, + &prev.meta, + ) { // Sort always comes first if present so the rendering will be fine match changes { SectionChangesNeeded::Sort => { site.register_tera_global_fns(); } - SectionChangesNeeded::Render => site.render_section(&site.library.get_section(&pathbuf).unwrap(), false)?, - SectionChangesNeeded::RenderWithPages => site.render_section(&site.library.get_section(&pathbuf).unwrap(), true)?, + SectionChangesNeeded::Render => { + site.render_section(&site.library.get_section(&pathbuf).unwrap(), false)? + } + SectionChangesNeeded::RenderWithPages => { + site.render_section(&site.library.get_section(&pathbuf).unwrap(), true)? + } // not a common enough operation to make it worth optimizing SectionChangesNeeded::Delete => { site.build()?; @@ -157,7 +173,7 @@ macro_rules! 
render_parent_section { if let Some(s) = $site.library.find_parent_section($path) { $site.render_section(s, false)?; }; - } + }; } /// Handles a page being edited in some ways @@ -181,7 +197,10 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> { } // Front matter changed - for changes in find_page_front_matter_changes(&site.library.get_page(&pathbuf).unwrap().meta, &prev.meta) { + for changes in find_page_front_matter_changes( + &site.library.get_page(&pathbuf).unwrap().meta, + &prev.meta, + ) { site.register_tera_global_fns(); // Sort always comes first if present so the rendering will be fine @@ -213,7 +232,6 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> { } } - /// What happens when a section or a page is changed pub fn after_content_change(site: &mut Site, path: &Path) -> Result<()> { let is_section = path.file_name().unwrap() == "_index.md"; @@ -294,16 +312,15 @@ pub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> { } } - #[cfg(test)] mod tests { use std::collections::HashMap; - use front_matter::{PageFrontMatter, SectionFrontMatter, SortBy}; use super::{ - find_page_front_matter_changes, find_section_front_matter_changes, - PageChangesNeeded, SectionChangesNeeded, + find_page_front_matter_changes, find_section_front_matter_changes, PageChangesNeeded, + SectionChangesNeeded, }; + use front_matter::{PageFrontMatter, SectionFrontMatter, SortBy}; #[test] fn can_find_taxonomy_changes_in_page_frontmatter() { @@ -320,7 +337,10 @@ mod tests { taxonomies.insert("categories".to_string(), vec!["a category".to_string()]); let current = PageFrontMatter { taxonomies, order: Some(1), ..PageFrontMatter::default() }; let changes = find_page_front_matter_changes(¤t, &PageFrontMatter::default()); - assert_eq!(changes, vec![PageChangesNeeded::Taxonomies, PageChangesNeeded::Sort, PageChangesNeeded::Render]); + assert_eq!( + changes, + vec![PageChangesNeeded::Taxonomies, PageChangesNeeded::Sort, PageChangesNeeded::Render] + ); } #[test] diff --git a/components/rebuild/tests/rebuild.rs b/components/rebuild/tests/rebuild.rs index 42bbae2e..561c2b18 100644 --- a/components/rebuild/tests/rebuild.rs +++ b/components/rebuild/tests/rebuild.rs @@ -1,89 +1,88 @@ +extern crate fs_extra; extern crate rebuild; extern crate site; extern crate tempfile; -extern crate fs_extra; use std::env; use std::fs::{remove_dir_all, File}; use std::io::prelude::*; use fs_extra::dir; -use tempfile::tempdir; use site::Site; +use tempfile::tempdir; use rebuild::after_content_change; // Loads the test_site in a tempdir and build it there // Returns (site_path_in_tempdir, site) macro_rules! 
load_and_build_site { - ($tmp_dir: expr) => { - { - let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf(); - path.push("test_site"); - let mut options = dir::CopyOptions::new(); - options.copy_inside = true; - dir::copy(&path, &$tmp_dir, &options).unwrap(); + ($tmp_dir: expr) => {{ + let mut path = + env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf(); + path.push("test_site"); + let mut options = dir::CopyOptions::new(); + options.copy_inside = true; + dir::copy(&path, &$tmp_dir, &options).unwrap(); - let site_path = $tmp_dir.path().join("test_site"); - // delete useless sections for those tests - remove_dir_all(site_path.join("content").join("paginated")).unwrap(); - remove_dir_all(site_path.join("content").join("posts")).unwrap(); + let site_path = $tmp_dir.path().join("test_site"); + // delete useless sections for those tests + remove_dir_all(site_path.join("content").join("paginated")).unwrap(); + remove_dir_all(site_path.join("content").join("posts")).unwrap(); - let mut site = Site::new(&site_path, "config.toml").unwrap(); - site.load().unwrap(); - let public = &site_path.join("public"); - site.set_output_path(&public); - site.build().unwrap(); + let mut site = Site::new(&site_path, "config.toml").unwrap(); + site.load().unwrap(); + let public = &site_path.join("public"); + site.set_output_path(&public); + site.build().unwrap(); - (site_path, site) - } - } + (site_path, site) + }}; } /// Replace the file at the path (starting from root) by the given content /// and return the file path that was modified macro_rules! edit_file { - ($site_path: expr, $path: expr, $content: expr) => { - { - let mut t = $site_path.clone(); - for c in $path.split('/') { - t.push(c); - } - let mut file = File::create(&t).expect("Could not open/create file"); - file.write_all($content).expect("Could not write to the file"); - t + ($site_path: expr, $path: expr, $content: expr) => {{ + let mut t = $site_path.clone(); + for c in $path.split('/') { + t.push(c); } - } + let mut file = File::create(&t).expect("Could not open/create file"); + file.write_all($content).expect("Could not write to the file"); + t + }}; } macro_rules! 
file_contains { - ($site_path: expr, $path: expr, $text: expr) => { - { - let mut path = $site_path.clone(); - for component in $path.split("/") { - path.push(component); - } - let mut file = File::open(&path).unwrap(); - let mut s = String::new(); - file.read_to_string(&mut s).unwrap(); - println!("{:?} -> {}", path, s); - s.contains($text) + ($site_path: expr, $path: expr, $text: expr) => {{ + let mut path = $site_path.clone(); + for component in $path.split("/") { + path.push(component); } - } + let mut file = File::open(&path).unwrap(); + let mut s = String::new(); + file.read_to_string(&mut s).unwrap(); + println!("{:?} -> {}", path, s); + s.contains($text) + }}; } #[test] fn can_rebuild_after_simple_change_to_page_content() { let tmp_dir = tempdir().expect("create temp dir"); let (site_path, mut site) = load_and_build_site!(tmp_dir); - let file_path = edit_file!(site_path, "content/rebuild/first.md", br#" + let file_path = edit_file!( + site_path, + "content/rebuild/first.md", + br#" +++ title = "first" weight = 1 date = 2017-01-01 +++ -Some content"#); +Some content"# + ); let res = after_content_change(&mut site, &file_path); assert!(res.is_ok()); @@ -94,14 +93,18 @@ Some content"#); fn can_rebuild_after_title_change_page_global_func_usage() { let tmp_dir = tempdir().expect("create temp dir"); let (site_path, mut site) = load_and_build_site!(tmp_dir); - let file_path = edit_file!(site_path, "content/rebuild/first.md", br#" + let file_path = edit_file!( + site_path, + "content/rebuild/first.md", + br#" +++ title = "Premier" weight = 10 date = 2017-01-01 +++ -# A title"#); +# A title"# + ); let res = after_content_change(&mut site, &file_path); assert!(res.is_ok()); @@ -112,15 +115,23 @@ date = 2017-01-01 fn can_rebuild_after_sort_change_in_section() { let tmp_dir = tempdir().expect("create temp dir"); let (site_path, mut site) = load_and_build_site!(tmp_dir); - let file_path = edit_file!(site_path, "content/rebuild/_index.md", br#" + let file_path = edit_file!( + site_path, + "content/rebuild/_index.md", + br#" +++ paginate_by = 1 sort_by = "weight" template = "rebuild.html" +++ -"#); +"# + ); let res = after_content_change(&mut site, &file_path); assert!(res.is_ok()); - assert!(file_contains!(site_path, "public/rebuild/index.html", "
<h1>first</h1><h1>second</h1>
")); + assert!(file_contains!( + site_path, + "public/rebuild/index.html", + "
<h1>first</h1><h1>second</h1>
" + )); } diff --git a/components/rendering/benches/all.rs b/components/rendering/benches/all.rs index 1f5f17a9..2049c5a8 100644 --- a/components/rendering/benches/all.rs +++ b/components/rendering/benches/all.rs @@ -1,18 +1,18 @@ #![feature(test)] -extern crate test; extern crate tera; +extern crate test; -extern crate rendering; extern crate config; extern crate front_matter; +extern crate rendering; use std::collections::HashMap; use std::path::Path; -use tera::Tera; -use rendering::{RenderContext, render_content, render_shortcodes}; -use front_matter::InsertAnchor; use config::Config; +use front_matter::InsertAnchor; +use rendering::{render_content, render_shortcodes, RenderContext}; +use tera::Tera; static CONTENT: &'static str = r#" # Modus cognitius profanam ne duae virtutis mundi @@ -92,7 +92,8 @@ fn bench_render_content_with_highlighting(b: &mut test::Bencher) { tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap(); let permalinks_ctx = HashMap::new(); let config = Config::default(); - let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); + let context = + RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); b.iter(|| render_content(CONTENT, &context).unwrap()); } @@ -103,7 +104,8 @@ fn bench_render_content_without_highlighting(b: &mut test::Bencher) { let permalinks_ctx = HashMap::new(); let mut config = Config::default(); config.highlight_code = false; - let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); + let context = + RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); b.iter(|| render_content(CONTENT, &context).unwrap()); } @@ -114,7 +116,8 @@ fn bench_render_content_no_shortcode(b: &mut test::Bencher) { let mut config = Config::default(); config.highlight_code = false; let permalinks_ctx = HashMap::new(); - let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); + let context = + RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); b.iter(|| render_content(&content2, &context).unwrap()); } @@ -125,8 +128,8 @@ fn bench_render_shortcodes_one_present(b: &mut test::Bencher) { tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap(); let config = Config::default(); let permalinks_ctx = HashMap::new(); - let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); + let context = + RenderContext::new(&tera, &config, "", &permalinks_ctx, Path::new(""), InsertAnchor::None); b.iter(|| render_shortcodes(CONTENT, &context)); } - diff --git a/components/rendering/src/context.rs b/components/rendering/src/context.rs index 829c6ce9..8d877d1e 100644 --- a/components/rendering/src/context.rs +++ b/components/rendering/src/context.rs @@ -1,9 +1,8 @@ use std::collections::HashMap; -use tera::{Tera, Context}; -use front_matter::InsertAnchor; use config::Config; - +use front_matter::InsertAnchor; +use tera::{Context, Tera}; /// All the information from the zola site that is needed to render HTML from markdown #[derive(Debug)] diff --git a/components/rendering/src/lib.rs b/components/rendering/src/lib.rs index d9dc12dc..5a7ef195 100644 --- a/components/rendering/src/lib.rs +++ b/components/rendering/src/lib.rs @@ -1,35 +1,35 @@ -extern crate tera; -extern crate syntect; extern crate pulldown_cmark; extern crate slug; +extern crate syntect; +extern crate 
tera; #[macro_use] extern crate serde_derive; -extern crate serde; extern crate pest; +extern crate serde; #[macro_use] extern crate pest_derive; #[macro_use] extern crate errors; -extern crate front_matter; -extern crate utils; extern crate config; +extern crate front_matter; extern crate link_checker; +extern crate utils; #[cfg(test)] extern crate templates; mod context; mod markdown; -mod table_of_contents; mod shortcode; +mod shortcode; +mod table_of_contents; use errors::Result; -use markdown::markdown_to_html; -pub use table_of_contents::Header; -pub use shortcode::render_shortcodes; pub use context::RenderContext; +use markdown::markdown_to_html; +pub use shortcode::render_shortcodes; +pub use table_of_contents::Header; pub fn render_content(content: &str, context: &RenderContext) -> Result<markdown::Rendered> { // Don't do anything if there is nothing like a shortcode in the content diff --git a/components/rendering/src/markdown.rs b/components/rendering/src/markdown.rs index d983a508..415de952 100644 --- a/components/rendering/src/markdown.rs +++ b/components/rendering/src/markdown.rs @@ -1,18 +1,20 @@ -use std::borrow::Cow::{Owned, Borrowed}; +use std::borrow::Cow::{Borrowed, Owned}; +use self::cmark::{Event, Options, Parser, Tag, OPTION_ENABLE_FOOTNOTES, OPTION_ENABLE_TABLES}; use pulldown_cmark as cmark; -use self::cmark::{Parser, Event, Tag, Options, OPTION_ENABLE_TABLES, OPTION_ENABLE_FOOTNOTES}; use slug::slugify; use syntect::easy::HighlightLines; -use syntect::html::{start_highlighted_html_snippet, styled_line_to_highlighted_html, IncludeBackground}; +use syntect::html::{ + start_highlighted_html_snippet, styled_line_to_highlighted_html, IncludeBackground, +}; +use config::highlighting::{get_highlighter, SYNTAX_SET, THEME_SET}; use errors::Result; -use utils::site::resolve_internal_link; -use config::highlighting::{get_highlighter, THEME_SET, SYNTAX_SET}; use link_checker::check_url; +use utils::site::resolve_internal_link; -use table_of_contents::{TempHeader, Header, make_table_of_contents}; use context::RenderContext; +use table_of_contents::{make_table_of_contents, Header, TempHeader}; const CONTINUE_READING: &str = "
<p><a name=\"continue-reading\"></a></p>
\n"; @@ -113,7 +115,8 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Rendered> { if is_colocated_asset_link(&src) { - return Event::Start( - Tag::Image( - Owned(format!("{}{}", context.current_page_permalink, src)), - title, - ) - ); + return Event::Start(Tag::Image( + Owned(format!("{}{}", context.current_page_permalink, src)), + title, + )); } Event::Start(Tag::Image(src, title)) @@ -157,13 +158,14 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Rendered> diff --git a/components/rendering/src/shortcode.rs b/components/rendering/src/shortcode.rs --- a/components/rendering/src/shortcode.rs +++ b/components/rendering/src/shortcode.rs fn replace_string_markers(input: &str) -> String { match input.chars().next().unwrap() { '"' => input.replace('"', "").to_string(), @@ -39,7 +38,7 @@ fn parse_literal(pair: Pair<Rule>) -> Value { Rule::int => { val = Some(to_value(p.as_str().parse::<i64>().unwrap()).unwrap()); } - _ => unreachable!("Unknown literal: {:?}", p) + _ => unreachable!("Unknown literal: {:?}", p), }; } @@ -53,20 +52,29 @@ fn parse_shortcode_call(pair: Pair<Rule>) -> (String, Map<String, Value>) { for p in pair.into_inner() { match p.as_rule() { - Rule::ident => { name = Some(p.into_span().as_str().to_string()); } + Rule::ident => { + name = Some(p.into_span().as_str().to_string()); + } Rule::kwarg => { let mut arg_name = None; let mut arg_val = None; for p2 in p.into_inner() { match p2.as_rule() { - Rule::ident => { arg_name = Some(p2.into_span().as_str().to_string()); } - Rule::literal => { arg_val = Some(parse_literal(p2)); } + Rule::ident => { + arg_name = Some(p2.into_span().as_str().to_string()); + } + Rule::literal => { + arg_val = Some(parse_literal(p2)); + } Rule::array => { let mut vals = vec![]; for p3 in p2.into_inner() { match p3.as_rule() { Rule::literal => vals.push(parse_literal(p3)), - _ => unreachable!("Got something other than literal in an array: {:?}", p3), + _ => unreachable!( + "Got something other than literal in an array: {:?}", + p3 + ), } } arg_val = Some(Value::Array(vals)); @@ -77,14 +85,18 @@ fn parse_shortcode_call(pair: Pair<Rule>) -> (String, Map<String, Value>) { args.insert(arg_name.unwrap(), arg_val.unwrap()); } - _ => unreachable!("Got something unexpected in a shortcode: {:?}", p) + _ => unreachable!("Got something unexpected in a shortcode: {:?}", p), } } (name.unwrap(), args) } - -fn render_shortcode(name: &str, args: &Map<String, Value>, context: &RenderContext, body: Option<&str>) -> Result<String> { +fn render_shortcode( + name: &str, + args: &Map<String, Value>, + context: &RenderContext, + body: Option<&str>, +) -> Result<String> { let mut tera_context = Context::new(); for (key, value) in args.iter() { tera_context.insert(key, value); @@ -96,7 +108,8 @@ fn render_shortcode(name: &str, args: &Map<String, Value>, context: &RenderConte tera_context.extend(context.tera_context.clone()); let tpl_name = format!("shortcodes/{}.html", name); - let res = context.tera + let res = context + .tera .render(&tpl_name, &tera_context) .chain_err(|| format!("Failed to render {} shortcode", name))?; @@ -109,38 +122,36 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> { let mut pairs = match ContentParser::parse(Rule::page, content) { Ok(p) => p, Err(e) => { - let fancy_e = e.renamed_rules(|rule| { - match *rule { - Rule::int => "an integer".to_string(), - Rule::float => "a float".to_string(), - Rule::string => "a string".to_string(), - Rule::literal => "a literal (int, float, string, bool)".to_string(), - Rule::array => "an array".to_string(), - Rule::kwarg => "a keyword argument".to_string(), - Rule::ident => "an identifier".to_string(), - Rule::inline_shortcode => "an inline shortcode".to_string(), - Rule::ignored_inline_shortcode => "an ignored inline shortcode".to_string(), - Rule::sc_body_start => "the start of a shortcode".to_string(), - Rule::ignored_sc_body_start => "the start of
an ignored shortcode".to_string(), - Rule::text => "some text".to_string(), - Rule::EOI => "end of input".to_string(), - Rule::double_quoted_string => "double quoted string".to_string(), - Rule::single_quoted_string => "single quoted string".to_string(), - Rule::backquoted_quoted_string => "backquoted quoted string".to_string(), - Rule::boolean => "a boolean (true, false)".to_string(), - Rule::all_chars => "a alphanumerical character".to_string(), - Rule::kwargs => "a list of keyword arguments".to_string(), - Rule::sc_def => "a shortcode definition".to_string(), - Rule::shortcode_with_body => "a shortcode with body".to_string(), - Rule::ignored_shortcode_with_body => "an ignored shortcode with body".to_string(), - Rule::sc_body_end => "{% end %}".to_string(), - Rule::ignored_sc_body_end => "{%/* end */%}".to_string(), - Rule::text_in_body_sc => "text in a shortcode body".to_string(), - Rule::text_in_ignored_body_sc => "text in an ignored shortcode body".to_string(), - Rule::content => "some content".to_string(), - Rule::page => "a page".to_string(), - Rule::WHITESPACE => "whitespace".to_string(), - } + let fancy_e = e.renamed_rules(|rule| match *rule { + Rule::int => "an integer".to_string(), + Rule::float => "a float".to_string(), + Rule::string => "a string".to_string(), + Rule::literal => "a literal (int, float, string, bool)".to_string(), + Rule::array => "an array".to_string(), + Rule::kwarg => "a keyword argument".to_string(), + Rule::ident => "an identifier".to_string(), + Rule::inline_shortcode => "an inline shortcode".to_string(), + Rule::ignored_inline_shortcode => "an ignored inline shortcode".to_string(), + Rule::sc_body_start => "the start of a shortcode".to_string(), + Rule::ignored_sc_body_start => "the start of an ignored shortcode".to_string(), + Rule::text => "some text".to_string(), + Rule::EOI => "end of input".to_string(), + Rule::double_quoted_string => "double quoted string".to_string(), + Rule::single_quoted_string => "single quoted string".to_string(), + Rule::backquoted_quoted_string => "backquoted quoted string".to_string(), + Rule::boolean => "a boolean (true, false)".to_string(), + Rule::all_chars => "a alphanumerical character".to_string(), + Rule::kwargs => "a list of keyword arguments".to_string(), + Rule::sc_def => "a shortcode definition".to_string(), + Rule::shortcode_with_body => "a shortcode with body".to_string(), + Rule::ignored_shortcode_with_body => "an ignored shortcode with body".to_string(), + Rule::sc_body_end => "{% end %}".to_string(), + Rule::ignored_sc_body_end => "{%/* end */%}".to_string(), + Rule::text_in_body_sc => "text in a shortcode body".to_string(), + Rule::text_in_ignored_body_sc => "text in an ignored shortcode body".to_string(), + Rule::content => "some content".to_string(), + Rule::page => "a page".to_string(), + Rule::WHITESPACE => "whitespace".to_string(), }); bail!("{}", fancy_e); } @@ -164,9 +175,7 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> { res.push_str( - &p.into_span().as_str() - .replacen("{{/*", "{{", 1) - .replacen("*/}}", "}}", 1) + &p.into_span().as_str().replacen("{{/*", "{{", 1).replacen("*/}}", "}}", 1), ); } Rule::ignored_shortcode_with_body => { @@ -174,16 +183,17 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> { res.push_str( - &p2.into_span().as_str() + &p2.into_span() + .as_str() .replacen("{%/*", "{%", 1) - .replacen("*/%}", "%}", 1) + .replacen("*/%}", "%}", 1), ); } Rule::text_in_ignored_body_sc => res.push_str(p2.into_span().as_str()), _ =>
unreachable!("Got something weird in an ignored shortcode: {:?}", p2), } } - }, + } Rule::EOI => (), _ => unreachable!("unexpected page rule: {:?}", p.as_rule()), } @@ -196,10 +206,10 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> { @@ -297,7 +307,7 @@ mod tests { {% hello() %} Body {{ var }} {% end %} - "# + "#, ]; for i in inputs { assert_lex_rule!(Rule::page, i); } } @@ -318,19 +328,25 @@ mod tests { #[test] fn can_unignore_shortcode_with_body() { - let res = render_shortcodes(r#" + let res = render_shortcodes( + r#" Hello World -{%/* youtube() */%}Some body {{ hello() }}{%/* end */%}"#, &Tera::default()); +{%/* youtube() */%}Some body {{ hello() }}{%/* end */%}"#, + &Tera::default(), + ); assert_eq!(res, "\nHello World\n{% youtube() %}Some body {{ hello() }}{% end %}"); } // https://github.com/Keats/gutenberg/issues/383 #[test] fn unignore_shortcode_with_body_does_not_swallow_initial_whitespace() { - let res = render_shortcodes(r#" + let res = render_shortcodes( + r#" Hello World {%/* youtube() */%} -Some body {{ hello() }}{%/* end */%}"#, &Tera::default()); +Some body {{ hello() }}{%/* end */%}"#, + &Tera::default(), + ); assert_eq!(res, "\nHello World\n{% youtube() %}\nSome body {{ hello() }}{% end %}"); } @@ -338,28 +354,20 @@ Some body {{ hello() }}{%/* end */%}"#, &Tera::default()); fn can_parse_shortcode_arguments() { let inputs = vec![ ("{{ youtube() }}", "youtube", Map::new()), - ( - "{{ youtube(id=1, autoplay=true, hello='salut', float=1.2) }}", - "youtube", - { - let mut m = Map::new(); - m.insert("id".to_string(), to_value(1).unwrap()); - m.insert("autoplay".to_string(), to_value(true).unwrap()); - m.insert("hello".to_string(), to_value("salut").unwrap()); - m.insert("float".to_string(), to_value(1.2).unwrap()); - m - } - ), - ( - "{{ gallery(photos=['something', 'else'], fullscreen=true) }}", - "gallery", - { - let mut m = Map::new(); - m.insert("photos".to_string(), to_value(["something", "else"]).unwrap()); - m.insert("fullscreen".to_string(), to_value(true).unwrap()); - m - } - ), + ("{{ youtube(id=1, autoplay=true, hello='salut', float=1.2) }}", "youtube", { + let mut m = Map::new(); + m.insert("id".to_string(), to_value(1).unwrap()); + m.insert("autoplay".to_string(), to_value(true).unwrap()); + m.insert("hello".to_string(), to_value("salut").unwrap()); + m.insert("float".to_string(), to_value(1.2).unwrap()); + m + }), + ("{{ gallery(photos=['something', 'else'], fullscreen=true) }}", "gallery", { + let mut m = Map::new(); + m.insert("photos".to_string(), to_value(["something", "else"]).unwrap()); + m.insert("fullscreen".to_string(), to_value(true).unwrap()); + m + }), ]; for (i, n, a) in inputs { diff --git a/components/rendering/src/table_of_contents.rs b/components/rendering/src/table_of_contents.rs index 40f11acd..5cc115e0 100644 --- a/components/rendering/src/table_of_contents.rs +++ b/components/rendering/src/table_of_contents.rs @@ -1,6 +1,5 @@ -use tera::{Tera, Context as TeraContext}; use front_matter::InsertAnchor; - +use tera::{Context as TeraContext, Tera}; #[derive(Debug, PartialEq, Clone, Serialize)] pub struct Header { @@ -65,9 +64,26 @@ impl TempHeader { }; match insert_anchor { - InsertAnchor::None => format!("<h{lvl} id=\"{id}\">{t}</h{lvl}>\n", lvl = self.level, t = self.html, id = self.id), - InsertAnchor::Left => format!("<h{lvl} id=\"{id}\">{a}{t}</h{lvl}>\n", lvl = self.level, a = anchor_link, t = self.html, id = self.id), - InsertAnchor::Right => format!("<h{lvl} id=\"{id}\">{t}{a}</h{lvl}>\n", lvl = self.level, a = anchor_link, t = self.html, id = self.id), + InsertAnchor::None => format!( + "<h{lvl} id=\"{id}\">{t}</h{lvl}>\n", +
lvl = self.level, + t = self.html, + id = self.id + ), + InsertAnchor::Left => format!( + "<h{lvl} id=\"{id}\">{a}{t}</h{lvl}>\n", + lvl = self.level, + a = anchor_link, + t = self.html, + id = self.id + ), + InsertAnchor::Right => format!( + "<h{lvl} id=\"{id}\">{t}{a}</h{lvl}>\n", + lvl = self.level, + a = anchor_link, + t = self.html, + id = self.id + ), } } } @@ -78,9 +94,12 @@ impl Default for TempHeader { } } - /// Recursively finds children of a header -fn find_children(parent_level: i32, start_at: usize, temp_headers: &[TempHeader]) -> (usize, Vec<Header>
) { +fn find_children( + parent_level: i32, + start_at: usize, + temp_headers: &[TempHeader], +) -> (usize, Vec<Header>
) { let mut headers = vec![]; let mut start_at = start_at; @@ -124,7 +143,6 @@ fn find_children(parent_level: i32, start_at: usize, temp_headers: &[TempHeader] (start_at, headers) } - /// Converts the flat temp headers into a nested set of headers /// representing the hierarchy pub fn make_table_of_contents(temp_headers: &[TempHeader]) -> Vec<Header>
{ @@ -148,11 +166,7 @@ mod tests { #[test] fn can_make_basic_toc() { - let input = vec![ - TempHeader::new(1), - TempHeader::new(1), - TempHeader::new(1), - ]; + let input = vec![TempHeader::new(1), TempHeader::new(1), TempHeader::new(1)]; let toc = make_table_of_contents(&input); assert_eq!(toc.len(), 3); } diff --git a/components/rendering/tests/markdown.rs b/components/rendering/tests/markdown.rs index 08707e52..675accb2 100644 --- a/components/rendering/tests/markdown.rs +++ b/components/rendering/tests/markdown.rs @@ -1,8 +1,8 @@ -extern crate tera; -extern crate front_matter; -extern crate templates; -extern crate rendering; extern crate config; +extern crate front_matter; +extern crate rendering; +extern crate templates; +extern crate tera; use std::collections::HashMap; @@ -10,9 +10,8 @@ use tera::Tera; use config::Config; use front_matter::InsertAnchor; +use rendering::{render_content, RenderContext}; use templates::ZOLA_TERA; -use rendering::{RenderContext, render_content}; - #[test] fn can_do_render_content_simple() { @@ -32,10 +31,7 @@ fn doesnt_highlight_code_block_with_highlighting_off() { config.highlight_code = false; let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None); let res = render_content("```\n$ gutenberg server\n```", &context).unwrap(); - assert_eq!( - res.body, - "
<pre><code>$ gutenberg server\n</code></pre>
\n" - ); + assert_eq!(res.body, "
<pre><code>$ gutenberg server\n</code></pre>
\n"); } #[test] @@ -86,11 +82,15 @@ fn can_render_shortcode() { let permalinks_ctx = HashMap::new(); let config = Config::default(); let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None); - let res = render_content(r#" + let res = render_content( + r#" Hello {{ youtube(id="ub36ffWAqgQ") }} - "#, &context).unwrap(); + "#, + &context, + ) + .unwrap(); assert!(res.body.contains("
<p>Hello</p>\n<div >
")); assert!(res.body.contains(r#"