Merge remote-tracking branch 'origin/next' into readme

This commit is contained in:
Michael Plotke 2019-04-09 13:36:31 -04:00
commit e2ed50b6e0
89 changed files with 4577 additions and 2042 deletions

View file

@ -10,5 +10,5 @@ What is the issue? Is the documentation unclear? Is it missing information?
## Proposed solution ## Proposed solution
A quick explanation of what you would like to see to solve the issue. A quick explanation of what you would like to see to solve the issue.
If you want to add content, please explain what you were looking fod and what was If you want to add content, please explain what you were looking for and what was
your process while looking at the current documentation. your process while looking at the current documentation.

4
.gitignore vendored
View file

@ -8,6 +8,7 @@ small-blog
medium-blog medium-blog
big-blog big-blog
huge-blog huge-blog
extra-huge-blog
small-kb small-kb
medium-kb medium-kb
huge-kb huge-kb
@ -21,3 +22,6 @@ snap/.snapcraft
parts parts
prime prime
stage stage
# nixos dependencies snippet
shell.nix

View file

@ -1,10 +1,33 @@
# Changelog # Changelog
## 0.6.0 (unreleased) ## 0.6.1 (unreleased)
## 0.6.0 (2019-03-25)
### Breaking
- `earlier/later` and `lighter/heavier` are not set anymore on pages when rendering
a section
- The table of content for a page/section is now only available as the `toc` variable when
rendering it and not anymore on the `page`/`section` variable
- Default directory for `load_data` is now the root of the site instead of the `content` directory
- Change variable sent to the sitemap template, see documentation for details
### Other
- Add support for content in multiple languages - Add support for content in multiple languages
- Lower latency on serve before rebuilding from 2 to 1 second - Lower latency on serve before rebuilding from 2 to 1 second
- Allow processing PNG and produced images are less blurry - Allow processing PNG and produced images are less blurry
- Add an id (`zola-continue-reading`) to the paragraph generated after a summary
- Add Dracula syntax highlighting theme
- Fix using inline styles in headers
- Fix sections with render=false being shown in sitemap
- Sitemap is now split when there are more than 30 000 links in it
- Add link to sitemap in robots.txt
- Markdown rendering is now fully CommonMark compliant
- `load_data` now defaults to loading file as plain text, unless `format` is passed
or the extension matches csv/toml/json
- Sitemap entries get an additional `extra` field for pages only
- Add a `base-path` command line option to `build` and `serve`
## 0.5.1 (2018-12-14) ## 0.5.1 (2018-12-14)

1635
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,6 @@
[package] [package]
name = "zola" name = "zola"
version = "0.6.0" version = "0.6.1"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
license = "MIT" license = "MIT"
readme = "README.md" readme = "README.md"

View file

@ -20,3 +20,6 @@
| [Jens Getreu's blog](https://blog.getreu.net) | | | [Jens Getreu's blog](https://blog.getreu.net) | |
| [Matthias Endler](https://matthias-endler.de) | https://github.com/mre/mre.github.io | | [Matthias Endler](https://matthias-endler.de) | https://github.com/mre/mre.github.io |
| [Michael Plotke](https://michael.plotke.me) | https://gitlab.com/bdjnk/michael | | [Michael Plotke](https://michael.plotke.me) | https://gitlab.com/bdjnk/michael |
| [shaleenjain.com](https://shaleenjain.com) | https://github.com/shalzz/shalzz.github.io |
| [Hello, Rust!](https://hello-rust.show) | https://github.com/hello-rust/hello-rust.github.io |
| [maxdeviant.com](https://maxdeviant.com/) | |

View file

@ -13,3 +13,4 @@ lazy_static = "1"
syntect = "3" syntect = "3"
errors = { path = "../errors" } errors = { path = "../errors" }
utils = { path = "../utils" }

View file

@ -1,6 +1,4 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use chrono::Utc; use chrono::Utc;
@ -9,9 +7,10 @@ use syntect::parsing::{SyntaxSet, SyntaxSetBuilder};
use toml; use toml;
use toml::Value as Toml; use toml::Value as Toml;
use errors::{Result, ResultExt}; use errors::Result;
use highlighting::THEME_SET; use highlighting::THEME_SET;
use theme::Theme; use theme::Theme;
use utils::fs::read_file_with_error;
// We want a default base url for tests // We want a default base url for tests
static DEFAULT_BASE_URL: &'static str = "http://a-website.com"; static DEFAULT_BASE_URL: &'static str = "http://a-website.com";
@ -42,6 +41,9 @@ pub struct Taxonomy {
pub paginate_path: Option<String>, pub paginate_path: Option<String>,
/// Whether to generate a RSS feed only for each taxonomy term, defaults to false /// Whether to generate a RSS feed only for each taxonomy term, defaults to false
pub rss: bool, pub rss: bool,
/// The language for that taxonomy, only used in multilingual sites.
/// Defaults to the config `default_language` if not set
pub lang: String,
} }
impl Taxonomy { impl Taxonomy {
@ -64,7 +66,13 @@ impl Taxonomy {
impl Default for Taxonomy { impl Default for Taxonomy {
fn default() -> Taxonomy { fn default() -> Taxonomy {
Taxonomy { name: String::new(), paginate_by: None, paginate_path: None, rss: false } Taxonomy {
name: String::new(),
paginate_by: None,
paginate_path: None,
rss: false,
lang: String::new(),
}
} }
} }
@ -165,20 +173,23 @@ impl Config {
Some(glob_set_builder.build().expect("Bad ignored_content in config file.")); Some(glob_set_builder.build().expect("Bad ignored_content in config file."));
} }
for taxonomy in config.taxonomies.iter_mut() {
if taxonomy.lang.is_empty() {
taxonomy.lang = config.default_language.clone();
}
}
Ok(config) Ok(config)
} }
/// Parses a config file from the given path /// Parses a config file from the given path
pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Config> { pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Config> {
let mut content = String::new();
let path = path.as_ref(); let path = path.as_ref();
let file_name = path.file_name().unwrap(); let file_name = path.file_name().unwrap();
File::open(path) let content = read_file_with_error(
.chain_err(|| { path,
format!("No `{:?}` file found. Are you in the right directory?", file_name) &format!("No `{:?}` file found. Are you in the right directory?", file_name),
})? )?;
.read_to_string(&mut content)?;
Config::parse(&content) Config::parse(&content)
} }

View file

@ -1,14 +1,16 @@
#[macro_use] #[macro_use]
extern crate serde_derive; extern crate serde_derive;
extern crate toml;
#[macro_use]
extern crate errors;
extern crate chrono; extern crate chrono;
extern crate globset; extern crate globset;
extern crate toml;
#[macro_use] #[macro_use]
extern crate lazy_static; extern crate lazy_static;
extern crate syntect; extern crate syntect;
#[macro_use]
extern crate errors;
extern crate utils;
mod config; mod config;
pub mod highlighting; pub mod highlighting;
mod theme; mod theme;

View file

@ -1,11 +1,10 @@
use std::collections::HashMap; use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
use std::path::PathBuf; use std::path::PathBuf;
use toml::Value as Toml; use toml::Value as Toml;
use errors::{Result, ResultExt}; use errors::Result;
use utils::fs::read_file_with_error;
/// Holds the data from a `theme.toml` file. /// Holds the data from a `theme.toml` file.
/// There are other fields than `extra` in it but Zola /// There are other fields than `extra` in it but Zola
@ -40,15 +39,12 @@ impl Theme {
/// Parses a theme file from the given path /// Parses a theme file from the given path
pub fn from_file(path: &PathBuf) -> Result<Theme> { pub fn from_file(path: &PathBuf) -> Result<Theme> {
let mut content = String::new(); let content = read_file_with_error(
File::open(path) path,
.chain_err(|| { "No `theme.toml` file found. \
"No `theme.toml` file found. \ Is the `theme` defined in your `config.toml present in the `themes` directory \
Is the `theme` defined in your `config.toml present in the `themes` directory \ and does it have a `theme.toml` inside?",
and does it have a `theme.toml` inside?" )?;
})?
.read_to_string(&mut content)?;
Theme::parse(&content) Theme::parse(&content)
} }
} }

View file

@ -4,8 +4,7 @@ version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
[dependencies] [dependencies]
error-chain = "0.12" tera = "1.0.0-alpha.3"
tera = "0.11"
toml = "0.4" toml = "0.4"
image = "0.20" image = "0.21"
syntect = "3" syntect = "3"

View file

@ -1,27 +1,109 @@
#![allow(unused_doc_comments)]
#[macro_use]
extern crate error_chain;
extern crate image; extern crate image;
extern crate syntect; extern crate syntect;
extern crate tera; extern crate tera;
extern crate toml; extern crate toml;
error_chain! { use std::convert::Into;
errors {} use std::error::Error as StdError;
use std::fmt;
links { #[derive(Debug)]
Tera(tera::Error, tera::ErrorKind); pub enum ErrorKind {
} Msg(String),
Tera(tera::Error),
Io(::std::io::Error),
Toml(toml::de::Error),
Image(image::ImageError),
Syntect(syntect::LoadingError),
}
foreign_links { /// The Error type
Io(::std::io::Error); #[derive(Debug)]
Toml(toml::de::Error); pub struct Error {
Image(image::ImageError); /// Kind of error
Syntect(syntect::LoadingError); pub kind: ErrorKind,
pub source: Option<Box<dyn StdError>>,
}
unsafe impl Sync for Error {}
unsafe impl Send for Error {}
impl StdError for Error {
fn source(&self) -> Option<&(dyn StdError + 'static)> {
let mut source = self.source.as_ref().map(|c| &**c);
if source.is_none() {
match self.kind {
ErrorKind::Tera(ref err) => source = err.source(),
_ => (),
};
}
source
} }
} }
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.kind {
ErrorKind::Msg(ref message) => write!(f, "{}", message),
ErrorKind::Tera(ref e) => write!(f, "{}", e),
ErrorKind::Io(ref e) => write!(f, "{}", e),
ErrorKind::Toml(ref e) => write!(f, "{}", e),
ErrorKind::Image(ref e) => write!(f, "{}", e),
ErrorKind::Syntect(ref e) => write!(f, "{}", e),
}
}
}
impl Error {
/// Creates generic error
pub fn msg(value: impl ToString) -> Self {
Self { kind: ErrorKind::Msg(value.to_string()), source: None }
}
/// Creates generic error with a cause
pub fn chain(value: impl ToString, source: impl Into<Box<dyn StdError>>) -> Self {
Self { kind: ErrorKind::Msg(value.to_string()), source: Some(source.into()) }
}
}
impl From<&str> for Error {
fn from(e: &str) -> Self {
Self::msg(e)
}
}
impl From<String> for Error {
fn from(e: String) -> Self {
Self::msg(e)
}
}
impl From<toml::de::Error> for Error {
fn from(e: toml::de::Error) -> Self {
Self { kind: ErrorKind::Toml(e), source: None }
}
}
impl From<syntect::LoadingError> for Error {
fn from(e: syntect::LoadingError) -> Self {
Self { kind: ErrorKind::Syntect(e), source: None }
}
}
impl From<tera::Error> for Error {
fn from(e: tera::Error) -> Self {
Self { kind: ErrorKind::Tera(e), source: None }
}
}
impl From<::std::io::Error> for Error {
fn from(e: ::std::io::Error) -> Self {
Self { kind: ErrorKind::Io(e), source: None }
}
}
impl From<image::ImageError> for Error {
fn from(e: image::ImageError) -> Self {
Self { kind: ErrorKind::Image(e), source: None }
}
}
/// Convenient wrapper around std::Result.
pub type Result<T> = ::std::result::Result<T, Error>;
// So we can use bail! in all other crates // So we can use bail! in all other crates
#[macro_export] #[macro_export]
macro_rules! bail { macro_rules! bail {

View file

@ -4,7 +4,7 @@ version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
[dependencies] [dependencies]
tera = "0.11" tera = "1.0.0-alpha.3"
chrono = "0.4" chrono = "0.4"
serde = "1" serde = "1"
serde_derive = "1" serde_derive = "1"

View file

@ -12,7 +12,7 @@ extern crate toml;
extern crate errors; extern crate errors;
extern crate utils; extern crate utils;
use errors::{Result, ResultExt}; use errors::{Error, Result};
use regex::Regex; use regex::Regex;
use std::path::Path; use std::path::Path;
@ -71,8 +71,11 @@ pub fn split_section_content(
content: &str, content: &str,
) -> Result<(SectionFrontMatter, String)> { ) -> Result<(SectionFrontMatter, String)> {
let (front_matter, content) = split_content(file_path, content)?; let (front_matter, content) = split_content(file_path, content)?;
let meta = SectionFrontMatter::parse(&front_matter).chain_err(|| { let meta = SectionFrontMatter::parse(&front_matter).map_err(|e| {
format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()) Error::chain(
format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()),
e,
)
})?; })?;
Ok((meta, content)) Ok((meta, content))
} }
@ -81,8 +84,11 @@ pub fn split_section_content(
/// Returns a parsed `PageFrontMatter` and the rest of the content /// Returns a parsed `PageFrontMatter` and the rest of the content
pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontMatter, String)> { pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontMatter, String)> {
let (front_matter, content) = split_content(file_path, content)?; let (front_matter, content) = split_content(file_path, content)?;
let meta = PageFrontMatter::parse(&front_matter).chain_err(|| { let meta = PageFrontMatter::parse(&front_matter).map_err(|e| {
format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()) Error::chain(
format!("Error when parsing front matter of page `{}`", file_path.to_string_lossy()),
e,
)
})?; })?;
Ok((meta, content)) Ok((meta, content))
} }

View file

@ -6,8 +6,8 @@ authors = ["Vojtěch Král <vojtech@kral.hk>"]
[dependencies] [dependencies]
lazy_static = "1" lazy_static = "1"
regex = "1.0" regex = "1.0"
tera = "0.11" tera = "1.0.0-alpha.3"
image = "0.20" image = "0.21"
rayon = "1" rayon = "1"
errors = { path = "../errors" } errors = { path = "../errors" }

View file

@ -20,7 +20,7 @@ use image::{FilterType, GenericImageView};
use rayon::prelude::*; use rayon::prelude::*;
use regex::Regex; use regex::Regex;
use errors::{Result, ResultExt}; use errors::{Error, Result};
use utils::fs as ufs; use utils::fs as ufs;
static RESIZED_SUBDIR: &'static str = "processed_images"; static RESIZED_SUBDIR: &'static str = "processed_images";
@ -456,7 +456,7 @@ impl Processor {
let target = let target =
self.resized_path.join(Self::op_filename(*hash, op.collision_id, op.format)); self.resized_path.join(Self::op_filename(*hash, op.collision_id, op.format));
op.perform(&self.content_path, &target) op.perform(&self.content_path, &target)
.chain_err(|| format!("Failed to process image: {}", op.source)) .map_err(|e| Error::chain(format!("Failed to process image: {}", op.source), e))
}) })
.collect::<Result<()>>() .collect::<Result<()>>()
} }

View file

@ -7,7 +7,7 @@ authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
slotmap = "0.2" slotmap = "0.2"
rayon = "1" rayon = "1"
chrono = { version = "0.4", features = ["serde"] } chrono = { version = "0.4", features = ["serde"] }
tera = "0.11" tera = "1.0.0-alpha.3"
serde = "1" serde = "1"
serde_derive = "1" serde_derive = "1"
slug = "0.1" slug = "0.1"

View file

@ -52,11 +52,13 @@ pub struct FileInfo {
} }
impl FileInfo { impl FileInfo {
pub fn new_page(path: &Path) -> FileInfo { pub fn new_page(path: &Path, base_path: &PathBuf) -> FileInfo {
let file_path = path.to_path_buf(); let file_path = path.to_path_buf();
let mut parent = file_path.parent().unwrap().to_path_buf(); let mut parent = file_path.parent().expect("Get parent of page").to_path_buf();
let name = path.file_stem().unwrap().to_string_lossy().to_string(); let name = path.file_stem().unwrap().to_string_lossy().to_string();
let mut components = find_content_components(&file_path); let mut components = find_content_components(
&file_path.strip_prefix(base_path).expect("Strip base path prefix for page"),
);
let relative = if !components.is_empty() { let relative = if !components.is_empty() {
format!("{}/{}.md", components.join("/"), name) format!("{}/{}.md", components.join("/"), name)
} else { } else {
@ -85,11 +87,13 @@ impl FileInfo {
} }
} }
pub fn new_section(path: &Path) -> FileInfo { pub fn new_section(path: &Path, base_path: &PathBuf) -> FileInfo {
let file_path = path.to_path_buf(); let file_path = path.to_path_buf();
let parent = path.parent().unwrap().to_path_buf(); let parent = path.parent().expect("Get parent of section").to_path_buf();
let name = path.file_stem().unwrap().to_string_lossy().to_string(); let name = path.file_stem().unwrap().to_string_lossy().to_string();
let components = find_content_components(path); let components = find_content_components(
&file_path.strip_prefix(base_path).expect("Strip base path prefix for section"),
);
let relative = if !components.is_empty() { let relative = if !components.is_empty() {
format!("{}/{}.md", components.join("/"), name) format!("{}/{}.md", components.join("/"), name)
} else { } else {
@ -112,14 +116,14 @@ impl FileInfo {
/// Look for a language in the filename. /// Look for a language in the filename.
/// If a language has been found, update the name of the file in this struct to /// If a language has been found, update the name of the file in this struct to
/// remove it and return the language code /// remove it and return the language code
pub fn find_language(&mut self, config: &Config) -> Result<Option<String>> { pub fn find_language(&mut self, config: &Config) -> Result<String> {
// No languages? Nothing to do // No languages? Nothing to do
if !config.is_multilingual() { if !config.is_multilingual() {
return Ok(None); return Ok(config.default_language.clone());
} }
if !self.name.contains('.') { if !self.name.contains('.') {
return Ok(None); return Ok(config.default_language.clone());
} }
// Go with the assumption that no one is using `.` in filenames when using i18n // Go with the assumption that no one is using `.` in filenames when using i18n
@ -136,7 +140,7 @@ impl FileInfo {
self.canonical = self.parent.join(&self.name); self.canonical = self.parent.join(&self.name);
let lang = parts.swap_remove(0); let lang = parts.swap_remove(0);
Ok(Some(lang)) Ok(lang)
} }
} }
@ -158,7 +162,7 @@ impl Default for FileInfo {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::path::Path; use std::path::{Path, PathBuf};
use config::{Config, Language}; use config::{Config, Language};
@ -170,11 +174,22 @@ mod tests {
find_content_components("/home/vincent/code/site/content/posts/tutorials/python.md"); find_content_components("/home/vincent/code/site/content/posts/tutorials/python.md");
assert_eq!(res, ["posts".to_string(), "tutorials".to_string()]); assert_eq!(res, ["posts".to_string(), "tutorials".to_string()]);
} }
#[test] #[test]
fn can_find_components_in_page_with_assets() { fn can_find_components_in_page_with_assets() {
let file = FileInfo::new_page(&Path::new( let file = FileInfo::new_page(
"/home/vincent/code/site/content/posts/tutorials/python/index.md", &Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.md"),
)); &PathBuf::new(),
);
assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]);
}
#[test]
fn doesnt_fail_with_multiple_content_directories() {
let file = FileInfo::new_page(
&Path::new("/home/vincent/code/content/site/content/posts/tutorials/python/index.md"),
&PathBuf::from("/home/vincent/code/content/site"),
);
assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]); assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]);
} }
@ -182,45 +197,49 @@ mod tests {
fn can_find_valid_language_in_page() { fn can_find_valid_language_in_page() {
let mut config = Config::default(); let mut config = Config::default();
config.languages.push(Language { code: String::from("fr"), rss: false }); config.languages.push(Language { code: String::from("fr"), rss: false });
let mut file = FileInfo::new_page(&Path::new( let mut file = FileInfo::new_page(
"/home/vincent/code/site/content/posts/tutorials/python.fr.md", &Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
)); &PathBuf::new(),
);
let res = file.find_language(&config); let res = file.find_language(&config);
assert!(res.is_ok()); assert!(res.is_ok());
assert_eq!(res.unwrap(), Some(String::from("fr"))); assert_eq!(res.unwrap(), "fr");
} }
#[test] #[test]
fn can_find_valid_language_in_page_with_assets() { fn can_find_valid_language_in_page_with_assets() {
let mut config = Config::default(); let mut config = Config::default();
config.languages.push(Language { code: String::from("fr"), rss: false }); config.languages.push(Language { code: String::from("fr"), rss: false });
let mut file = FileInfo::new_page(&Path::new( let mut file = FileInfo::new_page(
"/home/vincent/code/site/content/posts/tutorials/python/index.fr.md", &Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"),
)); &PathBuf::new(),
);
assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]); assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]);
let res = file.find_language(&config); let res = file.find_language(&config);
assert!(res.is_ok()); assert!(res.is_ok());
assert_eq!(res.unwrap(), Some(String::from("fr"))); assert_eq!(res.unwrap(), "fr");
} }
#[test] #[test]
fn do_nothing_on_unknown_language_in_page_with_i18n_off() { fn do_nothing_on_unknown_language_in_page_with_i18n_off() {
let config = Config::default(); let config = Config::default();
let mut file = FileInfo::new_page(&Path::new( let mut file = FileInfo::new_page(
"/home/vincent/code/site/content/posts/tutorials/python.fr.md", &Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
)); &PathBuf::new(),
);
let res = file.find_language(&config); let res = file.find_language(&config);
assert!(res.is_ok()); assert!(res.is_ok());
assert!(res.unwrap().is_none()); assert_eq!(res.unwrap(), config.default_language);
} }
#[test] #[test]
fn errors_on_unknown_language_in_page_with_i18n_on() { fn errors_on_unknown_language_in_page_with_i18n_on() {
let mut config = Config::default(); let mut config = Config::default();
config.languages.push(Language { code: String::from("it"), rss: false }); config.languages.push(Language { code: String::from("it"), rss: false });
let mut file = FileInfo::new_page(&Path::new( let mut file = FileInfo::new_page(
"/home/vincent/code/site/content/posts/tutorials/python.fr.md", &Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
)); &PathBuf::new(),
);
let res = file.find_language(&config); let res = file.find_language(&config);
assert!(res.is_err()); assert!(res.is_err());
} }
@ -229,11 +248,12 @@ mod tests {
fn can_find_valid_language_in_section() { fn can_find_valid_language_in_section() {
let mut config = Config::default(); let mut config = Config::default();
config.languages.push(Language { code: String::from("fr"), rss: false }); config.languages.push(Language { code: String::from("fr"), rss: false });
let mut file = FileInfo::new_section(&Path::new( let mut file = FileInfo::new_section(
"/home/vincent/code/site/content/posts/tutorials/_index.fr.md", &Path::new("/home/vincent/code/site/content/posts/tutorials/_index.fr.md"),
)); &PathBuf::new(),
);
let res = file.find_language(&config); let res = file.find_language(&config);
assert!(res.is_ok()); assert!(res.is_ok());
assert_eq!(res.unwrap(), Some(String::from("fr"))); assert_eq!(res.unwrap(), "fr");
} }
} }

View file

@ -8,7 +8,7 @@ use slug::slugify;
use tera::{Context as TeraContext, Tera}; use tera::{Context as TeraContext, Tera};
use config::Config; use config::Config;
use errors::{Result, ResultExt}; use errors::{Error, Result};
use front_matter::{split_page_content, InsertAnchor, PageFrontMatter}; use front_matter::{split_page_content, InsertAnchor, PageFrontMatter};
use library::Library; use library::Library;
use rendering::{render_content, Header, RenderContext}; use rendering::{render_content, Header, RenderContext};
@ -71,19 +71,19 @@ pub struct Page {
/// How long would it take to read the raw content. /// How long would it take to read the raw content.
/// See `get_reading_analytics` on how it is calculated /// See `get_reading_analytics` on how it is calculated
pub reading_time: Option<usize>, pub reading_time: Option<usize>,
/// The language of that page. `None` if the user doesn't setup `languages` in config. /// The language of that page. Equal to the default lang if the user doesn't setup `languages` in config.
/// Corresponds to the lang in the {slug}.{lang}.md file scheme /// Corresponds to the lang in the {slug}.{lang}.md file scheme
pub lang: Option<String>, pub lang: String,
/// Contains all the translated version of that page /// Contains all the translated version of that page
pub translations: Vec<Key>, pub translations: Vec<Key>,
} }
impl Page { impl Page {
pub fn new<P: AsRef<Path>>(file_path: P, meta: PageFrontMatter) -> Page { pub fn new<P: AsRef<Path>>(file_path: P, meta: PageFrontMatter, base_path: &PathBuf) -> Page {
let file_path = file_path.as_ref(); let file_path = file_path.as_ref();
Page { Page {
file: FileInfo::new_page(file_path), file: FileInfo::new_page(file_path, base_path),
meta, meta,
ancestors: vec![], ancestors: vec![],
raw_content: "".to_string(), raw_content: "".to_string(),
@ -102,7 +102,7 @@ impl Page {
toc: vec![], toc: vec![],
word_count: None, word_count: None,
reading_time: None, reading_time: None,
lang: None, lang: String::new(),
translations: Vec::new(), translations: Vec::new(),
} }
} }
@ -114,9 +114,14 @@ impl Page {
/// Parse a page given the content of the .md file /// Parse a page given the content of the .md file
/// Files without front matter or with invalid front matter are considered /// Files without front matter or with invalid front matter are considered
/// erroneous /// erroneous
pub fn parse(file_path: &Path, content: &str, config: &Config) -> Result<Page> { pub fn parse(
file_path: &Path,
content: &str,
config: &Config,
base_path: &PathBuf,
) -> Result<Page> {
let (meta, content) = split_page_content(file_path, content)?; let (meta, content) = split_page_content(file_path, content)?;
let mut page = Page::new(file_path, meta); let mut page = Page::new(file_path, meta, base_path);
page.lang = page.file.find_language(config)?; page.lang = page.file.find_language(config)?;
@ -126,7 +131,16 @@ impl Page {
page.reading_time = Some(reading_time); page.reading_time = Some(reading_time);
let mut slug_from_dated_filename = None; let mut slug_from_dated_filename = None;
if let Some(ref caps) = RFC3339_DATE.captures(&page.file.name.replace(".md", "")) { let file_path = if page.file.name == "index" {
if let Some(parent) = page.file.path.parent() {
parent.file_name().unwrap().to_str().unwrap().to_string()
} else {
page.file.name.replace(".md", "")
}
} else {
page.file.name.replace(".md", "")
};
if let Some(ref caps) = RFC3339_DATE.captures(&file_path) {
slug_from_dated_filename = Some(caps.name("slug").unwrap().as_str().to_string()); slug_from_dated_filename = Some(caps.name("slug").unwrap().as_str().to_string());
if page.meta.date.is_none() { if page.meta.date.is_none() {
page.meta.date = Some(caps.name("datetime").unwrap().as_str().to_string()); page.meta.date = Some(caps.name("datetime").unwrap().as_str().to_string());
@ -139,7 +153,11 @@ impl Page {
slug.trim().to_string() slug.trim().to_string()
} else if page.file.name == "index" { } else if page.file.name == "index" {
if let Some(parent) = page.file.path.parent() { if let Some(parent) = page.file.path.parent() {
slugify(parent.file_name().unwrap().to_str().unwrap()) if let Some(slug) = slug_from_dated_filename {
slugify(&slug)
} else {
slugify(parent.file_name().unwrap().to_str().unwrap())
}
} else { } else {
slugify(&page.file.name) slugify(&page.file.name)
} }
@ -153,7 +171,7 @@ impl Page {
}; };
if let Some(ref p) = page.meta.path { if let Some(ref p) = page.meta.path {
page.path = p.trim().trim_left_matches('/').to_string(); page.path = p.trim().trim_start_matches('/').to_string();
} else { } else {
let mut path = if page.file.components.is_empty() { let mut path = if page.file.components.is_empty() {
page.slug.clone() page.slug.clone()
@ -161,8 +179,8 @@ impl Page {
format!("{}/{}", page.file.components.join("/"), page.slug) format!("{}/{}", page.file.components.join("/"), page.slug)
}; };
if let Some(ref lang) = page.lang { if page.lang != config.default_language {
path = format!("{}/{}", lang, path); path = format!("{}/{}", page.lang, path);
} }
page.path = path; page.path = path;
@ -183,10 +201,14 @@ impl Page {
} }
/// Read and parse a .md file into a Page struct /// Read and parse a .md file into a Page struct
pub fn from_file<P: AsRef<Path>>(path: P, config: &Config) -> Result<Page> { pub fn from_file<P: AsRef<Path>>(
path: P,
config: &Config,
base_path: &PathBuf,
) -> Result<Page> {
let path = path.as_ref(); let path = path.as_ref();
let content = read_file(path)?; let content = read_file(path)?;
let mut page = Page::parse(path, &content, config)?; let mut page = Page::parse(path, &content, config, base_path)?;
if page.file.name == "index" { if page.file.name == "index" {
let parent_dir = path.parent().unwrap(); let parent_dir = path.parent().unwrap();
@ -233,8 +255,9 @@ impl Page {
context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None)); context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None));
let res = render_content(&self.raw_content, &context) let res = render_content(&self.raw_content, &context).map_err(|e| {
.chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?; Error::chain(format!("Failed to render content of {}", self.file.path.display()), e)
})?;
self.summary = res.summary_len.map(|l| res.body[0..l].to_owned()); self.summary = res.summary_len.map(|l| res.body[0..l].to_owned());
self.content = res.body; self.content = res.body;
@ -256,9 +279,11 @@ impl Page {
context.insert("current_path", &self.path); context.insert("current_path", &self.path);
context.insert("page", &self.to_serialized(library)); context.insert("page", &self.to_serialized(library));
context.insert("lang", &self.lang); context.insert("lang", &self.lang);
context.insert("toc", &self.toc);
render_template(&tpl_name, tera, &context, &config.theme) render_template(&tpl_name, tera, context, &config.theme).map_err(|e| {
.chain_err(|| format!("Failed to render page '{}'", self.file.path.display())) Error::chain(format!("Failed to render page '{}'", self.file.path.display()), e)
})
} }
/// Creates a vectors of asset URLs. /// Creates a vectors of asset URLs.
@ -302,7 +327,7 @@ impl Default for Page {
toc: vec![], toc: vec![],
word_count: None, word_count: None,
reading_time: None, reading_time: None,
lang: None, lang: String::new(),
translations: Vec::new(), translations: Vec::new(),
} }
} }
@ -313,7 +338,7 @@ mod tests {
use std::collections::HashMap; use std::collections::HashMap;
use std::fs::{create_dir, File}; use std::fs::{create_dir, File};
use std::io::Write; use std::io::Write;
use std::path::Path; use std::path::{Path, PathBuf};
use globset::{Glob, GlobSetBuilder}; use globset::{Glob, GlobSetBuilder};
use tempfile::tempdir; use tempfile::tempdir;
@ -332,7 +357,7 @@ description = "hey there"
slug = "hello-world" slug = "hello-world"
+++ +++
Hello world"#; Hello world"#;
let res = Page::parse(Path::new("post.md"), content, &Config::default()); let res = Page::parse(Path::new("post.md"), content, &Config::default(), &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let mut page = res.unwrap(); let mut page = res.unwrap();
page.render_markdown( page.render_markdown(
@ -358,7 +383,8 @@ Hello world"#;
Hello world"#; Hello world"#;
let mut conf = Config::default(); let mut conf = Config::default();
conf.base_url = "http://hello.com/".to_string(); conf.base_url = "http://hello.com/".to_string();
let res = Page::parse(Path::new("content/posts/intro/start.md"), content, &conf); let res =
Page::parse(Path::new("content/posts/intro/start.md"), content, &conf, &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
assert_eq!(page.path, "posts/intro/hello-world/"); assert_eq!(page.path, "posts/intro/hello-world/");
@ -374,7 +400,7 @@ Hello world"#;
+++ +++
Hello world"#; Hello world"#;
let config = Config::default(); let config = Config::default();
let res = Page::parse(Path::new("start.md"), content, &config); let res = Page::parse(Path::new("start.md"), content, &config, &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
assert_eq!(page.path, "hello-world/"); assert_eq!(page.path, "hello-world/");
@ -390,7 +416,12 @@ Hello world"#;
+++ +++
Hello world"#; Hello world"#;
let config = Config::default(); let config = Config::default();
let res = Page::parse(Path::new("content/posts/intro/start.md"), content, &config); let res = Page::parse(
Path::new("content/posts/intro/start.md"),
content,
&config,
&PathBuf::new(),
);
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
assert_eq!(page.path, "hello-world/"); assert_eq!(page.path, "hello-world/");
@ -406,7 +437,12 @@ Hello world"#;
+++ +++
Hello world"#; Hello world"#;
let config = Config::default(); let config = Config::default();
let res = Page::parse(Path::new("content/posts/intro/start.md"), content, &config); let res = Page::parse(
Path::new("content/posts/intro/start.md"),
content,
&config,
&PathBuf::new(),
);
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
assert_eq!(page.path, "hello-world/"); assert_eq!(page.path, "hello-world/");
@ -422,14 +458,15 @@ Hello world"#;
slug = "hello-world" slug = "hello-world"
+++ +++
Hello world"#; Hello world"#;
let res = Page::parse(Path::new("start.md"), content, &Config::default()); let res = Page::parse(Path::new("start.md"), content, &Config::default(), &PathBuf::new());
assert!(res.is_err()); assert!(res.is_err());
} }
#[test] #[test]
fn can_make_slug_from_non_slug_filename() { fn can_make_slug_from_non_slug_filename() {
let config = Config::default(); let config = Config::default();
let res = Page::parse(Path::new(" file with space.md"), "+++\n+++", &config); let res =
Page::parse(Path::new(" file with space.md"), "+++\n+++", &config, &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
assert_eq!(page.slug, "file-with-space"); assert_eq!(page.slug, "file-with-space");
@ -445,7 +482,7 @@ Hello world"#;
Hello world Hello world
<!-- more -->"# <!-- more -->"#
.to_string(); .to_string();
let res = Page::parse(Path::new("hello.md"), &content, &config); let res = Page::parse(Path::new("hello.md"), &content, &config, &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let mut page = res.unwrap(); let mut page = res.unwrap();
page.render_markdown(&HashMap::default(), &Tera::default(), &config, InsertAnchor::None) page.render_markdown(&HashMap::default(), &Tera::default(), &config, InsertAnchor::None)
@ -467,7 +504,11 @@ Hello world
File::create(nested_path.join("graph.jpg")).unwrap(); File::create(nested_path.join("graph.jpg")).unwrap();
File::create(nested_path.join("fail.png")).unwrap(); File::create(nested_path.join("fail.png")).unwrap();
let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default()); let res = Page::from_file(
nested_path.join("index.md").as_path(),
&Config::default(),
&PathBuf::new(),
);
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
assert_eq!(page.file.parent, path.join("content").join("posts")); assert_eq!(page.file.parent, path.join("content").join("posts"));
@ -490,7 +531,11 @@ Hello world
File::create(nested_path.join("graph.jpg")).unwrap(); File::create(nested_path.join("graph.jpg")).unwrap();
File::create(nested_path.join("fail.png")).unwrap(); File::create(nested_path.join("fail.png")).unwrap();
let res = Page::from_file(nested_path.join("index.md").as_path(), &Config::default()); let res = Page::from_file(
nested_path.join("index.md").as_path(),
&Config::default(),
&PathBuf::new(),
);
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
assert_eq!(page.file.parent, path.join("content").join("posts")); assert_eq!(page.file.parent, path.join("content").join("posts"));
@ -499,6 +544,35 @@ Hello world
assert_eq!(page.permalink, "http://a-website.com/posts/hey/"); assert_eq!(page.permalink, "http://a-website.com/posts/hey/");
} }
// https://github.com/getzola/zola/issues/607
#[test]
fn page_with_assets_and_date_in_folder_name() {
let tmp_dir = tempdir().expect("create temp dir");
let path = tmp_dir.path();
create_dir(&path.join("content")).expect("create content temp dir");
create_dir(&path.join("content").join("posts")).expect("create posts temp dir");
let nested_path = path.join("content").join("posts").join("2013-06-02_with-assets");
create_dir(&nested_path).expect("create nested temp dir");
let mut f = File::create(nested_path.join("index.md")).unwrap();
f.write_all(b"+++\n\n+++\n").unwrap();
File::create(nested_path.join("example.js")).unwrap();
File::create(nested_path.join("graph.jpg")).unwrap();
File::create(nested_path.join("fail.png")).unwrap();
let res = Page::from_file(
nested_path.join("index.md").as_path(),
&Config::default(),
&PathBuf::new(),
);
assert!(res.is_ok());
let page = res.unwrap();
assert_eq!(page.file.parent, path.join("content").join("posts"));
assert_eq!(page.slug, "with-assets");
assert_eq!(page.meta.date, Some("2013-06-02".to_string()));
assert_eq!(page.assets.len(), 3);
assert_eq!(page.permalink, "http://a-website.com/posts/with-assets/");
}
#[test] #[test]
fn page_with_ignored_assets_filters_out_correct_files() { fn page_with_ignored_assets_filters_out_correct_files() {
let tmp_dir = tempdir().expect("create temp dir"); let tmp_dir = tempdir().expect("create temp dir");
@ -518,7 +592,7 @@ Hello world
let mut config = Config::default(); let mut config = Config::default();
config.ignored_content_globset = Some(gsb.build().unwrap()); config.ignored_content_globset = Some(gsb.build().unwrap());
let res = Page::from_file(nested_path.join("index.md").as_path(), &config); let res = Page::from_file(nested_path.join("index.md").as_path(), &config, &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
@ -535,7 +609,7 @@ Hello world
Hello world Hello world
<!-- more -->"# <!-- more -->"#
.to_string(); .to_string();
let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config); let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config, &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
@ -552,7 +626,12 @@ Hello world
Hello world Hello world
<!-- more -->"# <!-- more -->"#
.to_string(); .to_string();
let res = Page::parse(Path::new("2018-10-02T15:00:00Z-hello.md"), &content, &config); let res = Page::parse(
Path::new("2018-10-02T15:00:00Z-hello.md"),
&content,
&config,
&PathBuf::new(),
);
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
@ -570,7 +649,7 @@ date = 2018-09-09
Hello world Hello world
<!-- more -->"# <!-- more -->"#
.to_string(); .to_string();
let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config); let res = Page::parse(Path::new("2018-10-08_hello.md"), &content, &config, &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
@ -587,10 +666,10 @@ Hello world
+++ +++
Bonjour le monde"# Bonjour le monde"#
.to_string(); .to_string();
let res = Page::parse(Path::new("hello.fr.md"), &content, &config); let res = Page::parse(Path::new("hello.fr.md"), &content, &config, &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
assert_eq!(page.lang, Some("fr".to_string())); assert_eq!(page.lang, "fr".to_string());
assert_eq!(page.slug, "hello"); assert_eq!(page.slug, "hello");
assert_eq!(page.permalink, "http://a-website.com/fr/hello/"); assert_eq!(page.permalink, "http://a-website.com/fr/hello/");
} }
@ -604,11 +683,12 @@ Bonjour le monde"#
+++ +++
Bonjour le monde"# Bonjour le monde"#
.to_string(); .to_string();
let res = Page::parse(Path::new("2018-10-08_hello.fr.md"), &content, &config); let res =
Page::parse(Path::new("2018-10-08_hello.fr.md"), &content, &config, &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
assert_eq!(page.meta.date, Some("2018-10-08".to_string())); assert_eq!(page.meta.date, Some("2018-10-08".to_string()));
assert_eq!(page.lang, Some("fr".to_string())); assert_eq!(page.lang, "fr".to_string());
assert_eq!(page.slug, "hello"); assert_eq!(page.slug, "hello");
assert_eq!(page.permalink, "http://a-website.com/fr/hello/"); assert_eq!(page.permalink, "http://a-website.com/fr/hello/");
} }
@ -623,10 +703,10 @@ path = "bonjour"
+++ +++
Bonjour le monde"# Bonjour le monde"#
.to_string(); .to_string();
let res = Page::parse(Path::new("hello.fr.md"), &content, &config); let res = Page::parse(Path::new("hello.fr.md"), &content, &config, &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
assert_eq!(page.lang, Some("fr".to_string())); assert_eq!(page.lang, "fr".to_string());
assert_eq!(page.slug, "hello"); assert_eq!(page.slug, "hello");
assert_eq!(page.permalink, "http://a-website.com/bonjour/"); assert_eq!(page.permalink, "http://a-website.com/bonjour/");
} }

View file

@ -5,7 +5,7 @@ use slotmap::Key;
use tera::{Context as TeraContext, Tera}; use tera::{Context as TeraContext, Tera};
use config::Config; use config::Config;
use errors::{Result, ResultExt}; use errors::{Error, Result};
use front_matter::{split_section_content, SectionFrontMatter}; use front_matter::{split_section_content, SectionFrontMatter};
use rendering::{render_content, Header, RenderContext}; use rendering::{render_content, Header, RenderContext};
use utils::fs::{find_related_assets, read_file}; use utils::fs::{find_related_assets, read_file};
@ -51,19 +51,23 @@ pub struct Section {
/// How long would it take to read the raw content. /// How long would it take to read the raw content.
/// See `get_reading_analytics` on how it is calculated /// See `get_reading_analytics` on how it is calculated
pub reading_time: Option<usize>, pub reading_time: Option<usize>,
/// The language of that section. `None` if the user doesn't setup `languages` in config. /// The language of that section. Equal to the default lang if the user doesn't setup `languages` in config.
/// Corresponds to the lang in the _index.{lang}.md file scheme /// Corresponds to the lang in the _index.{lang}.md file scheme
pub lang: Option<String>, pub lang: String,
/// Contains all the translated version of that section /// Contains all the translated version of that section
pub translations: Vec<Key>, pub translations: Vec<Key>,
} }
impl Section { impl Section {
pub fn new<P: AsRef<Path>>(file_path: P, meta: SectionFrontMatter) -> Section { pub fn new<P: AsRef<Path>>(
file_path: P,
meta: SectionFrontMatter,
base_path: &PathBuf,
) -> Section {
let file_path = file_path.as_ref(); let file_path = file_path.as_ref();
Section { Section {
file: FileInfo::new_section(file_path), file: FileInfo::new_section(file_path, base_path),
meta, meta,
ancestors: vec![], ancestors: vec![],
path: "".to_string(), path: "".to_string(),
@ -79,24 +83,29 @@ impl Section {
toc: vec![], toc: vec![],
word_count: None, word_count: None,
reading_time: None, reading_time: None,
lang: None, lang: String::new(),
translations: Vec::new(), translations: Vec::new(),
} }
} }
pub fn parse(file_path: &Path, content: &str, config: &Config) -> Result<Section> { pub fn parse(
file_path: &Path,
content: &str,
config: &Config,
base_path: &PathBuf,
) -> Result<Section> {
let (meta, content) = split_section_content(file_path, content)?; let (meta, content) = split_section_content(file_path, content)?;
let mut section = Section::new(file_path, meta); let mut section = Section::new(file_path, meta, base_path);
section.lang = section.file.find_language(config)?; section.lang = section.file.find_language(config)?;
section.raw_content = content; section.raw_content = content;
let (word_count, reading_time) = get_reading_analytics(&section.raw_content); let (word_count, reading_time) = get_reading_analytics(&section.raw_content);
section.word_count = Some(word_count); section.word_count = Some(word_count);
section.reading_time = Some(reading_time); section.reading_time = Some(reading_time);
let path = format!("{}/", section.file.components.join("/")); let path = section.file.components.join("/");
if let Some(ref lang) = section.lang { if section.lang != config.default_language {
section.path = format!("{}/{}", lang, path); section.path = format!("{}/{}", section.lang, path);
} else { } else {
section.path = path; section.path = format!("{}/", path);
} }
section.components = section section.components = section
.path .path
@ -109,10 +118,14 @@ impl Section {
} }
/// Read and parse a .md file into a Page struct /// Read and parse a .md file into a Page struct
pub fn from_file<P: AsRef<Path>>(path: P, config: &Config) -> Result<Section> { pub fn from_file<P: AsRef<Path>>(
path: P,
config: &Config,
base_path: &PathBuf,
) -> Result<Section> {
let path = path.as_ref(); let path = path.as_ref();
let content = read_file(path)?; let content = read_file(path)?;
let mut section = Section::parse(path, &content, config)?; let mut section = Section::parse(path, &content, config, base_path)?;
let parent_dir = path.parent().unwrap(); let parent_dir = path.parent().unwrap();
let assets = find_related_assets(parent_dir); let assets = find_related_assets(parent_dir);
@ -171,8 +184,9 @@ impl Section {
context.tera_context.insert("section", &SerializingSection::from_section_basic(self, None)); context.tera_context.insert("section", &SerializingSection::from_section_basic(self, None));
let res = render_content(&self.raw_content, &context) let res = render_content(&self.raw_content, &context).map_err(|e| {
.chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?; Error::chain(format!("Failed to render content of {}", self.file.path.display()), e)
})?;
self.content = res.body; self.content = res.body;
self.toc = res.toc; self.toc = res.toc;
Ok(()) Ok(())
@ -188,9 +202,11 @@ impl Section {
context.insert("current_path", &self.path); context.insert("current_path", &self.path);
context.insert("section", &self.to_serialized(library)); context.insert("section", &self.to_serialized(library));
context.insert("lang", &self.lang); context.insert("lang", &self.lang);
context.insert("toc", &self.toc);
render_template(tpl_name, tera, &context, &config.theme) render_template(tpl_name, tera, context, &config.theme).map_err(|e| {
.chain_err(|| format!("Failed to render section '{}'", self.file.path.display())) Error::chain(format!("Failed to render section '{}'", self.file.path.display()), e)
})
} }
/// Is this the index section? /// Is this the index section?
@ -237,7 +253,7 @@ impl Default for Section {
toc: vec![], toc: vec![],
reading_time: None, reading_time: None,
word_count: None, word_count: None,
lang: None, lang: String::new(),
translations: Vec::new(), translations: Vec::new(),
} }
} }
@ -247,7 +263,7 @@ impl Default for Section {
mod tests { mod tests {
use std::fs::{create_dir, File}; use std::fs::{create_dir, File};
use std::io::Write; use std::io::Write;
use std::path::Path; use std::path::{Path, PathBuf};
use globset::{Glob, GlobSetBuilder}; use globset::{Glob, GlobSetBuilder};
use tempfile::tempdir; use tempfile::tempdir;
@ -269,7 +285,11 @@ mod tests {
File::create(nested_path.join("graph.jpg")).unwrap(); File::create(nested_path.join("graph.jpg")).unwrap();
File::create(nested_path.join("fail.png")).unwrap(); File::create(nested_path.join("fail.png")).unwrap();
let res = Section::from_file(nested_path.join("_index.md").as_path(), &Config::default()); let res = Section::from_file(
nested_path.join("_index.md").as_path(),
&Config::default(),
&PathBuf::new(),
);
assert!(res.is_ok()); assert!(res.is_ok());
let section = res.unwrap(); let section = res.unwrap();
assert_eq!(section.assets.len(), 3); assert_eq!(section.assets.len(), 3);
@ -295,7 +315,8 @@ mod tests {
let mut config = Config::default(); let mut config = Config::default();
config.ignored_content_globset = Some(gsb.build().unwrap()); config.ignored_content_globset = Some(gsb.build().unwrap());
let res = Section::from_file(nested_path.join("_index.md").as_path(), &config); let res =
Section::from_file(nested_path.join("_index.md").as_path(), &config, &PathBuf::new());
assert!(res.is_ok()); assert!(res.is_ok());
let page = res.unwrap(); let page = res.unwrap();
@ -312,10 +333,33 @@ mod tests {
+++ +++
Bonjour le monde"# Bonjour le monde"#
.to_string(); .to_string();
let res = Section::parse(Path::new("content/hello/nested/_index.fr.md"), &content, &config); let res = Section::parse(
Path::new("content/hello/nested/_index.fr.md"),
&content,
&config,
&PathBuf::new(),
);
assert!(res.is_ok()); assert!(res.is_ok());
let section = res.unwrap(); let section = res.unwrap();
assert_eq!(section.lang, Some("fr".to_string())); assert_eq!(section.lang, "fr".to_string());
assert_eq!(section.permalink, "http://a-website.com/fr/hello/nested/"); assert_eq!(section.permalink, "http://a-website.com/fr/hello/nested/");
} }
// https://zola.discourse.group/t/rfc-i18n/13/17?u=keats
#[test]
fn can_make_links_to_translated_sections_without_double_trailing_slash() {
let mut config = Config::default();
config.languages.push(Language { code: String::from("fr"), rss: false });
let content = r#"
+++
+++
Bonjour le monde"#
.to_string();
let res =
Section::parse(Path::new("content/_index.fr.md"), &content, &config, &PathBuf::new());
assert!(res.is_ok());
let section = res.unwrap();
assert_eq!(section.lang, "fr".to_string());
assert_eq!(section.permalink, "http://a-website.com/fr/");
}
} }

View file

@ -5,11 +5,10 @@ use tera::{Map, Value};
use content::{Page, Section}; use content::{Page, Section};
use library::Library; use library::Library;
use rendering::Header;
#[derive(Clone, Debug, PartialEq, Serialize)] #[derive(Clone, Debug, PartialEq, Serialize)]
pub struct TranslatedContent<'a> { pub struct TranslatedContent<'a> {
lang: &'a Option<String>, lang: &'a str,
permalink: &'a str, permalink: &'a str,
title: &'a Option<String>, title: &'a Option<String>,
} }
@ -67,10 +66,9 @@ pub struct SerializingPage<'a> {
summary: &'a Option<String>, summary: &'a Option<String>,
word_count: Option<usize>, word_count: Option<usize>,
reading_time: Option<usize>, reading_time: Option<usize>,
toc: &'a [Header],
assets: &'a [String], assets: &'a [String],
draft: bool, draft: bool,
lang: &'a Option<String>, lang: &'a str,
lighter: Option<Box<SerializingPage<'a>>>, lighter: Option<Box<SerializingPage<'a>>>,
heavier: Option<Box<SerializingPage<'a>>>, heavier: Option<Box<SerializingPage<'a>>>,
earlier: Option<Box<SerializingPage<'a>>>, earlier: Option<Box<SerializingPage<'a>>>,
@ -129,7 +127,6 @@ impl<'a> SerializingPage<'a> {
summary: &page.summary, summary: &page.summary,
word_count: page.word_count, word_count: page.word_count,
reading_time: page.reading_time, reading_time: page.reading_time,
toc: &page.toc,
assets: &page.serialized_assets, assets: &page.serialized_assets,
draft: page.is_draft(), draft: page.is_draft(),
lang: &page.lang, lang: &page.lang,
@ -185,7 +182,6 @@ impl<'a> SerializingPage<'a> {
summary: &page.summary, summary: &page.summary,
word_count: page.word_count, word_count: page.word_count,
reading_time: page.reading_time, reading_time: page.reading_time,
toc: &page.toc,
assets: &page.serialized_assets, assets: &page.serialized_assets,
draft: page.is_draft(), draft: page.is_draft(),
lang: &page.lang, lang: &page.lang,
@ -211,8 +207,7 @@ pub struct SerializingSection<'a> {
components: &'a [String], components: &'a [String],
word_count: Option<usize>, word_count: Option<usize>,
reading_time: Option<usize>, reading_time: Option<usize>,
lang: &'a Option<String>, lang: &'a str,
toc: &'a [Header],
assets: &'a [String], assets: &'a [String],
pages: Vec<SerializingPage<'a>>, pages: Vec<SerializingPage<'a>>,
subsections: Vec<&'a str>, subsections: Vec<&'a str>,
@ -225,7 +220,7 @@ impl<'a> SerializingSection<'a> {
let mut subsections = Vec::with_capacity(section.subsections.len()); let mut subsections = Vec::with_capacity(section.subsections.len());
for k in &section.pages { for k in &section.pages {
pages.push(library.get_page_by_key(*k).to_serialized(library)); pages.push(library.get_page_by_key(*k).to_serialized_basic(library));
} }
for k in &section.subsections { for k in &section.subsections {
@ -251,7 +246,6 @@ impl<'a> SerializingSection<'a> {
components: &section.components, components: &section.components,
word_count: section.word_count, word_count: section.word_count,
reading_time: section.reading_time, reading_time: section.reading_time,
toc: &section.toc,
assets: &section.serialized_assets, assets: &section.serialized_assets,
lang: &section.lang, lang: &section.lang,
pages, pages,
@ -290,7 +284,6 @@ impl<'a> SerializingSection<'a> {
components: &section.components, components: &section.components,
word_count: section.word_count, word_count: section.word_count,
reading_time: section.reading_time, reading_time: section.reading_time,
toc: &section.toc,
assets: &section.serialized_assets, assets: &section.serialized_assets,
lang: &section.lang, lang: &section.lang,
pages: vec![], pages: vec![],

View file

@ -5,6 +5,7 @@ use slotmap::{DenseSlotMap, Key};
use front_matter::SortBy; use front_matter::SortBy;
use config::Config;
use content::{Page, Section}; use content::{Page, Section};
use sorting::{find_siblings, sort_pages_by_date, sort_pages_by_weight}; use sorting::{find_siblings, sort_pages_by_date, sort_pages_by_weight};
@ -82,7 +83,7 @@ impl Library {
/// Find out the direct subsections of each subsection if there are some /// Find out the direct subsections of each subsection if there are some
/// as well as the pages for each section /// as well as the pages for each section
pub fn populate_sections(&mut self) { pub fn populate_sections(&mut self, config: &Config) {
let root_path = let root_path =
self.sections.values().find(|s| s.is_index()).map(|s| s.file.parent.clone()).unwrap(); self.sections.values().find(|s| s.is_index()).map(|s| s.file.parent.clone()).unwrap();
// We are going to get both the ancestors and grandparents for each section in one go // We are going to get both the ancestors and grandparents for each section in one go
@ -128,8 +129,8 @@ impl Library {
} }
for (key, page) in &mut self.pages { for (key, page) in &mut self.pages {
let parent_filename = if let Some(ref lang) = page.lang { let parent_filename = if page.lang != config.default_language {
format!("_index.{}.md", lang) format!("_index.{}.md", page.lang)
} else { } else {
"_index.md".to_string() "_index.md".to_string()
}; };
@ -330,15 +331,19 @@ impl Library {
.collect() .collect()
} }
pub fn find_parent_section<P: AsRef<Path>>(&self, path: P) -> Option<&Section> { /// Find the parent section & all grandparents section that have transparent=true
let page_key = self.paths_to_pages[path.as_ref()]; /// Only used in rebuild.
for s in self.sections.values() { pub fn find_parent_sections<P: AsRef<Path>>(&self, path: P) -> Vec<&Section> {
if s.pages.contains(&page_key) { let mut parents = vec![];
return Some(s); let page = self.get_page(path.as_ref()).unwrap();
for ancestor in page.ancestors.iter().rev() {
let section = self.get_section_by_key(*ancestor);
if parents.is_empty() || section.meta.transparent {
parents.push(section);
} }
} }
None parents
} }
/// Only used in tests /// Only used in tests

View file

@ -4,7 +4,7 @@ use slotmap::Key;
use tera::{to_value, Context, Tera, Value}; use tera::{to_value, Context, Tera, Value};
use config::Config; use config::Config;
use errors::{Result, ResultExt}; use errors::{Error, Result};
use utils::templates::render_template; use utils::templates::render_template;
use content::{Section, SerializingPage, SerializingSection}; use content::{Section, SerializingPage, SerializingSection};
@ -221,13 +221,14 @@ impl<'a> Paginator<'a> {
context.insert("current_path", &pager.path); context.insert("current_path", &pager.path);
context.insert("paginator", &self.build_paginator_context(pager)); context.insert("paginator", &self.build_paginator_context(pager));
render_template(&self.template, tera, &context, &config.theme) render_template(&self.template, tera, context, &config.theme)
.chain_err(|| format!("Failed to render pager {}", pager.index)) .map_err(|e| Error::chain(format!("Failed to render pager {}", pager.index), e))
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use std::path::PathBuf;
use tera::to_value; use tera::to_value;
use config::Taxonomy as TaxonomyConfig; use config::Taxonomy as TaxonomyConfig;
@ -242,7 +243,7 @@ mod tests {
let mut f = SectionFrontMatter::default(); let mut f = SectionFrontMatter::default();
f.paginate_by = Some(2); f.paginate_by = Some(2);
f.paginate_path = "page".to_string(); f.paginate_path = "page".to_string();
let mut s = Section::new("content/_index.md", f); let mut s = Section::new("content/_index.md", f, &PathBuf::new());
if !is_index { if !is_index {
s.path = "posts/".to_string(); s.path = "posts/".to_string();
s.permalink = "https://vincent.is/posts/".to_string(); s.permalink = "https://vincent.is/posts/".to_string();

View file

@ -113,6 +113,7 @@ pub fn find_siblings(sorted: Vec<(&Key, bool)>) -> Vec<(Key, Option<Key>, Option
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use slotmap::DenseSlotMap; use slotmap::DenseSlotMap;
use std::path::PathBuf;
use super::{find_siblings, sort_pages_by_date, sort_pages_by_weight}; use super::{find_siblings, sort_pages_by_date, sort_pages_by_weight};
use content::Page; use content::Page;
@ -122,13 +123,13 @@ mod tests {
let mut front_matter = PageFrontMatter::default(); let mut front_matter = PageFrontMatter::default();
front_matter.date = Some(date.to_string()); front_matter.date = Some(date.to_string());
front_matter.date_to_datetime(); front_matter.date_to_datetime();
Page::new("content/hello.md", front_matter) Page::new("content/hello.md", front_matter, &PathBuf::new())
} }
fn create_page_with_weight(weight: usize) -> Page { fn create_page_with_weight(weight: usize) -> Page {
let mut front_matter = PageFrontMatter::default(); let mut front_matter = PageFrontMatter::default();
front_matter.weight = Some(weight); front_matter.weight = Some(weight);
Page::new("content/hello.md", front_matter) Page::new("content/hello.md", front_matter, &PathBuf::new())
} }
#[test] #[test]

View file

@ -5,7 +5,7 @@ use slug::slugify;
use tera::{Context, Tera}; use tera::{Context, Tera};
use config::{Config, Taxonomy as TaxonomyConfig}; use config::{Config, Taxonomy as TaxonomyConfig};
use errors::{Result, ResultExt}; use errors::{Error, Result};
use utils::templates::render_template; use utils::templates::render_template;
use content::SerializingPage; use content::SerializingPage;
@ -48,7 +48,13 @@ pub struct TaxonomyItem {
} }
impl TaxonomyItem { impl TaxonomyItem {
pub fn new(name: &str, path: &str, config: &Config, keys: Vec<Key>, library: &Library) -> Self { pub fn new(
name: &str,
taxonomy: &TaxonomyConfig,
config: &Config,
keys: Vec<Key>,
library: &Library,
) -> Self {
// Taxonomy are almost always used for blogs so we filter by dates // Taxonomy are almost always used for blogs so we filter by dates
// and it's not like we can sort things across sections by anything other // and it's not like we can sort things across sections by anything other
// than dates // than dates
@ -64,7 +70,11 @@ impl TaxonomyItem {
.collect(); .collect();
let (mut pages, ignored_pages) = sort_pages_by_date(data); let (mut pages, ignored_pages) = sort_pages_by_date(data);
let slug = slugify(name); let slug = slugify(name);
let permalink = config.make_permalink(&format!("/{}/{}", path, slug)); let permalink = if taxonomy.lang != config.default_language {
config.make_permalink(&format!("/{}/{}/{}", taxonomy.lang, taxonomy.name, slug))
} else {
config.make_permalink(&format!("/{}/{}", taxonomy.name, slug))
};
// We still append pages without dates at the end // We still append pages without dates at the end
pages.extend(ignored_pages); pages.extend(ignored_pages);
@ -108,7 +118,7 @@ impl Taxonomy {
) -> Taxonomy { ) -> Taxonomy {
let mut sorted_items = vec![]; let mut sorted_items = vec![];
for (name, pages) in items { for (name, pages) in items {
sorted_items.push(TaxonomyItem::new(&name, &kind.name, config, pages, library)); sorted_items.push(TaxonomyItem::new(&name, &kind, config, pages, library));
} }
sorted_items.sort_by(|a, b| a.name.cmp(&b.name)); sorted_items.sort_by(|a, b| a.name.cmp(&b.name));
@ -140,8 +150,10 @@ impl Taxonomy {
); );
context.insert("current_path", &format!("/{}/{}", self.kind.name, item.slug)); context.insert("current_path", &format!("/{}/{}", self.kind.name, item.slug));
render_template(&format!("{}/single.html", self.kind.name), tera, &context, &config.theme) render_template(&format!("{}/single.html", self.kind.name), tera, context, &config.theme)
.chain_err(|| format!("Failed to render single term {} page.", self.kind.name)) .map_err(|e| {
Error::chain(format!("Failed to render single term {} page.", self.kind.name), e)
})
} }
pub fn render_all_terms( pub fn render_all_terms(
@ -159,8 +171,10 @@ impl Taxonomy {
context.insert("current_url", &config.make_permalink(&self.kind.name)); context.insert("current_url", &config.make_permalink(&self.kind.name));
context.insert("current_path", &self.kind.name); context.insert("current_path", &self.kind.name);
render_template(&format!("{}/list.html", self.kind.name), tera, &context, &config.theme) render_template(&format!("{}/list.html", self.kind.name), tera, context, &config.theme)
.chain_err(|| format!("Failed to render a list of {} page.", self.kind.name)) .map_err(|e| {
Error::chain(format!("Failed to render a list of {} page.", self.kind.name), e)
})
} }
pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> { pub fn to_serialized<'a>(&'a self, library: &'a Library) -> SerializedTaxonomy<'a> {
@ -186,6 +200,14 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonom
for (name, val) in &page.meta.taxonomies { for (name, val) in &page.meta.taxonomies {
if taxonomies_def.contains_key(name) { if taxonomies_def.contains_key(name) {
if taxonomies_def[name].lang != page.lang {
bail!(
"Page `{}` has taxonomy `{}` which is not available in that language",
page.file.path.display(),
name
);
}
all_taxonomies.entry(name).or_insert_with(HashMap::new); all_taxonomies.entry(name).or_insert_with(HashMap::new);
for v in val { for v in val {
@ -220,7 +242,7 @@ mod tests {
use super::*; use super::*;
use std::collections::HashMap; use std::collections::HashMap;
use config::{Config, Taxonomy as TaxonomyConfig}; use config::{Config, Language, Taxonomy as TaxonomyConfig};
use content::Page; use content::Page;
use library::Library; use library::Library;
@ -230,9 +252,21 @@ mod tests {
let mut library = Library::new(2, 0, false); let mut library = Library::new(2, 0, false);
config.taxonomies = vec![ config.taxonomies = vec![
TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() }, TaxonomyConfig {
TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }, name: "categories".to_string(),
TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }, lang: config.default_language.clone(),
..TaxonomyConfig::default()
},
TaxonomyConfig {
name: "tags".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
},
TaxonomyConfig {
name: "authors".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
},
]; ];
let mut page1 = Page::default(); let mut page1 = Page::default();
@ -240,6 +274,7 @@ mod tests {
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]); taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]);
page1.meta.taxonomies = taxo_page1; page1.meta.taxonomies = taxo_page1;
page1.lang = config.default_language.clone();
library.insert_page(page1); library.insert_page(page1);
let mut page2 = Page::default(); let mut page2 = Page::default();
@ -247,6 +282,7 @@ mod tests {
taxo_page2.insert("tags".to_string(), vec!["rust".to_string(), "js".to_string()]); taxo_page2.insert("tags".to_string(), vec!["rust".to_string(), "js".to_string()]);
taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]); taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]);
page2.meta.taxonomies = taxo_page2; page2.meta.taxonomies = taxo_page2;
page2.lang = config.default_language.clone();
library.insert_page(page2); library.insert_page(page2);
let mut page3 = Page::default(); let mut page3 = Page::default();
@ -254,6 +290,7 @@ mod tests {
taxo_page3.insert("tags".to_string(), vec!["js".to_string()]); taxo_page3.insert("tags".to_string(), vec!["js".to_string()]);
taxo_page3.insert("authors".to_string(), vec!["Vincent Prouillet".to_string()]); taxo_page3.insert("authors".to_string(), vec!["Vincent Prouillet".to_string()]);
page3.meta.taxonomies = taxo_page3; page3.meta.taxonomies = taxo_page3;
page3.lang = config.default_language.clone();
library.insert_page(page3); library.insert_page(page3);
let taxonomies = find_taxonomies(&config, &library).unwrap(); let taxonomies = find_taxonomies(&config, &library).unwrap();
@ -309,12 +346,141 @@ mod tests {
let mut config = Config::default(); let mut config = Config::default();
let mut library = Library::new(2, 0, false); let mut library = Library::new(2, 0, false);
config.taxonomies = config.taxonomies = vec![TaxonomyConfig {
vec![TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }]; name: "authors".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
}];
let mut page1 = Page::default(); let mut page1 = Page::default();
let mut taxo_page1 = HashMap::new(); let mut taxo_page1 = HashMap::new();
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]); taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
page1.meta.taxonomies = taxo_page1; page1.meta.taxonomies = taxo_page1;
page1.lang = config.default_language.clone();
library.insert_page(page1);
let taxonomies = find_taxonomies(&config, &library);
assert!(taxonomies.is_err());
let err = taxonomies.unwrap_err();
// no path as this is created by Default
assert_eq!(
format!("{}", err),
"Page `` has taxonomy `tags` which is not defined in config.toml"
);
}
#[test]
fn can_make_taxonomies_in_multiple_languages() {
let mut config = Config::default();
config.languages.push(Language { rss: false, code: "fr".to_string() });
let mut library = Library::new(2, 0, true);
config.taxonomies = vec![
TaxonomyConfig {
name: "categories".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
},
TaxonomyConfig {
name: "tags".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
},
TaxonomyConfig {
name: "auteurs".to_string(),
lang: "fr".to_string(),
..TaxonomyConfig::default()
},
];
let mut page1 = Page::default();
let mut taxo_page1 = HashMap::new();
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
taxo_page1.insert("categories".to_string(), vec!["Programming tutorials".to_string()]);
page1.meta.taxonomies = taxo_page1;
page1.lang = config.default_language.clone();
library.insert_page(page1);
let mut page2 = Page::default();
let mut taxo_page2 = HashMap::new();
taxo_page2.insert("tags".to_string(), vec!["rust".to_string()]);
taxo_page2.insert("categories".to_string(), vec!["Other".to_string()]);
page2.meta.taxonomies = taxo_page2;
page2.lang = config.default_language.clone();
library.insert_page(page2);
let mut page3 = Page::default();
page3.lang = "fr".to_string();
let mut taxo_page3 = HashMap::new();
taxo_page3.insert("auteurs".to_string(), vec!["Vincent Prouillet".to_string()]);
page3.meta.taxonomies = taxo_page3;
library.insert_page(page3);
let taxonomies = find_taxonomies(&config, &library).unwrap();
let (tags, categories, authors) = {
let mut t = None;
let mut c = None;
let mut a = None;
for x in taxonomies {
match x.kind.name.as_ref() {
"tags" => t = Some(x),
"categories" => c = Some(x),
"auteurs" => a = Some(x),
_ => unreachable!(),
}
}
(t.unwrap(), c.unwrap(), a.unwrap())
};
assert_eq!(tags.items.len(), 2);
assert_eq!(categories.items.len(), 2);
assert_eq!(authors.items.len(), 1);
assert_eq!(tags.items[0].name, "db");
assert_eq!(tags.items[0].slug, "db");
assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/");
assert_eq!(tags.items[0].pages.len(), 1);
assert_eq!(tags.items[1].name, "rust");
assert_eq!(tags.items[1].slug, "rust");
assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/rust/");
assert_eq!(tags.items[1].pages.len(), 2);
assert_eq!(authors.items[0].name, "Vincent Prouillet");
assert_eq!(authors.items[0].slug, "vincent-prouillet");
assert_eq!(
authors.items[0].permalink,
"http://a-website.com/fr/auteurs/vincent-prouillet/"
);
assert_eq!(authors.items[0].pages.len(), 1);
assert_eq!(categories.items[0].name, "Other");
assert_eq!(categories.items[0].slug, "other");
assert_eq!(categories.items[0].permalink, "http://a-website.com/categories/other/");
assert_eq!(categories.items[0].pages.len(), 1);
assert_eq!(categories.items[1].name, "Programming tutorials");
assert_eq!(categories.items[1].slug, "programming-tutorials");
assert_eq!(
categories.items[1].permalink,
"http://a-website.com/categories/programming-tutorials/"
);
assert_eq!(categories.items[1].pages.len(), 1);
}
#[test]
fn errors_on_taxonomy_of_different_language() {
let mut config = Config::default();
config.languages.push(Language { rss: false, code: "fr".to_string() });
let mut library = Library::new(2, 0, false);
config.taxonomies =
vec![TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }];
let mut page1 = Page::default();
page1.lang = "fr".to_string();
let mut taxo_page1 = HashMap::new();
taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);
page1.meta.taxonomies = taxo_page1;
library.insert_page(page1); library.insert_page(page1);
let taxonomies = find_taxonomies(&config, &library); let taxonomies = find_taxonomies(&config, &library);
@ -322,8 +488,8 @@ mod tests {
let err = taxonomies.unwrap_err(); let err = taxonomies.unwrap_err();
// no path as this is created by Default // no path as this is created by Default
assert_eq!( assert_eq!(
err.description(), format!("{}", err),
"Page `` has taxonomy `tags` which is not defined in config.toml" "Page `` has taxonomy `tags` which is not available in that language"
); );
} }
} }

View file

@ -98,25 +98,27 @@ fn find_page_front_matter_changes(
/// Handles a path deletion: could be a page, a section, a folder /// Handles a path deletion: could be a page, a section, a folder
fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> { fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()> {
// Ignore the event if this path was not known
if !site.library.contains_section(&path.to_path_buf())
&& !site.library.contains_page(&path.to_path_buf())
{ {
return Ok(()); let mut library = site.library.write().unwrap();
} // Ignore the event if this path was not known
if !library.contains_section(&path.to_path_buf())
if is_section { && !library.contains_page(&path.to_path_buf())
if let Some(s) = site.library.remove_section(&path.to_path_buf()) { {
site.permalinks.remove(&s.file.relative); return Ok(());
} }
} else if let Some(p) = site.library.remove_page(&path.to_path_buf()) {
site.permalinks.remove(&p.file.relative);
if !p.meta.taxonomies.is_empty() { if is_section {
site.populate_taxonomies()?; if let Some(s) = library.remove_section(&path.to_path_buf()) {
site.permalinks.remove(&s.file.relative);
}
} else if let Some(p) = library.remove_page(&path.to_path_buf()) {
site.permalinks.remove(&p.file.relative);
} }
} }
// We might have delete the root _index.md so ensure we have at least the default one
// before populating
site.create_default_index_sections()?;
site.populate_sections(); site.populate_sections();
site.populate_taxonomies()?; site.populate_taxonomies()?;
// Ensure we have our fn updated so it doesn't contain the permalink(s)/section/page deleted // Ensure we have our fn updated so it doesn't contain the permalink(s)/section/page deleted
@ -129,35 +131,41 @@ fn delete_element(site: &mut Site, path: &Path, is_section: bool) -> Result<()>
/// Handles a `_index.md` (a section) being edited in some ways /// Handles a `_index.md` (a section) being edited in some ways
fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> { fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> {
let section = Section::from_file(path, &site.config)?; let section = Section::from_file(path, &site.config, &site.base_path)?;
let pathbuf = path.to_path_buf(); let pathbuf = path.to_path_buf();
match site.add_section(section, true)? { match site.add_section(section, true)? {
// Updating a section // Updating a section
Some(prev) => { Some(prev) => {
site.populate_sections(); site.populate_sections();
{
let library = site.library.read().unwrap();
if site.library.get_section(&pathbuf).unwrap().meta == prev.meta { if library.get_section(&pathbuf).unwrap().meta == prev.meta {
// Front matter didn't change, only content did // Front matter didn't change, only content did
// so we render only the section page, not its pages // so we render only the section page, not its pages
return site.render_section(&site.library.get_section(&pathbuf).unwrap(), false); return site.render_section(&library.get_section(&pathbuf).unwrap(), false);
}
} }
// Front matter changed // Front matter changed
for changes in find_section_front_matter_changes( let changes = find_section_front_matter_changes(
&site.library.get_section(&pathbuf).unwrap().meta, &site.library.read().unwrap().get_section(&pathbuf).unwrap().meta,
&prev.meta, &prev.meta,
) { );
for change in changes {
// Sort always comes first if present so the rendering will be fine // Sort always comes first if present so the rendering will be fine
match changes { match change {
SectionChangesNeeded::Sort => { SectionChangesNeeded::Sort => {
site.register_tera_global_fns(); site.register_tera_global_fns();
} }
SectionChangesNeeded::Render => { SectionChangesNeeded::Render => site.render_section(
site.render_section(&site.library.get_section(&pathbuf).unwrap(), false)? &site.library.read().unwrap().get_section(&pathbuf).unwrap(),
} false,
SectionChangesNeeded::RenderWithPages => { )?,
site.render_section(&site.library.get_section(&pathbuf).unwrap(), true)? SectionChangesNeeded::RenderWithPages => site.render_section(
} &site.library.read().unwrap().get_section(&pathbuf).unwrap(),
true,
)?,
// not a common enough operation to make it worth optimizing // not a common enough operation to make it worth optimizing
SectionChangesNeeded::Delete | SectionChangesNeeded::Transparent => { SectionChangesNeeded::Delete | SectionChangesNeeded::Transparent => {
site.build()?; site.build()?;
@ -170,49 +178,54 @@ fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> {
None => { None => {
site.populate_sections(); site.populate_sections();
site.register_tera_global_fns(); site.register_tera_global_fns();
site.render_section(&site.library.get_section(&pathbuf).unwrap(), true) site.render_section(&site.library.read().unwrap().get_section(&pathbuf).unwrap(), true)
} }
} }
} }
macro_rules! render_parent_section { macro_rules! render_parent_sections {
($site: expr, $path: expr) => { ($site: expr, $path: expr) => {
if let Some(s) = $site.library.find_parent_section($path) { for s in $site.library.read().unwrap().find_parent_sections($path) {
$site.render_section(s, false)?; $site.render_section(s, false)?;
}; }
}; };
} }
/// Handles a page being edited in some ways /// Handles a page being edited in some ways
fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> { fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> {
let page = Page::from_file(path, &site.config)?; let page = Page::from_file(path, &site.config, &site.base_path)?;
let pathbuf = path.to_path_buf(); let pathbuf = path.to_path_buf();
match site.add_page(page, true)? { match site.add_page(page, true)? {
// Updating a page // Updating a page
Some(prev) => { Some(prev) => {
site.populate_sections(); site.populate_sections();
site.populate_taxonomies()?; site.populate_taxonomies()?;
site.register_tera_global_fns();
{
let library = site.library.read().unwrap();
// Front matter didn't change, only content did // Front matter didn't change, only content did
if site.library.get_page(&pathbuf).unwrap().meta == prev.meta { if library.get_page(&pathbuf).unwrap().meta == prev.meta {
// Other than the page itself, the summary might be seen // Other than the page itself, the summary might be seen
// on a paginated list for a blog for example // on a paginated list for a blog for example
if site.library.get_page(&pathbuf).unwrap().summary.is_some() { if library.get_page(&pathbuf).unwrap().summary.is_some() {
render_parent_section!(site, path); render_parent_sections!(site, path);
}
return site.render_page(&library.get_page(&pathbuf).unwrap());
} }
site.register_tera_global_fns();
return site.render_page(&site.library.get_page(&pathbuf).unwrap());
} }
// Front matter changed // Front matter changed
for changes in find_page_front_matter_changes( let changes = find_page_front_matter_changes(
&site.library.get_page(&pathbuf).unwrap().meta, &site.library.read().unwrap().get_page(&pathbuf).unwrap().meta,
&prev.meta, &prev.meta,
) { );
for change in changes {
site.register_tera_global_fns(); site.register_tera_global_fns();
// Sort always comes first if present so the rendering will be fine // Sort always comes first if present so the rendering will be fine
match changes { match change {
PageChangesNeeded::Taxonomies => { PageChangesNeeded::Taxonomies => {
site.populate_taxonomies()?; site.populate_taxonomies()?;
site.render_taxonomies()?; site.render_taxonomies()?;
@ -221,8 +234,10 @@ fn handle_page_editing(site: &mut Site, path: &Path) -> Result<()> {
site.render_index()?; site.render_index()?;
} }
PageChangesNeeded::Render => { PageChangesNeeded::Render => {
render_parent_section!(site, path); render_parent_sections!(site, path);
site.render_page(&site.library.get_page(&path.to_path_buf()).unwrap())?; site.render_page(
&site.library.read().unwrap().get_page(&path.to_path_buf()).unwrap(),
)?;
} }
}; };
} }
@ -275,8 +290,11 @@ pub fn after_content_rename(site: &mut Site, old: &Path, new: &Path) -> Result<(
if new_path.file_name().unwrap() == "_index.md" { if new_path.file_name().unwrap() == "_index.md" {
// We aren't entirely sure where the original thing was so just try to delete whatever was // We aren't entirely sure where the original thing was so just try to delete whatever was
// at the old path // at the old path
site.library.remove_page(&old.to_path_buf()); {
site.library.remove_section(&old.to_path_buf()); let mut library = site.library.write().unwrap();
library.remove_page(&old.to_path_buf());
library.remove_section(&old.to_path_buf());
}
return handle_section_editing(site, &new_path); return handle_section_editing(site, &new_path);
} }
@ -287,7 +305,7 @@ pub fn after_content_rename(site: &mut Site, old: &Path, new: &Path) -> Result<(
} else { } else {
old.to_path_buf() old.to_path_buf()
}; };
site.library.remove_page(&old_path); site.library.write().unwrap().remove_page(&old_path);
handle_page_editing(site, &new_path) handle_page_editing(site, &new_path)
} }
@ -350,7 +368,8 @@ pub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> {
match filename { match filename {
"sitemap.xml" => site.render_sitemap(), "sitemap.xml" => site.render_sitemap(),
"rss.xml" => site.render_rss_feed(site.library.pages_values(), None), "rss.xml" => site.render_rss_feed(site.library.read().unwrap().pages_values(), None),
"split_sitemap_index.xml" => site.render_sitemap(),
"robots.txt" => site.render_robots(), "robots.txt" => site.render_robots(),
"single.html" | "list.html" => site.render_taxonomies(), "single.html" | "list.html" => site.render_taxonomies(),
"page.html" => { "page.html" => {

View file

@ -269,3 +269,20 @@ Edite
assert!(res.is_ok()); assert!(res.is_ok());
assert!(file_contains!(site_path, "public/fr/blog/with-assets/index.html", "Edite")); assert!(file_contains!(site_path, "public/fr/blog/with-assets/index.html", "Edite"));
} }
// https://github.com/getzola/zola/issues/620
#[test]
fn can_rebuild_after_renaming_section_and_deleting_file() {
let tmp_dir = tempdir().expect("create temp dir");
let (site_path, mut site) = load_and_build_site!(tmp_dir, "test_site");
let (old_path, new_path) = rename!(site_path, "content/posts/", "post/");
let res = after_content_rename(&mut site, &old_path, &new_path);
assert!(res.is_ok());
let path = site_path.join("content").join("_index.md");
fs::remove_file(&path).unwrap();
let res = after_content_change(&mut site, &path);
println!("{:?}", res);
assert!(res.is_ok());
}

View file

@ -4,9 +4,9 @@ version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
[dependencies] [dependencies]
tera = { version = "0.11", features = ["preserve_order"] } tera = { version = "1.0.0-alpha.3", features = ["preserve_order"] }
syntect = "3" syntect = "3"
pulldown-cmark = "0.2" pulldown-cmark = "0.4"
slug = "0.1" slug = "0.1"
serde = "1" serde = "1"
serde_derive = "1" serde_derive = "1"

View file

@ -1,6 +1,3 @@
use std::borrow::Cow::{Borrowed, Owned};
use self::cmark::{Event, Options, Parser, Tag};
use pulldown_cmark as cmark; use pulldown_cmark as cmark;
use slug::slugify; use slug::slugify;
use syntect::easy::HighlightLines; use syntect::easy::HighlightLines;
@ -9,14 +6,19 @@ use syntect::html::{
}; };
use config::highlighting::{get_highlighter, SYNTAX_SET, THEME_SET}; use config::highlighting::{get_highlighter, SYNTAX_SET, THEME_SET};
use errors::Result;
use link_checker::check_url;
use utils::site::resolve_internal_link;
use context::RenderContext; use context::RenderContext;
use table_of_contents::{make_table_of_contents, Header, TempHeader}; use errors::{Error, Result};
use front_matter::InsertAnchor;
use link_checker::check_url;
use table_of_contents::{make_table_of_contents, Header};
use utils::site::resolve_internal_link;
use utils::vec::InsertMany;
const CONTINUE_READING: &str = "<p><a name=\"continue-reading\"></a></p>\n"; use self::cmark::{Event, LinkType, Options, Parser, Tag};
const CONTINUE_READING: &str =
"<p id=\"zola-continue-reading\"><a name=\"continue-reading\"></a></p>\n";
const ANCHOR_LINK_TEMPLATE: &str = "anchor-link.html";
#[derive(Debug)] #[derive(Debug)]
pub struct Rendered { pub struct Rendered {
@ -25,6 +27,20 @@ pub struct Rendered {
pub toc: Vec<Header>, pub toc: Vec<Header>,
} }
// tracks a header in a slice of pulldown-cmark events
#[derive(Debug)]
struct HeaderRef {
start_idx: usize,
end_idx: usize,
level: i32,
}
impl HeaderRef {
fn new(start: usize, level: i32) -> HeaderRef {
HeaderRef { start_idx: start, end_idx: 0, level }
}
}
// We might have cases where the slug is already present in our list of anchor // We might have cases where the slug is already present in our list of anchor
// for example an article could have several titles named Example // for example an article could have several titles named Example
// We add a counter after the slug if the slug is already present, which // We add a counter after the slug if the slug is already present, which
@ -49,6 +65,71 @@ fn is_colocated_asset_link(link: &str) -> bool {
&& !link.starts_with("mailto:") && !link.starts_with("mailto:")
} }
fn fix_link(link_type: LinkType, link: &str, context: &RenderContext) -> Result<String> {
if link_type == LinkType::Email {
return Ok(link.to_string());
}
// A few situations here:
// - it could be a relative link (starting with `./`)
// - it could be a link to a co-located asset
// - it could be a normal link
let result = if link.starts_with("./") {
match resolve_internal_link(&link, context.permalinks) {
Ok(url) => url,
Err(_) => {
return Err(format!("Relative link {} not found.", link).into());
}
}
} else if is_colocated_asset_link(&link) {
format!("{}{}", context.current_page_permalink, link)
} else if context.config.check_external_links
&& !link.starts_with('#')
&& !link.starts_with("mailto:")
{
let res = check_url(&link);
if res.is_valid() {
link.to_string()
} else {
return Err(format!("Link {} is not valid: {}", link, res.message()).into());
}
} else {
link.to_string()
};
Ok(result)
}
/// get only text in a slice of events
fn get_text(parser_slice: &[Event]) -> String {
let mut title = String::new();
for event in parser_slice.iter() {
if let Event::Text(text) = event {
title += text;
}
}
title
}
fn get_header_refs(events: &[Event]) -> Vec<HeaderRef> {
let mut header_refs = vec![];
for (i, event) in events.iter().enumerate() {
match event {
Event::Start(Tag::Header(level)) => {
header_refs.push(HeaderRef::new(i, *level));
}
Event::End(Tag::Header(_)) => {
let msg = "Header end before start?";
header_refs.last_mut().expect(msg).end_idx = i;
}
_ => (),
}
}
header_refs
}
pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Rendered> { pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Rendered> {
// the rendered html // the rendered html
let mut html = String::with_capacity(content.len()); let mut html = String::with_capacity(content.len());
@ -57,17 +138,9 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
let mut background = IncludeBackground::Yes; let mut background = IncludeBackground::Yes;
let mut highlighter: Option<(HighlightLines, bool)> = None; let mut highlighter: Option<(HighlightLines, bool)> = None;
// If we get text in header, we need to insert the id and a anchor
let mut in_header = false;
// pulldown_cmark can send several text events for a title if there are markdown
// specific characters like `!` in them. We only want to insert the anchor the first time
let mut header_created = false;
let mut anchors: Vec<String> = vec![];
let mut headers = vec![]; let mut inserted_anchors: Vec<String> = vec![];
// Defaults to a 0 level so not a real header let mut headers: Vec<Header> = vec![];
// It should be an Option ideally but not worth the hassle to update
let mut temp_header = TempHeader::default();
let mut opts = Options::empty(); let mut opts = Options::empty();
let mut has_summary = false; let mut has_summary = false;
@ -75,168 +148,129 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
opts.insert(Options::ENABLE_FOOTNOTES); opts.insert(Options::ENABLE_FOOTNOTES);
{ {
let parser = Parser::new_ext(content, opts).map(|event| { let mut events = Parser::new_ext(content, opts)
match event { .map(|event| {
Event::Text(text) => { match event {
// Header first Event::Text(text) => {
if in_header { // if we are in the middle of a code block
if header_created { if let Some((ref mut highlighter, in_extra)) = highlighter {
temp_header.add_text(&text); let highlighted = if in_extra {
return Event::Html(Borrowed("")); if let Some(ref extra) = context.config.extra_syntax_set {
} highlighter.highlight(&text, &extra)
// += as we might have some <code> or other things already there } else {
temp_header.add_text(&text); unreachable!(
header_created = true; "Got a highlighter from extra syntaxes but no extra?"
return Event::Html(Borrowed("")); );
} }
// if we are in the middle of a code block
if let Some((ref mut highlighter, in_extra)) = highlighter {
let highlighted = if in_extra {
if let Some(ref extra) = context.config.extra_syntax_set {
highlighter.highlight(&text, &extra)
} else { } else {
unreachable!("Got a highlighter from extra syntaxes but no extra?"); highlighter.highlight(&text, &SYNTAX_SET)
} };
} else { //let highlighted = &highlighter.highlight(&text, ss);
highlighter.highlight(&text, &SYNTAX_SET) let html = styled_line_to_highlighted_html(&highlighted, background);
}; return Event::Html(html.into());
//let highlighted = &highlighter.highlight(&text, ss);
let html = styled_line_to_highlighted_html(&highlighted, background);
return Event::Html(Owned(html));
}
// Business as usual
Event::Text(text)
}
Event::Start(Tag::CodeBlock(ref info)) => {
if !context.config.highlight_code {
return Event::Html(Borrowed("<pre><code>"));
}
let theme = &THEME_SET.themes[&context.config.highlight_theme];
highlighter = Some(get_highlighter(info, &context.config));
// This selects the background color the same way that start_coloured_html_snippet does
let color =
theme.settings.background.unwrap_or(::syntect::highlighting::Color::WHITE);
background = IncludeBackground::IfDifferent(color);
let snippet = start_highlighted_html_snippet(theme);
Event::Html(Owned(snippet.0))
}
Event::End(Tag::CodeBlock(_)) => {
if !context.config.highlight_code {
return Event::Html(Borrowed("</code></pre>\n"));
}
// reset highlight and close the code block
highlighter = None;
Event::Html(Borrowed("</pre>"))
}
Event::Start(Tag::Image(src, title)) => {
if is_colocated_asset_link(&src) {
return Event::Start(Tag::Image(
Owned(format!("{}{}", context.current_page_permalink, src)),
title,
));
}
Event::Start(Tag::Image(src, title))
}
Event::Start(Tag::Link(link, title)) => {
// A few situations here:
// - it could be a relative link (starting with `./`)
// - it could be a link to a co-located asset
// - it could be a normal link
// - any of those can be in a header or not: if it's in a header
// we need to append to a string
let fixed_link = if link.starts_with("./") {
match resolve_internal_link(&link, context.permalinks) {
Ok(url) => url,
Err(_) => {
error = Some(format!("Relative link {} not found.", link).into());
return Event::Html(Borrowed(""));
}
} }
} else if is_colocated_asset_link(&link) {
format!("{}{}", context.current_page_permalink, link) // Business as usual
} else if context.config.check_external_links Event::Text(text)
&& !link.starts_with('#') }
&& !link.starts_with("mailto:") Event::Start(Tag::CodeBlock(ref info)) => {
{ if !context.config.highlight_code {
let res = check_url(&link); return Event::Html("<pre><code>".into());
if res.is_valid() {
link.to_string()
} else {
error = Some(
format!("Link {} is not valid: {}", link, res.message()).into(),
);
String::new()
} }
} else {
link.to_string()
};
if in_header { let theme = &THEME_SET.themes[&context.config.highlight_theme];
let html = if title.is_empty() { highlighter = Some(get_highlighter(info, &context.config));
format!("<a href=\"{}\">", fixed_link) // This selects the background color the same way that start_coloured_html_snippet does
} else { let color = theme
format!("<a href=\"{}\" title=\"{}\">", fixed_link, title) .settings
.background
.unwrap_or(::syntect::highlighting::Color::WHITE);
background = IncludeBackground::IfDifferent(color);
let snippet = start_highlighted_html_snippet(theme);
Event::Html(snippet.0.into())
}
Event::End(Tag::CodeBlock(_)) => {
if !context.config.highlight_code {
return Event::Html("</code></pre>\n".into());
}
// reset highlight and close the code block
highlighter = None;
Event::Html("</pre>".into())
}
Event::Start(Tag::Image(link_type, src, title)) => {
if is_colocated_asset_link(&src) {
let link = format!("{}{}", context.current_page_permalink, &*src);
return Event::Start(Tag::Image(link_type, link.into(), title));
}
Event::Start(Tag::Image(link_type, src, title))
}
Event::Start(Tag::Link(link_type, link, title)) => {
let fixed_link = match fix_link(link_type, &link, context) {
Ok(fixed_link) => fixed_link,
Err(err) => {
error = Some(err);
return Event::Html("".into());
}
}; };
temp_header.add_html(&html);
return Event::Html(Borrowed(""));
}
Event::Start(Tag::Link(Owned(fixed_link), title)) Event::Start(Tag::Link(link_type, fixed_link.into(), title))
}
Event::End(Tag::Link(_, _)) => {
if in_header {
temp_header.add_html("</a>");
return Event::Html(Borrowed(""));
} }
event Event::Html(ref markup) if markup.contains("<!-- more -->") => {
} has_summary = true;
Event::Start(Tag::Code) => { Event::Html(CONTINUE_READING.into())
if in_header {
temp_header.add_html("<code>");
return Event::Html(Borrowed(""));
} }
event _ => event,
} }
Event::End(Tag::Code) => { })
if in_header { .collect::<Vec<_>>(); // We need to collect the events to make a second pass
temp_header.add_html("</code>");
return Event::Html(Borrowed(""));
}
event
}
Event::Start(Tag::Header(num)) => {
in_header = true;
temp_header = TempHeader::new(num);
Event::Html(Borrowed(""))
}
Event::End(Tag::Header(_)) => {
// End of a header, reset all the things and return the header string
let id = find_anchor(&anchors, slugify(&temp_header.title), 0); let header_refs = get_header_refs(&events);
anchors.push(id.clone());
temp_header.permalink = format!("{}#{}", context.current_page_permalink, id);
temp_header.id = id;
in_header = false; let mut anchors_to_insert = vec![];
header_created = false;
let val = temp_header.to_string(context.tera, context.insert_anchor); for header_ref in header_refs {
headers.push(temp_header.clone()); let start_idx = header_ref.start_idx;
temp_header = TempHeader::default(); let end_idx = header_ref.end_idx;
Event::Html(Owned(val)) let title = get_text(&events[start_idx + 1..end_idx]);
} let id = find_anchor(&inserted_anchors, slugify(&title), 0);
Event::Html(ref markup) if markup.contains("<!-- more -->") => { inserted_anchors.push(id.clone());
has_summary = true;
Event::Html(Borrowed(CONTINUE_READING)) // insert `id` to the tag
} let html = format!("<h{lvl} id=\"{id}\">", lvl = header_ref.level, id = id);
_ => event, events[start_idx] = Event::Html(html.into());
// generate anchors and places to insert them
if context.insert_anchor != InsertAnchor::None {
let anchor_idx = match context.insert_anchor {
InsertAnchor::Left => start_idx + 1,
InsertAnchor::Right => end_idx,
InsertAnchor::None => 0, // Not important
};
let mut c = tera::Context::new();
c.insert("id", &id);
let anchor_link = utils::templates::render_template(
&ANCHOR_LINK_TEMPLATE,
context.tera,
c,
&None,
)
.map_err(|e| Error::chain("Failed to render anchor link template", e))?;
anchors_to_insert.push((anchor_idx, Event::Html(anchor_link.into())));
} }
});
cmark::html::push_html(&mut html, parser); // record header to make table of contents
let permalink = format!("{}#{}", context.current_page_permalink, id);
let h = Header { level: header_ref.level, id, permalink, title, children: Vec::new() };
headers.push(h);
}
if context.insert_anchor != InsertAnchor::None {
events.insert_many(anchors_to_insert);
}
cmark::html::push_html(&mut html, events.into_iter());
} }
if let Some(e) = error { if let Some(e) = error {
@ -245,7 +279,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
Ok(Rendered { Ok(Rendered {
summary_len: if has_summary { html.find(CONTINUE_READING) } else { None }, summary_len: if has_summary { html.find(CONTINUE_READING) } else { None },
body: html, body: html,
toc: make_table_of_contents(&headers), toc: make_table_of_contents(headers),
}) })
} }
} }

View file

@ -4,7 +4,7 @@ use regex::Regex;
use tera::{to_value, Context, Map, Value}; use tera::{to_value, Context, Map, Value};
use context::RenderContext; use context::RenderContext;
use errors::{Result, ResultExt}; use errors::{Error, Result};
// This include forces recompiling this source file if the grammar file changes. // This include forces recompiling this source file if the grammar file changes.
// Uncomment it when doing changes to the .pest file // Uncomment it when doing changes to the .pest file
@ -58,7 +58,7 @@ fn parse_shortcode_call(pair: Pair<Rule>) -> (String, Map<String, Value>) {
for p in pair.into_inner() { for p in pair.into_inner() {
match p.as_rule() { match p.as_rule() {
Rule::ident => { Rule::ident => {
name = Some(p.into_span().as_str().to_string()); name = Some(p.as_span().as_str().to_string());
} }
Rule::kwarg => { Rule::kwarg => {
let mut arg_name = None; let mut arg_name = None;
@ -66,7 +66,7 @@ fn parse_shortcode_call(pair: Pair<Rule>) -> (String, Map<String, Value>) {
for p2 in p.into_inner() { for p2 in p.into_inner() {
match p2.as_rule() { match p2.as_rule() {
Rule::ident => { Rule::ident => {
arg_name = Some(p2.into_span().as_str().to_string()); arg_name = Some(p2.as_span().as_str().to_string());
} }
Rule::literal => { Rule::literal => {
arg_val = Some(parse_literal(p2)); arg_val = Some(parse_literal(p2));
@ -108,15 +108,14 @@ fn render_shortcode(
} }
if let Some(ref b) = body { if let Some(ref b) = body {
// Trimming right to avoid most shortcodes with bodies ending up with a HTML new line // Trimming right to avoid most shortcodes with bodies ending up with a HTML new line
tera_context.insert("body", b.trim_right()); tera_context.insert("body", b.trim_end());
} }
tera_context.extend(context.tera_context.clone()); tera_context.extend(context.tera_context.clone());
let tpl_name = format!("shortcodes/{}.html", name);
let res = context let template_name = format!("shortcodes/{}.html", name);
.tera
.render(&tpl_name, &tera_context) let res = utils::templates::render_template(&template_name, &context.tera, tera_context, &None)
.chain_err(|| format!("Failed to render {} shortcode", name))?; .map_err(|e| Error::chain(format!("Failed to render {} shortcode", name), e))?;
// Small hack to avoid having multiple blank lines because of Tera tags for example // Small hack to avoid having multiple blank lines because of Tera tags for example
// A blank like will cause the markdown parser to think we're out of HTML and start looking // A blank like will cause the markdown parser to think we're out of HTML and start looking
@ -170,7 +169,7 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
// We have at least a `page` pair // We have at least a `page` pair
for p in pairs.next().unwrap().into_inner() { for p in pairs.next().unwrap().into_inner() {
match p.as_rule() { match p.as_rule() {
Rule::text => res.push_str(p.into_span().as_str()), Rule::text => res.push_str(p.as_span().as_str()),
Rule::inline_shortcode => { Rule::inline_shortcode => {
let (name, args) = parse_shortcode_call(p); let (name, args) = parse_shortcode_call(p);
res.push_str(&render_shortcode(&name, &args, context, None)?); res.push_str(&render_shortcode(&name, &args, context, None)?);
@ -180,12 +179,12 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
// 3 items in inner: call, body, end // 3 items in inner: call, body, end
// we don't care about the closing tag // we don't care about the closing tag
let (name, args) = parse_shortcode_call(inner.next().unwrap()); let (name, args) = parse_shortcode_call(inner.next().unwrap());
let body = inner.next().unwrap().into_span().as_str(); let body = inner.next().unwrap().as_span().as_str();
res.push_str(&render_shortcode(&name, &args, context, Some(body))?); res.push_str(&render_shortcode(&name, &args, context, Some(body))?);
} }
Rule::ignored_inline_shortcode => { Rule::ignored_inline_shortcode => {
res.push_str( res.push_str(
&p.into_span().as_str().replacen("{{/*", "{{", 1).replacen("*/}}", "}}", 1), &p.as_span().as_str().replacen("{{/*", "{{", 1).replacen("*/}}", "}}", 1),
); );
} }
Rule::ignored_shortcode_with_body => { Rule::ignored_shortcode_with_body => {
@ -193,13 +192,13 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
match p2.as_rule() { match p2.as_rule() {
Rule::ignored_sc_body_start | Rule::ignored_sc_body_end => { Rule::ignored_sc_body_start | Rule::ignored_sc_body_end => {
res.push_str( res.push_str(
&p2.into_span() &p2.as_span()
.as_str() .as_str()
.replacen("{%/*", "{%", 1) .replacen("{%/*", "{%", 1)
.replacen("*/%}", "%}", 1), .replacen("*/%}", "%}", 1),
); );
} }
Rule::text_in_ignored_body_sc => res.push_str(p2.into_span().as_str()), Rule::text_in_ignored_body_sc => res.push_str(p2.as_span().as_str()),
_ => unreachable!("Got something weird in an ignored shortcode: {:?}", p2), _ => unreachable!("Got something weird in an ignored shortcode: {:?}", p2),
} }
} }
@ -231,7 +230,7 @@ mod tests {
panic!(); panic!();
} }
assert!(res.is_ok()); assert!(res.is_ok());
assert_eq!(res.unwrap().last().unwrap().into_span().end(), $input.len()); assert_eq!(res.unwrap().last().unwrap().as_span().end(), $input.len());
}; };
} }

View file

@ -1,160 +1,59 @@
use front_matter::InsertAnchor; /// Populated while receiving events from the markdown parser
use tera::{Context as TeraContext, Tera};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize)]
pub struct Header { pub struct Header {
#[serde(skip_serializing)] #[serde(skip_serializing)]
pub level: i32, pub level: i32,
pub id: String, pub id: String,
pub title: String,
pub permalink: String, pub permalink: String,
pub title: String,
pub children: Vec<Header>, pub children: Vec<Header>,
} }
impl Header { impl Header {
pub fn from_temp_header(tmp: &TempHeader, children: Vec<Header>) -> Header { pub fn new(level: i32) -> Header {
Header { Header {
level: tmp.level,
id: tmp.id.clone(),
title: tmp.title.clone(),
permalink: tmp.permalink.clone(),
children,
}
}
}
/// Populated while receiving events from the markdown parser
#[derive(Debug, PartialEq, Clone)]
pub struct TempHeader {
pub level: i32,
pub id: String,
pub permalink: String,
pub title: String,
pub html: String,
}
impl TempHeader {
pub fn new(level: i32) -> TempHeader {
TempHeader {
level, level,
id: String::new(), id: String::new(),
permalink: String::new(), permalink: String::new(),
title: String::new(), title: String::new(),
html: String::new(), children: Vec::new(),
}
}
pub fn add_html(&mut self, val: &str) {
self.html += val;
}
pub fn add_text(&mut self, val: &str) {
self.html += val;
self.title += val;
}
/// Transform all the information we have about this header into the HTML string for it
pub fn to_string(&self, tera: &Tera, insert_anchor: InsertAnchor) -> String {
let anchor_link = if insert_anchor != InsertAnchor::None {
let mut c = TeraContext::new();
c.insert("id", &self.id);
tera.render("anchor-link.html", &c).unwrap()
} else {
String::new()
};
match insert_anchor {
InsertAnchor::None => format!(
"<h{lvl} id=\"{id}\">{t}</h{lvl}>\n",
lvl = self.level,
t = self.html,
id = self.id
),
InsertAnchor::Left => format!(
"<h{lvl} id=\"{id}\">{a}{t}</h{lvl}>\n",
lvl = self.level,
a = anchor_link,
t = self.html,
id = self.id
),
InsertAnchor::Right => format!(
"<h{lvl} id=\"{id}\">{t}{a}</h{lvl}>\n",
lvl = self.level,
a = anchor_link,
t = self.html,
id = self.id
),
} }
} }
} }
impl Default for TempHeader { impl Default for Header {
fn default() -> Self { fn default() -> Self {
TempHeader::new(0) Header::new(0)
} }
} }
/// Recursively finds children of a header
fn find_children(
parent_level: i32,
start_at: usize,
temp_headers: &[TempHeader],
) -> (usize, Vec<Header>) {
let mut headers = vec![];
let mut start_at = start_at;
// If we have children, we will need to skip some headers since they are already inserted
let mut to_skip = 0;
for h in &temp_headers[start_at..] {
// stop when we encounter a title at the same level or higher
// than the parent one. Here a lower integer is considered higher as we are talking about
// HTML headers: h1, h2, h3, h4, h5 and h6
if h.level <= parent_level {
return (start_at, headers);
}
// Do we need to skip some headers?
if to_skip > 0 {
to_skip -= 1;
continue;
}
let (end, children) = find_children(h.level, start_at + 1, temp_headers);
headers.push(Header::from_temp_header(h, children));
// we didn't find any children
if end == start_at {
start_at += 1;
to_skip = 0;
} else {
// calculates how many we need to skip. Since the find_children start_at starts at 1,
// we need to remove 1 to ensure correctness
to_skip = end - start_at - 1;
start_at = end;
}
// we don't want to index out of bounds
if start_at + 1 > temp_headers.len() {
return (start_at, headers);
}
}
(start_at, headers)
}
/// Converts the flat temp headers into a nested set of headers /// Converts the flat temp headers into a nested set of headers
/// representing the hierarchy /// representing the hierarchy
pub fn make_table_of_contents(temp_headers: &[TempHeader]) -> Vec<Header> { pub fn make_table_of_contents(headers: Vec<Header>) -> Vec<Header> {
let mut toc = vec![]; let mut toc = vec![];
let mut start_idx = 0; 'parent: for header in headers {
for (i, h) in temp_headers.iter().enumerate() { if toc.is_empty() {
if i < start_idx { toc.push(header);
continue; continue;
} }
let (end_idx, children) = find_children(h.level, start_idx + 1, temp_headers);
start_idx = end_idx; // See if we have to insert as a child of a previous header
toc.push(Header::from_temp_header(h, children)); for h in toc.iter_mut().rev() {
// Look in its children first
for child in h.children.iter_mut().rev() {
if header.level > child.level {
child.children.push(header);
continue 'parent;
}
}
if header.level > h.level {
h.children.push(header);
continue 'parent;
}
}
// Nop, just insert it
toc.push(header)
} }
toc toc
@ -166,25 +65,25 @@ mod tests {
#[test] #[test]
fn can_make_basic_toc() { fn can_make_basic_toc() {
let input = vec![TempHeader::new(1), TempHeader::new(1), TempHeader::new(1)]; let input = vec![Header::new(1), Header::new(1), Header::new(1)];
let toc = make_table_of_contents(&input); let toc = make_table_of_contents(input);
assert_eq!(toc.len(), 3); assert_eq!(toc.len(), 3);
} }
#[test] #[test]
fn can_make_more_complex_toc() { fn can_make_more_complex_toc() {
let input = vec![ let input = vec![
TempHeader::new(1), Header::new(1),
TempHeader::new(2), Header::new(2),
TempHeader::new(2), Header::new(2),
TempHeader::new(3), Header::new(3),
TempHeader::new(2), Header::new(2),
TempHeader::new(1), Header::new(1),
TempHeader::new(2), Header::new(2),
TempHeader::new(3), Header::new(3),
TempHeader::new(3), Header::new(3),
]; ];
let toc = make_table_of_contents(&input); let toc = make_table_of_contents(input);
assert_eq!(toc.len(), 2); assert_eq!(toc.len(), 2);
assert_eq!(toc[0].children.len(), 3); assert_eq!(toc[0].children.len(), 3);
assert_eq!(toc[1].children.len(), 1); assert_eq!(toc[1].children.len(), 1);
@ -195,15 +94,16 @@ mod tests {
#[test] #[test]
fn can_make_messy_toc() { fn can_make_messy_toc() {
let input = vec![ let input = vec![
TempHeader::new(3), Header::new(3),
TempHeader::new(2), Header::new(2),
TempHeader::new(2), Header::new(2),
TempHeader::new(3), Header::new(3),
TempHeader::new(2), Header::new(2),
TempHeader::new(1), Header::new(1),
TempHeader::new(4), Header::new(4),
]; ];
let toc = make_table_of_contents(&input); let toc = make_table_of_contents(input);
println!("{:#?}", toc);
assert_eq!(toc.len(), 5); assert_eq!(toc.len(), 5);
assert_eq!(toc[2].children.len(), 1); assert_eq!(toc[2].children.len(), 1);
assert_eq!(toc[4].children.len(), 1); assert_eq!(toc[4].children.len(), 1);

View file

@ -44,7 +44,7 @@ fn can_highlight_code_block_no_lang() {
let res = render_content("```\n$ gutenberg server\n$ ping\n```", &context).unwrap(); let res = render_content("```\n$ gutenberg server\n$ ping\n```", &context).unwrap();
assert_eq!( assert_eq!(
res.body, res.body,
"<pre style=\"background-color:#2b303b;\">\n<span style=\"color:#c0c5ce;\">$ gutenberg server\n</span><span style=\"color:#c0c5ce;\">$ ping\n</span></pre>" "<pre style=\"background-color:#2b303b;\">\n<span style=\"color:#c0c5ce;\">$ gutenberg server\n$ ping\n</span></pre>"
); );
} }
@ -375,6 +375,19 @@ fn can_insert_anchor_right() {
); );
} }
#[test]
fn can_insert_anchor_for_multi_header() {
let permalinks_ctx = HashMap::new();
let config = Config::default();
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::Right);
let res = render_content("# Hello\n# World", &context).unwrap();
assert_eq!(
res.body,
"<h1 id=\"hello\">Hello<a class=\"zola-anchor\" href=\"#hello\" aria-label=\"Anchor link for: hello\">🔗</a>\n</h1>\n\
<h1 id=\"world\">World<a class=\"zola-anchor\" href=\"#world\" aria-label=\"Anchor link for: world\">🔗</a>\n</h1>\n"
);
}
// See https://github.com/Keats/gutenberg/issues/42 // See https://github.com/Keats/gutenberg/issues/42
#[test] #[test]
fn can_insert_anchor_with_exclamation_mark() { fn can_insert_anchor_with_exclamation_mark() {
@ -522,6 +535,47 @@ fn can_understand_link_with_title_in_header() {
); );
} }
#[test]
fn can_understand_emphasis_in_header() {
let permalinks_ctx = HashMap::new();
let config = Config::default();
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("# *Emphasis* text", &context).unwrap();
assert_eq!(res.body, "<h1 id=\"emphasis-text\"><em>Emphasis</em> text</h1>\n");
}
#[test]
fn can_understand_strong_in_header() {
let permalinks_ctx = HashMap::new();
let config = Config::default();
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("# **Strong** text", &context).unwrap();
assert_eq!(res.body, "<h1 id=\"strong-text\"><strong>Strong</strong> text</h1>\n");
}
#[test]
fn can_understand_code_in_header() {
let permalinks_ctx = HashMap::new();
let config = Config::default();
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("# `Code` text", &context).unwrap();
assert_eq!(res.body, "<h1 id=\"code-text\"><code>Code</code> text</h1>\n");
}
// See https://github.com/getzola/zola/issues/569
#[test]
fn can_understand_footnote_in_header() {
let permalinks_ctx = HashMap::new();
let config = Config::default();
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("# text [^1] there\n[^1]: footnote", &context).unwrap();
assert_eq!(res.body, r##"<h1 id="text-there">text <sup class="footnote-reference"><a href="#1">1</a></sup> there</h1>
<div class="footnote-definition" id="1"><sup class="footnote-definition-label">1</sup>
<p>footnote</p>
</div>
"##);
}
#[test] #[test]
fn can_make_valid_relative_link_in_header() { fn can_make_valid_relative_link_in_header() {
let mut permalinks = HashMap::new(); let mut permalinks = HashMap::new();
@ -633,7 +687,7 @@ fn can_show_error_message_for_invalid_external_links() {
let res = render_content("[a link](http://google.comy)", &context); let res = render_content("[a link](http://google.comy)", &context);
assert!(res.is_err()); assert!(res.is_err());
let err = res.unwrap_err(); let err = res.unwrap_err();
assert!(err.description().contains("Link http://google.comy is not valid")); assert!(format!("{}", err).contains("Link http://google.comy is not valid"));
} }
#[test] #[test]
@ -675,17 +729,25 @@ fn can_handle_summaries() {
let config = Config::default(); let config = Config::default();
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None); let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content( let res = render_content(
"Hello [world]\n\n<!-- more -->\n\nBla bla\n\n[world]: https://vincent.is/about/", r#"
Hello [My site][world]
<!-- more -->
Bla bla
[world]: https://vincentprouillet.com
"#,
&context, &context,
) )
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
res.body, res.body,
"<p>Hello <a href=\"https://vincent.is/about/\">world</a></p>\n<p><a name=\"continue-reading\"></a></p>\n<p>Bla bla</p>\n" "<p>Hello <a href=\"https://vincentprouillet.com\">My site</a></p>\n<p id=\"zola-continue-reading\"><a name=\"continue-reading\"></a></p>\n<p>Bla bla</p>\n"
); );
assert_eq!( assert_eq!(
res.summary_len, res.summary_len,
Some("<p>Hello <a href=\"https://vincent.is/about/\">world</a></p>\n".len()) Some("<p>Hello <a href=\"https://vincentprouillet.com/\">My site</a></p>".len())
); );
} }
@ -721,3 +783,31 @@ fn doesnt_try_to_highlight_content_from_shortcode() {
let res = render_content(markdown_string, &context).unwrap(); let res = render_content(markdown_string, &context).unwrap();
assert_eq!(res.body, expected); assert_eq!(res.body, expected);
} }
// TODO: re-enable once it's fixed in Tera
// https://github.com/Keats/tera/issues/373
//#[test]
//fn can_split_lines_shortcode_body() {
// let permalinks_ctx = HashMap::new();
// let mut tera = Tera::default();
// tera.extend(&ZOLA_TERA).unwrap();
//
// let shortcode = r#"{{ body | split(pat="\n") }}"#;
//
// let markdown_string = r#"
//{% alert() %}
//multi
//ple
//lines
//{% end %}
// "#;
//
// let expected = r#"<p>["multi", "ple", "lines"]</p>"#;
//
// tera.add_raw_template(&format!("shortcodes/{}.html", "alert"), shortcode).unwrap();
// let config = Config::default();
// let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);
//
// let res = render_content(markdown_string, &context).unwrap();
// assert_eq!(res.body, expected);
//}

View file

@ -4,7 +4,7 @@ version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
[dependencies] [dependencies]
tera = "0.11" tera = "1.0.0-alpha.3"
glob = "0.2" glob = "0.2"
rayon = "1" rayon = "1"
serde = "1" serde = "1"

View file

@ -169,6 +169,7 @@ if __name__ == "__main__":
gen_site("medium-blog", [""], 250, is_blog=True) gen_site("medium-blog", [""], 250, is_blog=True)
gen_site("big-blog", [""], 1000, is_blog=True) gen_site("big-blog", [""], 1000, is_blog=True)
gen_site("huge-blog", [""], 10000, is_blog=True) gen_site("huge-blog", [""], 10000, is_blog=True)
gen_site("extra-huge-blog", [""], 100000, is_blog=True)
gen_site("small-kb", ["help", "help1", "help2", "help3", "help4", "help5", "help6", "help7", "help8", "help9"], 10) gen_site("small-kb", ["help", "help1", "help2", "help3", "help4", "help5", "help6", "help7", "help8", "help9"], 10)
gen_site("medium-kb", ["help", "help1", "help2", "help3", "help4", "help5", "help6", "help7", "help8", "help9"], 100) gen_site("medium-kb", ["help", "help1", "help2", "help3", "help4", "help5", "help6", "help7", "help8", "help9"], 100)

View file

@ -43,7 +43,7 @@ fn bench_render_rss_feed(b: &mut test::Bencher) {
let tmp_dir = tempdir().expect("create temp dir"); let tmp_dir = tempdir().expect("create temp dir");
let public = &tmp_dir.path().join("public"); let public = &tmp_dir.path().join("public");
site.set_output_path(&public); site.set_output_path(&public);
b.iter(|| site.render_rss_feed(site.library.pages_values(), None).unwrap()); b.iter(|| site.render_rss_feed(site.library.read().unwrap().pages_values(), None).unwrap());
} }
#[bench] #[bench]
@ -61,8 +61,9 @@ fn bench_render_paginated(b: &mut test::Bencher) {
let tmp_dir = tempdir().expect("create temp dir"); let tmp_dir = tempdir().expect("create temp dir");
let public = &tmp_dir.path().join("public"); let public = &tmp_dir.path().join("public");
site.set_output_path(&public); site.set_output_path(&public);
let section = site.library.sections_values()[0]; let library = site.library.read().unwrap();
let paginator = Paginator::from_section(&section, &site.library); let section = library.sections_values()[0];
let paginator = Paginator::from_section(&section, &library);
b.iter(|| site.render_paginated(public, &paginator)); b.iter(|| site.render_paginated(public, &paginator));
} }

View file

@ -19,10 +19,13 @@ extern crate utils;
#[cfg(test)] #[cfg(test)]
extern crate tempfile; extern crate tempfile;
use std::collections::HashMap;
mod sitemap;
use std::collections::{HashMap};
use std::fs::{copy, create_dir_all, remove_dir_all}; use std::fs::{copy, create_dir_all, remove_dir_all};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex, RwLock};
use glob::glob; use glob::glob;
use rayon::prelude::*; use rayon::prelude::*;
@ -30,7 +33,7 @@ use sass_rs::{compile_file, Options as SassOptions, OutputStyle};
use tera::{Context, Tera}; use tera::{Context, Tera};
use config::{get_config, Config}; use config::{get_config, Config};
use errors::{Result, ResultExt}; use errors::{Error, Result};
use front_matter::InsertAnchor; use front_matter::InsertAnchor;
use library::{ use library::{
find_taxonomies, sort_actual_pages_by_date, Library, Page, Paginator, Section, Taxonomy, find_taxonomies, sort_actual_pages_by_date, Library, Page, Paginator, Section, Taxonomy,
@ -40,20 +43,6 @@ use utils::fs::{copy_directory, create_directory, create_file, ensure_directory_
use utils::net::get_available_port; use utils::net::get_available_port;
use utils::templates::{render_template, rewrite_theme_paths}; use utils::templates::{render_template, rewrite_theme_paths};
/// The sitemap only needs links and potentially date so we trim down
/// all pages to only that
#[derive(Debug, Serialize)]
struct SitemapEntry {
permalink: String,
date: Option<String>,
}
impl SitemapEntry {
pub fn new(permalink: String, date: Option<String>) -> SitemapEntry {
SitemapEntry { permalink, date }
}
}
#[derive(Debug)] #[derive(Debug)]
pub struct Site { pub struct Site {
/// The base path of the zola site /// The base path of the zola site
@ -72,12 +61,12 @@ pub struct Site {
/// We need that if there are relative links in the content that need to be resolved /// We need that if there are relative links in the content that need to be resolved
pub permalinks: HashMap<String, String>, pub permalinks: HashMap<String, String>,
/// Contains all pages and sections of the site /// Contains all pages and sections of the site
pub library: Library, pub library: Arc<RwLock<Library>>,
} }
impl Site { impl Site {
/// Parse a site at the given path. Defaults to the current dir /// Parse a site at the given path. Defaults to the current dir
/// Passing in a path is only used in tests /// Passing in a path is possible using the `base-path` command line build option
pub fn new<P: AsRef<Path>>(path: P, config_file: &str) -> Result<Site> { pub fn new<P: AsRef<Path>>(path: P, config_file: &str) -> Result<Site> {
let path = path.as_ref(); let path = path.as_ref();
let mut config = get_config(path, config_file); let mut config = get_config(path, config_file);
@ -87,7 +76,8 @@ impl Site {
format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "templates/**/*.*ml"); format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "templates/**/*.*ml");
// Only parsing as we might be extending templates from themes and that would error // Only parsing as we might be extending templates from themes and that would error
// as we haven't loaded them yet // as we haven't loaded them yet
let mut tera = Tera::parse(&tpl_glob).chain_err(|| "Error parsing templates")?; let mut tera =
Tera::parse(&tpl_glob).map_err(|e| Error::chain("Error parsing templates", e))?;
if let Some(theme) = config.theme.clone() { if let Some(theme) = config.theme.clone() {
// Grab data from the extra section of the theme // Grab data from the extra section of the theme
config.merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"))?; config.merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"))?;
@ -103,10 +93,10 @@ impl Site {
path.to_string_lossy().replace("\\", "/"), path.to_string_lossy().replace("\\", "/"),
format!("themes/{}/templates/**/*.*ml", theme) format!("themes/{}/templates/**/*.*ml", theme)
); );
let mut tera_theme = let mut tera_theme = Tera::parse(&theme_tpl_glob)
Tera::parse(&theme_tpl_glob).chain_err(|| "Error parsing templates from themes")?; .map_err(|e| Error::chain("Error parsing templates from themes", e))?;
rewrite_theme_paths(&mut tera_theme, &theme); rewrite_theme_paths(&mut tera_theme, &theme);
// TODO: same as below // TODO: we do that twice, make it dry?
if theme_path.join("templates").join("robots.txt").exists() { if theme_path.join("templates").join("robots.txt").exists() {
tera_theme tera_theme
.add_template_file(theme_path.join("templates").join("robots.txt"), None)?; .add_template_file(theme_path.join("templates").join("robots.txt"), None)?;
@ -141,7 +131,7 @@ impl Site {
taxonomies: Vec::new(), taxonomies: Vec::new(),
permalinks: HashMap::new(), permalinks: HashMap::new(),
// We will allocate it properly later on // We will allocate it properly later on
library: Library::new(0, 0, false), library: Arc::new(RwLock::new(Library::new(0, 0, false))),
}; };
Ok(site) Ok(site)
@ -167,9 +157,9 @@ impl Site {
self.live_reload = get_available_port(port_to_avoid); self.live_reload = get_available_port(port_to_avoid);
} }
/// Get all the orphan (== without section) pages in the site /// Get the number of orphan (== without section) pages in the site
pub fn get_all_orphan_pages(&self) -> Vec<&Page> { pub fn get_number_orphan_pages(&self) -> usize {
self.library.get_all_orphan_pages() self.library.read().unwrap().get_all_orphan_pages().len()
} }
pub fn set_base_url(&mut self, base_url: String) { pub fn set_base_url(&mut self, base_url: String) {
@ -196,8 +186,11 @@ impl Site {
entry.as_path().file_name().unwrap().to_str().unwrap().starts_with("_index.") entry.as_path().file_name().unwrap().to_str().unwrap().starts_with("_index.")
}); });
self.library = self.library = Arc::new(RwLock::new(Library::new(
Library::new(page_entries.len(), section_entries.len(), self.config.is_multilingual()); page_entries.len(),
section_entries.len(),
self.config.is_multilingual(),
)));
let sections = { let sections = {
let config = &self.config; let config = &self.config;
@ -206,7 +199,7 @@ impl Site {
.into_par_iter() .into_par_iter()
.map(|entry| { .map(|entry| {
let path = entry.as_path(); let path = entry.as_path();
Section::from_file(path, config) Section::from_file(path, config, &self.base_path)
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()
}; };
@ -218,7 +211,7 @@ impl Site {
.into_par_iter() .into_par_iter()
.map(|entry| { .map(|entry| {
let path = entry.as_path(); let path = entry.as_path();
Page::from_file(path, config) Page::from_file(path, config, &self.base_path)
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()
}; };
@ -230,10 +223,34 @@ impl Site {
self.add_section(s, false)?; self.add_section(s, false)?;
} }
// Insert a default index section for each language if necessary so we don't need to create self.create_default_index_sections()?;
// a _index.md to render the index page at the root of the site
let mut pages_insert_anchors = HashMap::new();
for page in pages {
let p = page?;
pages_insert_anchors.insert(
p.file.path.clone(),
self.find_parent_section_insert_anchor(&p.file.parent.clone(), &p.lang),
);
self.add_page(p, false)?;
}
// taxonomy Tera fns are loaded in `register_early_global_fns`
// so we do need to populate it first.
self.populate_taxonomies()?;
self.register_early_global_fns();
self.populate_sections();
self.render_markdown()?;
self.register_tera_global_fns();
Ok(())
}
/// Insert a default index section for each language if necessary so we don't need to create
/// a _index.md to render the index page at the root of the site
pub fn create_default_index_sections(&mut self) -> Result<()> {
for (index_path, lang) in self.index_section_paths() { for (index_path, lang) in self.index_section_paths() {
if let Some(ref index_section) = self.library.get_section(&index_path) { if let Some(ref index_section) = self.library.read().unwrap().get_section(&index_path) {
if self.config.build_search_index && !index_section.meta.in_search_index { if self.config.build_search_index && !index_section.meta.in_search_index {
bail!( bail!(
"You have enabled search in the config but disabled it in the index section: \ "You have enabled search in the config but disabled it in the index section: \
@ -242,8 +259,9 @@ impl Site {
) )
} }
} }
let mut library = self.library.write().expect("Get lock for load");
// Not in else because of borrow checker // Not in else because of borrow checker
if !self.library.contains_section(&index_path) { if !library.contains_section(&index_path) {
let mut index_section = Section::default(); let mut index_section = Section::default();
index_section.file.parent = self.content_path.clone(); index_section.file.parent = self.content_path.clone();
index_section.file.filename = index_section.file.filename =
@ -261,26 +279,10 @@ impl Site {
index_section.file.path = self.content_path.join("_index.md"); index_section.file.path = self.content_path.join("_index.md");
index_section.file.relative = "_index.md".to_string(); index_section.file.relative = "_index.md".to_string();
} }
self.library.insert_section(index_section); library.insert_section(index_section);
} }
} }
let mut pages_insert_anchors = HashMap::new();
for page in pages {
let p = page?;
pages_insert_anchors.insert(
p.file.path.clone(),
self.find_parent_section_insert_anchor(&p.file.parent.clone(), &p.lang),
);
self.add_page(p, false)?;
}
self.register_early_global_fns();
self.populate_sections();
self.render_markdown()?;
self.populate_taxonomies()?;
self.register_tera_global_fns();
Ok(()) Ok(())
} }
@ -295,14 +297,15 @@ impl Site {
// This is needed in the first place because of silly borrow checker // This is needed in the first place because of silly borrow checker
let mut pages_insert_anchors = HashMap::new(); let mut pages_insert_anchors = HashMap::new();
for (_, p) in self.library.pages() { for (_, p) in self.library.read().unwrap().pages() {
pages_insert_anchors.insert( pages_insert_anchors.insert(
p.file.path.clone(), p.file.path.clone(),
self.find_parent_section_insert_anchor(&p.file.parent.clone(), &p.lang), self.find_parent_section_insert_anchor(&p.file.parent.clone(), &p.lang),
); );
} }
self.library let mut library = self.library.write().expect("Get lock for render_markdown");
library
.pages_mut() .pages_mut()
.values_mut() .values_mut()
.collect::<Vec<_>>() .collect::<Vec<_>>()
@ -313,7 +316,7 @@ impl Site {
}) })
.collect::<Result<()>>()?; .collect::<Result<()>>()?;
self.library library
.sections_mut() .sections_mut()
.values_mut() .values_mut()
.collect::<Vec<_>>() .collect::<Vec<_>>()
@ -329,29 +332,32 @@ impl Site {
pub fn register_early_global_fns(&mut self) { pub fn register_early_global_fns(&mut self) {
self.tera.register_function( self.tera.register_function(
"get_url", "get_url",
global_fns::make_get_url(self.permalinks.clone(), self.config.clone()), global_fns::GetUrl::new(self.config.clone(), self.permalinks.clone()),
); );
self.tera.register_function( self.tera.register_function(
"resize_image", "resize_image",
global_fns::make_resize_image(self.imageproc.clone()), global_fns::ResizeImage::new(self.imageproc.clone()),
);
self.tera.register_function("load_data", global_fns::LoadData::new(self.base_path.clone()));
self.tera.register_function("trans", global_fns::Trans::new(self.config.clone()));
self.tera.register_function(
"get_taxonomy_url",
global_fns::GetTaxonomyUrl::new(&self.taxonomies),
); );
} }
pub fn register_tera_global_fns(&mut self) { pub fn register_tera_global_fns(&mut self) {
self.tera.register_function("trans", global_fns::make_trans(self.config.clone())); self.tera.register_function(
self.tera.register_function("get_page", global_fns::make_get_page(&self.library)); "get_page",
self.tera.register_function("get_section", global_fns::make_get_section(&self.library)); global_fns::GetPage::new(self.base_path.clone(), self.library.clone()),
);
self.tera.register_function(
"get_section",
global_fns::GetSection::new(self.base_path.clone(), self.library.clone()),
);
self.tera.register_function( self.tera.register_function(
"get_taxonomy", "get_taxonomy",
global_fns::make_get_taxonomy(&self.taxonomies, &self.library), global_fns::GetTaxonomy::new(self.taxonomies.clone(), self.library.clone()),
);
self.tera.register_function(
"get_taxonomy_url",
global_fns::make_get_taxonomy_url(&self.taxonomies),
);
self.tera.register_function(
"load_data",
global_fns::make_load_data(self.content_path.clone(), self.base_path.clone()),
); );
} }
@ -366,8 +372,9 @@ impl Site {
self.find_parent_section_insert_anchor(&page.file.parent, &page.lang); self.find_parent_section_insert_anchor(&page.file.parent, &page.lang);
page.render_markdown(&self.permalinks, &self.tera, &self.config, insert_anchor)?; page.render_markdown(&self.permalinks, &self.tera, &self.config, insert_anchor)?;
} }
let prev = self.library.remove_page(&page.file.path); let mut library = self.library.write().expect("Get lock for add_page");
self.library.insert_page(page); let prev = library.remove_page(&page.file.path);
library.insert_page(page);
Ok(prev) Ok(prev)
} }
@ -381,8 +388,9 @@ impl Site {
if render { if render {
section.render_markdown(&self.permalinks, &self.tera, &self.config)?; section.render_markdown(&self.permalinks, &self.tera, &self.config)?;
} }
let prev = self.library.remove_section(&section.file.path); let mut library = self.library.write().expect("Get lock for add_section");
self.library.insert_section(section); let prev = library.remove_section(&section.file.path);
library.insert_section(section);
Ok(prev) Ok(prev)
} }
@ -392,14 +400,14 @@ impl Site {
pub fn find_parent_section_insert_anchor( pub fn find_parent_section_insert_anchor(
&self, &self,
parent_path: &PathBuf, parent_path: &PathBuf,
lang: &Option<String>, lang: &str,
) -> InsertAnchor { ) -> InsertAnchor {
let parent = if let Some(ref l) = lang { let parent = if lang != self.config.default_language {
parent_path.join(format!("_index.{}.md", l)) parent_path.join(format!("_index.{}.md", lang))
} else { } else {
parent_path.join("_index.md") parent_path.join("_index.md")
}; };
match self.library.get_section(&parent) { match self.library.read().unwrap().get_section(&parent) {
Some(s) => s.meta.insert_anchor_links, Some(s) => s.meta.insert_anchor_links,
None => InsertAnchor::None, None => InsertAnchor::None,
} }
@ -408,7 +416,8 @@ impl Site {
/// Find out the direct subsections of each subsection if there are some /// Find out the direct subsections of each subsection if there are some
/// as well as the pages for each section /// as well as the pages for each section
pub fn populate_sections(&mut self) { pub fn populate_sections(&mut self) {
self.library.populate_sections(); let mut library = self.library.write().expect("Get lock for populate_sections");
library.populate_sections(&self.config);
} }
/// Find all the tags and categories if it's asked in the config /// Find all the tags and categories if it's asked in the config
@ -417,7 +426,7 @@ impl Site {
return Ok(()); return Ok(());
} }
self.taxonomies = find_taxonomies(&self.config, &self.library)?; self.taxonomies = find_taxonomies(&self.config, &self.library.read().unwrap())?;
Ok(()) Ok(())
} }
@ -470,7 +479,8 @@ impl Site {
pub fn clean(&self) -> Result<()> { pub fn clean(&self) -> Result<()> {
if self.output_path.exists() { if self.output_path.exists() {
// Delete current `public` directory so we can start fresh // Delete current `public` directory so we can start fresh
remove_dir_all(&self.output_path).chain_err(|| "Couldn't delete output directory")?; remove_dir_all(&self.output_path)
.map_err(|e| Error::chain("Couldn't delete output directory", e))?;
} }
Ok(()) Ok(())
@ -495,7 +505,7 @@ impl Site {
create_directory(&current_path)?; create_directory(&current_path)?;
// Finally, create a index.html file there with the page rendered // Finally, create a index.html file there with the page rendered
let output = page.render_html(&self.tera, &self.config, &self.library)?; let output = page.render_html(&self.tera, &self.config, &self.library.read().unwrap())?;
create_file(&current_path.join("index.html"), &self.inject_livereload(output))?; create_file(&current_path.join("index.html"), &self.inject_livereload(output))?;
// Copy any asset we found previously into the same directory as the index.html // Copy any asset we found previously into the same directory as the index.html
@ -514,44 +524,8 @@ impl Site {
/// Deletes the `public` directory and builds the site /// Deletes the `public` directory and builds the site
pub fn build(&self) -> Result<()> { pub fn build(&self) -> Result<()> {
self.clean()?; self.clean()?;
// Render aliases first to allow overwriting
self.render_aliases()?;
self.render_sections()?;
self.render_orphan_pages()?;
self.render_sitemap()?;
if self.config.generate_rss {
let pages = if self.config.is_multilingual() {
self.library
.pages_values()
.iter()
.filter(|p| p.lang.is_none())
.map(|p| *p)
.collect()
} else {
self.library.pages_values()
};
self.render_rss_feed(pages, None)?;
}
for lang in &self.config.languages {
if !lang.rss {
continue;
}
let pages = self
.library
.pages_values()
.iter()
.filter(|p| if let Some(ref l) = p.lang { l == &lang.code } else { false })
.map(|p| *p)
.collect();
self.render_rss_feed(pages, Some(&PathBuf::from(lang.code.clone())))?;
}
self.render_404()?;
self.render_robots()?;
self.render_taxonomies()?;
// Generate/move all assets before rendering any content
if let Some(ref theme) = self.config.theme { if let Some(ref theme) = self.config.theme {
let theme_path = self.base_path.join("themes").join(theme); let theme_path = self.base_path.join("themes").join(theme);
if theme_path.join("sass").exists() { if theme_path.join("sass").exists() {
@ -570,6 +544,40 @@ impl Site {
self.build_search_index()?; self.build_search_index()?;
} }
// Render aliases first to allow overwriting
self.render_aliases()?;
self.render_sections()?;
self.render_orphan_pages()?;
self.render_sitemap()?;
let library = self.library.read().unwrap();
if self.config.generate_rss {
let pages = if self.config.is_multilingual() {
library
.pages_values()
.iter()
.filter(|p| p.lang == self.config.default_language)
.map(|p| *p)
.collect()
} else {
library.pages_values()
};
self.render_rss_feed(pages, None)?;
}
for lang in &self.config.languages {
if !lang.rss {
continue;
}
let pages =
library.pages_values().iter().filter(|p| p.lang == lang.code).map(|p| *p).collect();
self.render_rss_feed(pages, Some(&PathBuf::from(lang.code.clone())))?;
}
self.render_404()?;
self.render_robots()?;
self.render_taxonomies()?;
Ok(()) Ok(())
} }
@ -579,7 +587,7 @@ impl Site {
&self.output_path.join(&format!("search_index.{}.js", self.config.default_language)), &self.output_path.join(&format!("search_index.{}.js", self.config.default_language)),
&format!( &format!(
"window.searchIndex = {};", "window.searchIndex = {};",
search::build_index(&self.config.default_language, &self.library)? search::build_index(&self.config.default_language, &self.library.read().unwrap())?
), ),
)?; )?;
@ -656,7 +664,7 @@ impl Site {
pub fn render_aliases(&self) -> Result<()> { pub fn render_aliases(&self) -> Result<()> {
ensure_directory_exists(&self.output_path)?; ensure_directory_exists(&self.output_path)?;
for (_, page) in self.library.pages() { for (_, page) in self.library.read().unwrap().pages() {
for alias in &page.meta.aliases { for alias in &page.meta.aliases {
let mut output_path = self.output_path.to_path_buf(); let mut output_path = self.output_path.to_path_buf();
let mut split = alias.split('/').collect::<Vec<_>>(); let mut split = alias.split('/').collect::<Vec<_>>();
@ -693,7 +701,7 @@ impl Site {
ensure_directory_exists(&self.output_path)?; ensure_directory_exists(&self.output_path)?;
let mut context = Context::new(); let mut context = Context::new();
context.insert("config", &self.config); context.insert("config", &self.config);
let output = render_template("404.html", &self.tera, &context, &self.config.theme)?; let output = render_template("404.html", &self.tera, context, &self.config.theme)?;
create_file(&self.output_path.join("404.html"), &self.inject_livereload(output)) create_file(&self.output_path.join("404.html"), &self.inject_livereload(output))
} }
@ -704,7 +712,7 @@ impl Site {
context.insert("config", &self.config); context.insert("config", &self.config);
create_file( create_file(
&self.output_path.join("robots.txt"), &self.output_path.join("robots.txt"),
&render_template("robots.txt", &self.tera, &context, &self.config.theme)?, &render_template("robots.txt", &self.tera, context, &self.config.theme)?,
) )
} }
@ -723,11 +731,18 @@ impl Site {
} }
ensure_directory_exists(&self.output_path)?; ensure_directory_exists(&self.output_path)?;
let output_path = self.output_path.join(&taxonomy.kind.name); let output_path = if taxonomy.kind.lang != self.config.default_language {
let list_output = taxonomy.render_all_terms(&self.tera, &self.config, &self.library)?; let mid_path = self.output_path.join(&taxonomy.kind.lang);
create_directory(&mid_path)?;
mid_path.join(&taxonomy.kind.name)
} else {
self.output_path.join(&taxonomy.kind.name)
};
let list_output =
taxonomy.render_all_terms(&self.tera, &self.config, &self.library.read().unwrap())?;
create_directory(&output_path)?; create_directory(&output_path)?;
create_file(&output_path.join("index.html"), &self.inject_livereload(list_output))?; create_file(&output_path.join("index.html"), &self.inject_livereload(list_output))?;
let library = self.library.read().unwrap();
taxonomy taxonomy
.items .items
.par_iter() .par_iter()
@ -736,18 +751,18 @@ impl Site {
if taxonomy.kind.is_paginated() { if taxonomy.kind.is_paginated() {
self.render_paginated( self.render_paginated(
&path, &path,
&Paginator::from_taxonomy(&taxonomy, item, &self.library), &Paginator::from_taxonomy(&taxonomy, item, &library),
)?; )?;
} else { } else {
let single_output = let single_output =
taxonomy.render_term(item, &self.tera, &self.config, &self.library)?; taxonomy.render_term(item, &self.tera, &self.config, &library)?;
create_directory(&path)?; create_directory(&path)?;
create_file(&path.join("index.html"), &self.inject_livereload(single_output))?; create_file(&path.join("index.html"), &self.inject_livereload(single_output))?;
} }
if taxonomy.kind.rss { if taxonomy.kind.rss {
self.render_rss_feed( self.render_rss_feed(
item.pages.iter().map(|p| self.library.get_page_by_key(*p)).collect(), item.pages.iter().map(|p| library.get_page_by_key(*p)).collect(),
Some(&PathBuf::from(format!("{}/{}", taxonomy.kind.name, item.slug))), Some(&PathBuf::from(format!("{}/{}", taxonomy.kind.name, item.slug))),
) )
} else { } else {
@ -761,82 +776,46 @@ impl Site {
pub fn render_sitemap(&self) -> Result<()> { pub fn render_sitemap(&self) -> Result<()> {
ensure_directory_exists(&self.output_path)?; ensure_directory_exists(&self.output_path)?;
let mut context = Context::new(); let library = self.library.read().unwrap();
let all_sitemap_entries = sitemap::find_entries(
&library,
&self.taxonomies[..],
&self.config,
);
let sitemap_limit = 30000;
let mut pages = self if all_sitemap_entries.len() < sitemap_limit {
.library // Create single sitemap
.pages_values() let mut context = Context::new();
.iter() context.insert("entries", &all_sitemap_entries);
.filter(|p| !p.is_draft()) let sitemap = &render_template("sitemap.xml", &self.tera, context, &self.config.theme)?;
.map(|p| { create_file(&self.output_path.join("sitemap.xml"), sitemap)?;
let date = match p.meta.date { return Ok(());
Some(ref d) => Some(d.to_string()), }
None => None,
};
SitemapEntry::new(p.permalink.clone(), date)
})
.collect::<Vec<_>>();
pages.sort_by(|a, b| a.permalink.cmp(&b.permalink));
context.insert("pages", &pages);
let mut sections = self // Create multiple sitemaps (max 30000 urls each)
.library let mut sitemap_index = Vec::new();
.sections_values() for (i, chunk) in
.iter() all_sitemap_entries.iter().collect::<Vec<_>>().chunks(sitemap_limit).enumerate()
.map(|s| SitemapEntry::new(s.permalink.clone(), None))
.collect::<Vec<_>>();
for section in
self.library.sections_values().iter().filter(|s| s.meta.paginate_by.is_some())
{ {
let number_pagers = (section.pages.len() as f64 let mut context = Context::new();
/ section.meta.paginate_by.unwrap() as f64) context.insert("entries", &chunk);
.ceil() as isize; let sitemap = &render_template("sitemap.xml", &self.tera, context, &self.config.theme)?;
for i in 1..=number_pagers { let file_name = format!("sitemap{}.xml", i + 1);
let permalink = create_file(&self.output_path.join(&file_name), sitemap)?;
format!("{}{}/{}/", section.permalink, section.meta.paginate_path, i); let mut sitemap_url: String = self.config.make_permalink(&file_name);
sections.push(SitemapEntry::new(permalink, None)) sitemap_url.pop(); // Remove trailing slash
} sitemap_index.push(sitemap_url);
} }
sections.sort_by(|a, b| a.permalink.cmp(&b.permalink)); // Create main sitemap that reference numbered sitemaps
context.insert("sections", &sections); let mut main_context = Context::new();
main_context.insert("sitemaps", &sitemap_index);
let mut taxonomies = vec![]; let sitemap = &render_template(
for taxonomy in &self.taxonomies { "split_sitemap_index.xml",
let name = &taxonomy.kind.name; &self.tera,
let mut terms = vec![]; main_context,
terms.push(SitemapEntry::new(self.config.make_permalink(name), None)); &self.config.theme,
for item in &taxonomy.items { )?;
terms.push(SitemapEntry::new(
self.config.make_permalink(&format!("{}/{}", &name, item.slug)),
None,
));
if taxonomy.kind.is_paginated() {
let number_pagers = (item.pages.len() as f64
/ taxonomy.kind.paginate_by.unwrap() as f64)
.ceil() as isize;
for i in 1..=number_pagers {
let permalink = self.config.make_permalink(&format!(
"{}/{}/{}/{}",
name,
item.slug,
taxonomy.kind.paginate_path(),
i
));
terms.push(SitemapEntry::new(permalink, None))
}
}
}
terms.sort_by(|a, b| a.permalink.cmp(&b.permalink));
taxonomies.push(terms);
}
context.insert("taxonomies", &taxonomies);
context.insert("config", &self.config);
let sitemap = &render_template("sitemap.xml", &self.tera, &context, &self.config.theme)?;
create_file(&self.output_path.join("sitemap.xml"), sitemap)?; create_file(&self.output_path.join("sitemap.xml"), sitemap)?;
Ok(()) Ok(())
@ -866,12 +845,13 @@ impl Site {
pages.par_sort_unstable_by(sort_actual_pages_by_date); pages.par_sort_unstable_by(sort_actual_pages_by_date);
context.insert("last_build_date", &pages[0].meta.date.clone()); context.insert("last_build_date", &pages[0].meta.date.clone());
let library = self.library.read().unwrap();
// limit to the last n elements if the limit is set; otherwise use all. // limit to the last n elements if the limit is set; otherwise use all.
let num_entries = self.config.rss_limit.unwrap_or_else(|| pages.len()); let num_entries = self.config.rss_limit.unwrap_or_else(|| pages.len());
let p = pages let p = pages
.iter() .iter()
.take(num_entries) .take(num_entries)
.map(|x| x.to_serialized_basic(&self.library)) .map(|x| x.to_serialized_basic(&library))
.collect::<Vec<_>>(); .collect::<Vec<_>>();
context.insert("pages", &p); context.insert("pages", &p);
@ -885,7 +865,7 @@ impl Site {
context.insert("feed_url", &rss_feed_url); context.insert("feed_url", &rss_feed_url);
let feed = &render_template("rss.xml", &self.tera, &context, &self.config.theme)?; let feed = &render_template("rss.xml", &self.tera, context, &self.config.theme)?;
if let Some(ref base) = base_path { if let Some(ref base) = base_path {
let mut output_path = self.output_path.clone(); let mut output_path = self.output_path.clone();
@ -907,8 +887,8 @@ impl Site {
ensure_directory_exists(&self.output_path)?; ensure_directory_exists(&self.output_path)?;
let mut output_path = self.output_path.clone(); let mut output_path = self.output_path.clone();
if let Some(ref lang) = section.lang { if section.lang != self.config.default_language {
output_path.push(lang); output_path.push(&section.lang);
if !output_path.exists() { if !output_path.exists() {
create_directory(&output_path)?; create_directory(&output_path)?;
} }
@ -937,7 +917,7 @@ impl Site {
section section
.pages .pages
.par_iter() .par_iter()
.map(|k| self.render_page(self.library.get_page_by_key(*k))) .map(|k| self.render_page(self.library.read().unwrap().get_page_by_key(*k)))
.collect::<Result<()>>()?; .collect::<Result<()>>()?;
} }
@ -955,9 +935,13 @@ impl Site {
} }
if section.meta.is_paginated() { if section.meta.is_paginated() {
self.render_paginated(&output_path, &Paginator::from_section(&section, &self.library))?; self.render_paginated(
&output_path,
&Paginator::from_section(&section, &self.library.read().unwrap()),
)?;
} else { } else {
let output = section.render_html(&self.tera, &self.config, &self.library)?; let output =
section.render_html(&self.tera, &self.config, &self.library.read().unwrap())?;
create_file(&output_path.join("index.html"), &self.inject_livereload(output))?; create_file(&output_path.join("index.html"), &self.inject_livereload(output))?;
} }
@ -969,6 +953,8 @@ impl Site {
self.render_section( self.render_section(
&self &self
.library .library
.read()
.unwrap()
.get_section(&self.content_path.join("_index.md")) .get_section(&self.content_path.join("_index.md"))
.expect("Failed to get index section"), .expect("Failed to get index section"),
false, false,
@ -978,6 +964,8 @@ impl Site {
/// Renders all sections /// Renders all sections
pub fn render_sections(&self) -> Result<()> { pub fn render_sections(&self) -> Result<()> {
self.library self.library
.read()
.unwrap()
.sections_values() .sections_values()
.into_par_iter() .into_par_iter()
.map(|s| self.render_section(s, true)) .map(|s| self.render_section(s, true))
@ -987,8 +975,8 @@ impl Site {
/// Renders all pages that do not belong to any sections /// Renders all pages that do not belong to any sections
pub fn render_orphan_pages(&self) -> Result<()> { pub fn render_orphan_pages(&self) -> Result<()> {
ensure_directory_exists(&self.output_path)?; ensure_directory_exists(&self.output_path)?;
let library = self.library.read().unwrap();
for page in self.get_all_orphan_pages() { for page in library.get_all_orphan_pages() {
self.render_page(page)?; self.render_page(page)?;
} }
@ -1008,8 +996,12 @@ impl Site {
.map(|pager| { .map(|pager| {
let page_path = folder_path.join(&format!("{}", pager.index)); let page_path = folder_path.join(&format!("{}", pager.index));
create_directory(&page_path)?; create_directory(&page_path)?;
let output = let output = paginator.render_pager(
paginator.render_pager(pager, &self.config, &self.tera, &self.library)?; pager,
&self.config,
&self.tera,
&self.library.read().unwrap(),
)?;
if pager.index > 1 { if pager.index > 1 {
create_file(&page_path.join("index.html"), &self.inject_livereload(output))?; create_file(&page_path.join("index.html"), &self.inject_livereload(output))?;
} else { } else {

View file

@ -0,0 +1,127 @@
use std::borrow::Cow;
use std::hash::{Hash, Hasher};
use std::collections::{HashSet};
use tera::{Map, Value};
use config::{Config};
use library::{Library, Taxonomy};
/// The sitemap only needs links, potentially date and extra for pages in case of updates
/// for examples so we trim down all entries to only that
#[derive(Debug, Serialize)]
pub struct SitemapEntry<'a> {
    // Absolute URL of the entry; borrowed from the page/section when possible,
    // owned when it had to be built with `config.make_permalink` (see find_entries).
    permalink: Cow<'a, str>,
    // Page date rendered to a string, if the page front-matter had one; None for
    // sections and taxonomy entries.
    date: Option<String>,
    // Borrowed `extra` front-matter map of a page; only set via `add_extra`.
    extra: Option<&'a Map<String, Value>>,
}
// Hash/Eq is not implemented for tera::Map but in our case we only care about the permalink
// when comparing/hashing so we implement it manually
impl<'a> Hash for SitemapEntry<'a> {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        // Only the permalink participates in the hash, mirroring the PartialEq impl.
        Hash::hash(&self.permalink, hasher);
    }
}
impl<'a> PartialEq for SitemapEntry<'a> {
    /// Two entries are considered equal when their permalinks match;
    /// `date` and `extra` are deliberately ignored.
    fn eq(&self, other: &SitemapEntry) -> bool {
        self.permalink.eq(&other.permalink)
    }
}

// Permalink equality is reflexive/total, so Eq is sound.
impl<'a> Eq for SitemapEntry<'a> {}
impl<'a> SitemapEntry<'a> {
pub fn new(permalink: Cow<'a, str>, date: Option<String>) -> Self {
SitemapEntry { permalink, date, extra: None }
}
pub fn add_extra(&mut self, extra: &'a Map<String, Value>) {
self.extra = Some(extra);
}
}
/// Finds out all the links to put in a sitemap from the pages/sections/taxonomies
/// There are no duplicate permalinks in the output vec
pub fn find_entries<'a>(library: &'a Library, taxonomies: &'a [Taxonomy], config: &'a Config) -> Vec<SitemapEntry<'a>> {
let pages = library
.pages_values()
.iter()
.filter(|p| !p.is_draft())
.map(|p| {
let date = match p.meta.date {
Some(ref d) => Some(d.to_string()),
None => None,
};
let mut entry = SitemapEntry::new(Cow::Borrowed(&p.permalink), date);
entry.add_extra(&p.meta.extra);
entry
})
.collect::<Vec<_>>();
let mut sections = library
.sections_values()
.iter()
.filter(|s| s.meta.render)
.map(|s| SitemapEntry::new(Cow::Borrowed(&s.permalink), None))
.collect::<Vec<_>>();
for section in library
.sections_values()
.iter()
.filter(|s| s.meta.paginate_by.is_some())
{
let number_pagers = (section.pages.len() as f64
/ section.meta.paginate_by.unwrap() as f64)
.ceil() as isize;
for i in 1..=number_pagers {
let permalink =
format!("{}{}/{}/", section.permalink, section.meta.paginate_path, i);
sections.push(SitemapEntry::new(Cow::Owned(permalink), None))
}
}
let mut taxonomies_entries = vec![];
for taxonomy in taxonomies {
let name = &taxonomy.kind.name;
let mut terms = vec![];
terms.push(SitemapEntry::new(Cow::Owned(config.make_permalink(name)), None));
for item in &taxonomy.items {
terms.push(SitemapEntry::new(
Cow::Owned(config.make_permalink(&format!("{}/{}", name, item.slug))),
None,
));
if taxonomy.kind.is_paginated() {
let number_pagers = (item.pages.len() as f64
/ taxonomy.kind.paginate_by.unwrap() as f64)
.ceil() as isize;
for i in 1..=number_pagers {
let permalink = config.make_permalink(&format!(
"{}/{}/{}/{}",
name,
item.slug,
taxonomy.kind.paginate_path(),
i
));
terms.push(SitemapEntry::new(Cow::Owned(permalink), None))
}
}
}
taxonomies_entries.push(terms);
}
let mut all_sitemap_entries = HashSet::new();
for p in pages {
all_sitemap_entries.insert(p);
}
for s in sections {
all_sitemap_entries.insert(s);
}
for terms in taxonomies_entries {
for term in terms {
all_sitemap_entries.insert(term);
}
}
all_sitemap_entries.into_iter().collect::<Vec<_>>()
}

View file

@ -16,59 +16,59 @@ fn can_parse_site() {
path.push("test_site"); path.push("test_site");
let mut site = Site::new(&path, "config.toml").unwrap(); let mut site = Site::new(&path, "config.toml").unwrap();
site.load().unwrap(); site.load().unwrap();
let library = site.library.read().unwrap();
// Correct number of pages (sections do not count as pages) // Correct number of pages (sections do not count as pages)
assert_eq!(site.library.pages().len(), 22); assert_eq!(library.pages().len(), 22);
let posts_path = path.join("content").join("posts"); let posts_path = path.join("content").join("posts");
// Make sure the page with a url doesn't have any sections // Make sure the page with a url doesn't have any sections
let url_post = site.library.get_page(&posts_path.join("fixed-url.md")).unwrap(); let url_post = library.get_page(&posts_path.join("fixed-url.md")).unwrap();
assert_eq!(url_post.path, "a-fixed-url/"); assert_eq!(url_post.path, "a-fixed-url/");
// Make sure the article in a folder with only asset doesn't get counted as a section // Make sure the article in a folder with only asset doesn't get counted as a section
let asset_folder_post = let asset_folder_post =
site.library.get_page(&posts_path.join("with-assets").join("index.md")).unwrap(); library.get_page(&posts_path.join("with-assets").join("index.md")).unwrap();
assert_eq!(asset_folder_post.file.components, vec!["posts".to_string()]); assert_eq!(asset_folder_post.file.components, vec!["posts".to_string()]);
// That we have the right number of sections // That we have the right number of sections
assert_eq!(site.library.sections().len(), 11); assert_eq!(library.sections().len(), 11);
// And that the sections are correct // And that the sections are correct
let index_section = site.library.get_section(&path.join("content").join("_index.md")).unwrap(); let index_section = library.get_section(&path.join("content").join("_index.md")).unwrap();
assert_eq!(index_section.subsections.len(), 4); assert_eq!(index_section.subsections.len(), 4);
assert_eq!(index_section.pages.len(), 1); assert_eq!(index_section.pages.len(), 1);
assert!(index_section.ancestors.is_empty()); assert!(index_section.ancestors.is_empty());
let posts_section = site.library.get_section(&posts_path.join("_index.md")).unwrap(); let posts_section = library.get_section(&posts_path.join("_index.md")).unwrap();
assert_eq!(posts_section.subsections.len(), 2); assert_eq!(posts_section.subsections.len(), 2);
assert_eq!(posts_section.pages.len(), 10); assert_eq!(posts_section.pages.len(), 10);
assert_eq!( assert_eq!(
posts_section.ancestors, posts_section.ancestors,
vec![*site.library.get_section_key(&index_section.file.path).unwrap()] vec![*library.get_section_key(&index_section.file.path).unwrap()]
); );
// Make sure we remove all the pwd + content from the sections // Make sure we remove all the pwd + content from the sections
let basic = site.library.get_page(&posts_path.join("simple.md")).unwrap(); let basic = library.get_page(&posts_path.join("simple.md")).unwrap();
assert_eq!(basic.file.components, vec!["posts".to_string()]); assert_eq!(basic.file.components, vec!["posts".to_string()]);
assert_eq!( assert_eq!(
basic.ancestors, basic.ancestors,
vec![ vec![
*site.library.get_section_key(&index_section.file.path).unwrap(), *library.get_section_key(&index_section.file.path).unwrap(),
*site.library.get_section_key(&posts_section.file.path).unwrap(), *library.get_section_key(&posts_section.file.path).unwrap(),
] ]
); );
let tutorials_section = let tutorials_section =
site.library.get_section(&posts_path.join("tutorials").join("_index.md")).unwrap(); library.get_section(&posts_path.join("tutorials").join("_index.md")).unwrap();
assert_eq!(tutorials_section.subsections.len(), 2); assert_eq!(tutorials_section.subsections.len(), 2);
let sub1 = site.library.get_section_by_key(tutorials_section.subsections[0]); let sub1 = library.get_section_by_key(tutorials_section.subsections[0]);
let sub2 = site.library.get_section_by_key(tutorials_section.subsections[1]); let sub2 = library.get_section_by_key(tutorials_section.subsections[1]);
assert_eq!(sub1.clone().meta.title.unwrap(), "Programming"); assert_eq!(sub1.clone().meta.title.unwrap(), "Programming");
assert_eq!(sub2.clone().meta.title.unwrap(), "DevOps"); assert_eq!(sub2.clone().meta.title.unwrap(), "DevOps");
assert_eq!(tutorials_section.pages.len(), 0); assert_eq!(tutorials_section.pages.len(), 0);
let devops_section = site let devops_section = library
.library
.get_section(&posts_path.join("tutorials").join("devops").join("_index.md")) .get_section(&posts_path.join("tutorials").join("devops").join("_index.md"))
.unwrap(); .unwrap();
assert_eq!(devops_section.subsections.len(), 0); assert_eq!(devops_section.subsections.len(), 0);
@ -76,14 +76,13 @@ fn can_parse_site() {
assert_eq!( assert_eq!(
devops_section.ancestors, devops_section.ancestors,
vec![ vec![
*site.library.get_section_key(&index_section.file.path).unwrap(), *library.get_section_key(&index_section.file.path).unwrap(),
*site.library.get_section_key(&posts_section.file.path).unwrap(), *library.get_section_key(&posts_section.file.path).unwrap(),
*site.library.get_section_key(&tutorials_section.file.path).unwrap(), *library.get_section_key(&tutorials_section.file.path).unwrap(),
] ]
); );
let prog_section = site let prog_section = library
.library
.get_section(&posts_path.join("tutorials").join("programming").join("_index.md")) .get_section(&posts_path.join("tutorials").join("programming").join("_index.md"))
.unwrap(); .unwrap();
assert_eq!(prog_section.subsections.len(), 0); assert_eq!(prog_section.subsections.len(), 0);
@ -176,6 +175,8 @@ fn can_build_site_without_live_reload() {
)); ));
// Drafts are not in the sitemap // Drafts are not in the sitemap
assert!(!file_contains!(public, "sitemap.xml", "draft")); assert!(!file_contains!(public, "sitemap.xml", "draft"));
// render: false sections are not in the sitemap either
assert!(!file_contains!(public, "sitemap.xml", "posts/2018/</loc>"));
// robots.txt has been rendered from the template // robots.txt has been rendered from the template
assert!(file_contains!(public, "robots.txt", "User-agent: zola")); assert!(file_contains!(public, "robots.txt", "User-agent: zola"));
@ -234,15 +235,18 @@ fn can_build_site_with_live_reload() {
fn can_build_site_with_taxonomies() { fn can_build_site_with_taxonomies() {
let (site, _tmp_dir, public) = build_site_with_setup("test_site", |mut site| { let (site, _tmp_dir, public) = build_site_with_setup("test_site", |mut site| {
site.load().unwrap(); site.load().unwrap();
for (i, (_, page)) in site.library.pages_mut().iter_mut().enumerate() { {
page.meta.taxonomies = { let mut library = site.library.write().unwrap();
let mut taxonomies = HashMap::new(); for (i, (_, page)) in library.pages_mut().iter_mut().enumerate() {
taxonomies.insert( page.meta.taxonomies = {
"categories".to_string(), let mut taxonomies = HashMap::new();
vec![if i % 2 == 0 { "A" } else { "B" }.to_string()], taxonomies.insert(
); "categories".to_string(),
taxonomies vec![if i % 2 == 0 { "A" } else { "B" }.to_string()],
}; );
taxonomies
};
}
} }
site.populate_taxonomies().unwrap(); site.populate_taxonomies().unwrap();
(site, false) (site, false)
@ -311,12 +315,15 @@ fn can_build_site_and_insert_anchor_links() {
fn can_build_site_with_pagination_for_section() { fn can_build_site_with_pagination_for_section() {
let (_, _tmp_dir, public) = build_site_with_setup("test_site", |mut site| { let (_, _tmp_dir, public) = build_site_with_setup("test_site", |mut site| {
site.load().unwrap(); site.load().unwrap();
for (_, section) in site.library.sections_mut() { {
if section.is_index() { let mut library = site.library.write().unwrap();
continue; for (_, section) in library.sections_mut() {
if section.is_index() {
continue;
}
section.meta.paginate_by = Some(2);
section.meta.template = Some("section_paginated.html".to_string());
} }
section.meta.paginate_by = Some(2);
section.meta.template = Some("section_paginated.html".to_string());
} }
(site, false) (site, false)
}); });
@ -425,12 +432,14 @@ fn can_build_site_with_pagination_for_index() {
let (_, _tmp_dir, public) = build_site_with_setup("test_site", |mut site| { let (_, _tmp_dir, public) = build_site_with_setup("test_site", |mut site| {
site.load().unwrap(); site.load().unwrap();
{ {
let index = site let mut library = site.library.write().unwrap();
.library {
.get_section_mut(&site.base_path.join("content").join("_index.md")) let index = library
.unwrap(); .get_section_mut(&site.base_path.join("content").join("_index.md"))
index.meta.paginate_by = Some(2); .unwrap();
index.meta.template = Some("index_paginated.html".to_string()); index.meta.paginate_by = Some(2);
index.meta.template = Some("index_paginated.html".to_string());
}
} }
(site, false) (site, false)
}); });
@ -479,18 +488,22 @@ fn can_build_site_with_pagination_for_taxonomy() {
paginate_by: Some(2), paginate_by: Some(2),
paginate_path: None, paginate_path: None,
rss: true, rss: true,
lang: site.config.default_language.clone(),
}); });
site.load().unwrap(); site.load().unwrap();
{
let mut library = site.library.write().unwrap();
for (i, (_, page)) in site.library.pages_mut().iter_mut().enumerate() { for (i, (_, page)) in library.pages_mut().iter_mut().enumerate() {
page.meta.taxonomies = { page.meta.taxonomies = {
let mut taxonomies = HashMap::new(); let mut taxonomies = HashMap::new();
taxonomies.insert( taxonomies.insert(
"tags".to_string(), "tags".to_string(),
vec![if i % 2 == 0 { "A" } else { "B" }.to_string()], vec![if i % 2 == 0 { "A" } else { "B" }.to_string()],
); );
taxonomies taxonomies
}; };
}
} }
site.populate_taxonomies().unwrap(); site.populate_taxonomies().unwrap();
(site, false) (site, false)
@ -593,38 +606,48 @@ fn can_apply_page_templates() {
site.load().unwrap(); site.load().unwrap();
let template_path = path.join("content").join("applying_page_template"); let template_path = path.join("content").join("applying_page_template");
let library = site.library.read().unwrap();
let template_section = site.library.get_section(&template_path.join("_index.md")).unwrap(); let template_section = library.get_section(&template_path.join("_index.md")).unwrap();
assert_eq!(template_section.subsections.len(), 2); assert_eq!(template_section.subsections.len(), 2);
assert_eq!(template_section.pages.len(), 2); assert_eq!(template_section.pages.len(), 2);
let from_section_config = site.library.get_page_by_key(template_section.pages[0]); let from_section_config = library.get_page_by_key(template_section.pages[0]);
assert_eq!(from_section_config.meta.template, Some("page_template.html".into())); assert_eq!(from_section_config.meta.template, Some("page_template.html".into()));
assert_eq!(from_section_config.meta.title, Some("From section config".into())); assert_eq!(from_section_config.meta.title, Some("From section config".into()));
let override_page_template = site.library.get_page_by_key(template_section.pages[1]); let override_page_template = library.get_page_by_key(template_section.pages[1]);
assert_eq!(override_page_template.meta.template, Some("page_template_override.html".into())); assert_eq!(override_page_template.meta.template, Some("page_template_override.html".into()));
assert_eq!(override_page_template.meta.title, Some("Override".into())); assert_eq!(override_page_template.meta.title, Some("Override".into()));
// It should have applied recursively as well // It should have applied recursively as well
let another_section = let another_section =
site.library.get_section(&template_path.join("another_section").join("_index.md")).unwrap(); library.get_section(&template_path.join("another_section").join("_index.md")).unwrap();
assert_eq!(another_section.subsections.len(), 0); assert_eq!(another_section.subsections.len(), 0);
assert_eq!(another_section.pages.len(), 1); assert_eq!(another_section.pages.len(), 1);
let changed_recursively = site.library.get_page_by_key(another_section.pages[0]); let changed_recursively = library.get_page_by_key(another_section.pages[0]);
assert_eq!(changed_recursively.meta.template, Some("page_template.html".into())); assert_eq!(changed_recursively.meta.template, Some("page_template.html".into()));
assert_eq!(changed_recursively.meta.title, Some("Changed recursively".into())); assert_eq!(changed_recursively.meta.title, Some("Changed recursively".into()));
// But it should not have override a children page_template // But it should not have override a children page_template
let yet_another_section = site let yet_another_section =
.library library.get_section(&template_path.join("yet_another_section").join("_index.md")).unwrap();
.get_section(&template_path.join("yet_another_section").join("_index.md"))
.unwrap();
assert_eq!(yet_another_section.subsections.len(), 0); assert_eq!(yet_another_section.subsections.len(), 0);
assert_eq!(yet_another_section.pages.len(), 1); assert_eq!(yet_another_section.pages.len(), 1);
let child = site.library.get_page_by_key(yet_another_section.pages[0]); let child = library.get_page_by_key(yet_another_section.pages[0]);
assert_eq!(child.meta.template, Some("page_template_child.html".into())); assert_eq!(child.meta.template, Some("page_template_child.html".into()));
assert_eq!(child.meta.title, Some("Local section override".into())); assert_eq!(child.meta.title, Some("Local section override".into()));
} }
// https://github.com/getzola/zola/issues/571
#[test]
fn can_build_site_custom_builtins_from_theme() {
let (_, _tmp_dir, public) = build_site("test_site");
assert!(&public.exists());
// 404.html is a theme template.
assert!(file_exists!(public, "404.html"));
assert!(file_contains!(public, "404.html", "Oops"));
}

View file

@ -13,45 +13,45 @@ fn can_parse_multilingual_site() {
let mut site = Site::new(&path, "config.toml").unwrap(); let mut site = Site::new(&path, "config.toml").unwrap();
site.load().unwrap(); site.load().unwrap();
assert_eq!(site.library.pages().len(), 10); let library = site.library.read().unwrap();
assert_eq!(site.library.sections().len(), 6); assert_eq!(library.pages().len(), 10);
assert_eq!(library.sections().len(), 6);
// default index sections // default index sections
let default_index_section = let default_index_section =
site.library.get_section(&path.join("content").join("_index.md")).unwrap(); library.get_section(&path.join("content").join("_index.md")).unwrap();
assert_eq!(default_index_section.pages.len(), 1); assert_eq!(default_index_section.pages.len(), 1);
assert!(default_index_section.ancestors.is_empty()); assert!(default_index_section.ancestors.is_empty());
let fr_index_section = let fr_index_section = library.get_section(&path.join("content").join("_index.fr.md")).unwrap();
site.library.get_section(&path.join("content").join("_index.fr.md")).unwrap();
assert_eq!(fr_index_section.pages.len(), 1); assert_eq!(fr_index_section.pages.len(), 1);
assert!(fr_index_section.ancestors.is_empty()); assert!(fr_index_section.ancestors.is_empty());
// blog sections get only their own language pages // blog sections get only their own language pages
let blog_path = path.join("content").join("blog"); let blog_path = path.join("content").join("blog");
let default_blog = site.library.get_section(&blog_path.join("_index.md")).unwrap(); let default_blog = library.get_section(&blog_path.join("_index.md")).unwrap();
assert_eq!(default_blog.subsections.len(), 0); assert_eq!(default_blog.subsections.len(), 0);
assert_eq!(default_blog.pages.len(), 4); assert_eq!(default_blog.pages.len(), 4);
assert_eq!( assert_eq!(
default_blog.ancestors, default_blog.ancestors,
vec![*site.library.get_section_key(&default_index_section.file.path).unwrap()] vec![*library.get_section_key(&default_index_section.file.path).unwrap()]
); );
for key in &default_blog.pages { for key in &default_blog.pages {
let page = site.library.get_page_by_key(*key); let page = library.get_page_by_key(*key);
assert_eq!(page.lang, None); assert_eq!(page.lang, "en");
} }
let fr_blog = site.library.get_section(&blog_path.join("_index.fr.md")).unwrap(); let fr_blog = library.get_section(&blog_path.join("_index.fr.md")).unwrap();
assert_eq!(fr_blog.subsections.len(), 0); assert_eq!(fr_blog.subsections.len(), 0);
assert_eq!(fr_blog.pages.len(), 3); assert_eq!(fr_blog.pages.len(), 3);
assert_eq!( assert_eq!(
fr_blog.ancestors, fr_blog.ancestors,
vec![*site.library.get_section_key(&fr_index_section.file.path).unwrap()] vec![*library.get_section_key(&fr_index_section.file.path).unwrap()]
); );
for key in &fr_blog.pages { for key in &fr_blog.pages {
let page = site.library.get_page_by_key(*key); let page = library.get_page_by_key(*key);
assert_eq!(page.lang, Some("fr".to_string())); assert_eq!(page.lang, "fr");
} }
} }
@ -87,7 +87,7 @@ fn can_build_multilingual_site() {
assert!(file_contains!( assert!(file_contains!(
public, public,
"fr/blog/index.html", "fr/blog/index.html",
"Translated in : My blog https://example.com/blog/" "Translated in en: My blog https://example.com/blog/"
)); ));
assert!(file_contains!( assert!(file_contains!(
public, public,
@ -107,7 +107,7 @@ fn can_build_multilingual_site() {
assert!(file_contains!( assert!(file_contains!(
public, public,
"fr/blog/something/index.html", "fr/blog/something/index.html",
"Translated in : Something https://example.com/blog/something/" "Translated in en: Something https://example.com/blog/something/"
)); ));
// sitemap contains all languages // sitemap contains all languages
@ -125,4 +125,17 @@ fn can_build_multilingual_site() {
assert!(file_contains!(public, "fr/rss.xml", "https://example.com/fr/blog/something-else/")); assert!(file_contains!(public, "fr/rss.xml", "https://example.com/fr/blog/something-else/"));
// Italian doesn't have RSS enabled // Italian doesn't have RSS enabled
assert!(!file_exists!(public, "it/rss.xml")); assert!(!file_exists!(public, "it/rss.xml"));
// Taxonomies are per-language
assert!(file_exists!(public, "authors/index.html"));
assert!(file_contains!(public, "authors/index.html", "Queen"));
assert!(!file_contains!(public, "authors/index.html", "Vincent"));
assert!(!file_exists!(public, "auteurs/index.html"));
assert!(file_exists!(public, "authors/queen-elizabeth/rss.xml"));
assert!(!file_exists!(public, "fr/authors/index.html"));
assert!(file_exists!(public, "fr/auteurs/index.html"));
assert!(!file_contains!(public, "fr/auteurs/index.html", "Queen"));
assert!(file_contains!(public, "fr/auteurs/index.html", "Vincent"));
assert!(!file_exists!(public, "fr/auteurs/vincent-prouillet/rss.xml"));
} }

View file

@ -4,14 +4,13 @@ version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"] authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
[dependencies] [dependencies]
tera = "0.11" tera = "1.0.0-alpha.3"
base64 = "0.10" base64 = "0.10"
lazy_static = "1" lazy_static = "1"
pulldown-cmark = "0.2" pulldown-cmark = "0.2"
toml = "0.4" toml = "0.4"
csv = "1" csv = "1"
serde_json = "1.0" serde_json = "1.0"
error-chain = "0.12"
reqwest = "0.9" reqwest = "0.9"
url = "1.5" url = "1.5"

View file

@ -1 +1,2 @@
User-agent: * User-agent: *
Sitemap: {{ get_url(path="sitemap.xml") }}

View file

@ -1,22 +1,10 @@
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"> <urlset xmlns="https://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd">
{% for page in pages %} {% for sitemap_entry in entries %}
<url> <url>
<loc>{{ page.permalink | safe }}</loc> <loc>{{ sitemap_entry.permalink | safe }}</loc>
{% if page.date %} {% if sitemap_entry.date %}
<lastmod>{{ page.date }}</lastmod> <lastmod>{{ sitemap_entry.date }}</lastmod>
{% endif %} {% endif %}
</url> </url>
{% endfor %} {% endfor %}
{% for section in sections %}
<url>
<loc>{{ section.permalink | safe }}</loc>
</url>
{% endfor %}
{% for taxonomy in taxonomies %}
{% for entry in taxonomy %}
<url>
<loc>{{ entry.permalink | safe }}</loc>
</url>
{% endfor %}
{% endfor %}
</urlset> </urlset>

View file

@ -0,0 +1,7 @@
<sitemapindex xmlns="https://www.sitemaps.org/schemas/sitemap/0.9/siteindex.xsd">
{% for sitemap in sitemaps %}
<sitemap>
<loc>{{ sitemap }}</loc>
</sitemap>
{% endfor %}
</sitemapindex>

View file

@ -4,7 +4,7 @@ use base64::{decode, encode};
use pulldown_cmark as cmark; use pulldown_cmark as cmark;
use tera::{to_value, Result as TeraResult, Value}; use tera::{to_value, Result as TeraResult, Value};
pub fn markdown(value: Value, args: HashMap<String, Value>) -> TeraResult<Value> { pub fn markdown(value: &Value, args: &HashMap<String, Value>) -> TeraResult<Value> {
let s = try_get_value!("markdown", "value", String, value); let s = try_get_value!("markdown", "value", String, value);
let inline = match args.get("inline") { let inline = match args.get("inline") {
Some(val) => try_get_value!("markdown", "inline", bool, val), Some(val) => try_get_value!("markdown", "inline", bool, val),
@ -21,21 +21,21 @@ pub fn markdown(value: Value, args: HashMap<String, Value>) -> TeraResult<Value>
if inline { if inline {
html = html html = html
.trim_left_matches("<p>") .trim_start_matches("<p>")
// pulldown_cmark finishes a paragraph with `</p>\n` // pulldown_cmark finishes a paragraph with `</p>\n`
.trim_right_matches("</p>\n") .trim_end_matches("</p>\n")
.to_string(); .to_string();
} }
Ok(to_value(&html).unwrap()) Ok(to_value(&html).unwrap())
} }
pub fn base64_encode(value: Value, _: HashMap<String, Value>) -> TeraResult<Value> { pub fn base64_encode(value: &Value, _: &HashMap<String, Value>) -> TeraResult<Value> {
let s = try_get_value!("base64_encode", "value", String, value); let s = try_get_value!("base64_encode", "value", String, value);
Ok(to_value(&encode(s.as_bytes())).unwrap()) Ok(to_value(&encode(s.as_bytes())).unwrap())
} }
pub fn base64_decode(value: Value, _: HashMap<String, Value>) -> TeraResult<Value> { pub fn base64_decode(value: &Value, _: &HashMap<String, Value>) -> TeraResult<Value> {
let s = try_get_value!("base64_decode", "value", String, value); let s = try_get_value!("base64_decode", "value", String, value);
Ok(to_value(&String::from_utf8(decode(s.as_bytes()).unwrap()).unwrap()).unwrap()) Ok(to_value(&String::from_utf8(decode(s.as_bytes()).unwrap()).unwrap()).unwrap())
} }
@ -50,7 +50,7 @@ mod tests {
#[test] #[test]
fn markdown_filter() { fn markdown_filter() {
let result = markdown(to_value(&"# Hey").unwrap(), HashMap::new()); let result = markdown(&to_value(&"# Hey").unwrap(), &HashMap::new());
assert!(result.is_ok()); assert!(result.is_ok());
assert_eq!(result.unwrap(), to_value(&"<h1>Hey</h1>\n").unwrap()); assert_eq!(result.unwrap(), to_value(&"<h1>Hey</h1>\n").unwrap());
} }
@ -60,8 +60,8 @@ mod tests {
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("inline".to_string(), to_value(true).unwrap()); args.insert("inline".to_string(), to_value(true).unwrap());
let result = markdown( let result = markdown(
to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(), &to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(),
args, &args,
); );
assert!(result.is_ok()); assert!(result.is_ok());
assert_eq!(result.unwrap(), to_value(&"Using <code>map</code>, <code>filter</code>, and <code>fold</code> instead of <code>for</code>").unwrap()); assert_eq!(result.unwrap(), to_value(&"Using <code>map</code>, <code>filter</code>, and <code>fold</code> instead of <code>for</code>").unwrap());
@ -73,7 +73,7 @@ mod tests {
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("inline".to_string(), to_value(true).unwrap()); args.insert("inline".to_string(), to_value(true).unwrap());
let result = markdown( let result = markdown(
to_value( &to_value(
&r#" &r#"
|id|author_id| timestamp_created|title |content | |id|author_id| timestamp_created|title |content |
|-:|--------:|-----------------------:|:---------------------|:-----------------| |-:|--------:|-----------------------:|:---------------------|:-----------------|
@ -82,7 +82,7 @@ mod tests {
"#, "#,
) )
.unwrap(), .unwrap(),
args, &args,
); );
assert!(result.is_ok()); assert!(result.is_ok());
assert!(result.unwrap().as_str().unwrap().contains("<table>")); assert!(result.unwrap().as_str().unwrap().contains("<table>"));
@ -102,7 +102,7 @@ mod tests {
]; ];
for (input, expected) in tests { for (input, expected) in tests {
let args = HashMap::new(); let args = HashMap::new();
let result = base64_encode(to_value(input).unwrap(), args); let result = base64_encode(&to_value(input).unwrap(), &args);
assert!(result.is_ok()); assert!(result.is_ok());
assert_eq!(result.unwrap(), to_value(expected).unwrap()); assert_eq!(result.unwrap(), to_value(expected).unwrap());
} }
@ -121,7 +121,7 @@ mod tests {
]; ];
for (input, expected) in tests { for (input, expected) in tests {
let args = HashMap::new(); let args = HashMap::new();
let result = base64_decode(to_value(input).unwrap(), args); let result = base64_decode(&to_value(input).unwrap(), &args);
assert!(result.is_ok()); assert!(result.is_ok());
assert_eq!(result.unwrap(), to_value(expected).unwrap()); assert_eq!(result.unwrap(), to_value(expected).unwrap());
} }

View file

@ -16,7 +16,7 @@ use std::sync::{Arc, Mutex};
use csv::Reader; use csv::Reader;
use std::collections::HashMap; use std::collections::HashMap;
use tera::{from_value, to_value, Error, GlobalFn, Map, Result, Value}; use tera::{from_value, to_value, Error, Function as TeraFn, Map, Result, Value};
static GET_DATA_ARGUMENT_ERROR_MESSAGE: &str = static GET_DATA_ARGUMENT_ERROR_MESSAGE: &str =
"`load_data`: requires EITHER a `path` or `url` argument"; "`load_data`: requires EITHER a `path` or `url` argument";
@ -151,47 +151,56 @@ fn get_output_format_from_args(
let format_arg = optional_arg!( let format_arg = optional_arg!(
String, String,
args.get("format"), args.get("format"),
"`load_data`: `format` needs to be an argument with a string value, being one of the supported `load_data` file types (csv, json, toml)" "`load_data`: `format` needs to be an argument with a string value, being one of the supported `load_data` file types (csv, json, toml, plain)"
); );
if let Some(format) = format_arg { if let Some(format) = format_arg {
if format == "plain" {
return Ok(OutputFormat::Plain);
}
return OutputFormat::from_str(&format); return OutputFormat::from_str(&format);
} }
let from_extension = if let DataSource::Path(path) = data_source { let from_extension = if let DataSource::Path(path) = data_source {
let extension_result: Result<&str> = path.extension().map(|extension| extension.to_str().unwrap()).unwrap_or_else(|| "plain")
path.extension().map(|extension| extension.to_str().unwrap()).ok_or_else(|| {
format!("Could not determine format for {} from extension", path.display()).into()
});
extension_result?
} else { } else {
"plain" "plain"
}; };
OutputFormat::from_str(from_extension)
// Always default to Plain if we don't know what it is
OutputFormat::from_str(from_extension).or_else(|_| Ok(OutputFormat::Plain))
} }
/// A global function to load data from a file or from a URL /// A Tera function to load data from a file or from a URL
/// Currently the supported formats are json, toml, csv and plain text /// Currently the supported formats are json, toml, csv and plain text
pub fn make_load_data(content_path: PathBuf, base_path: PathBuf) -> GlobalFn { #[derive(Debug)]
let mut headers = header::HeaderMap::new(); pub struct LoadData {
headers.insert(header::USER_AGENT, "zola".parse().unwrap()); base_path: PathBuf,
let client = Arc::new(Mutex::new(Client::builder().build().expect("reqwest client build"))); client: Arc<Mutex<Client>>,
let result_cache: Arc<Mutex<HashMap<u64, Value>>> = Arc::new(Mutex::new(HashMap::new())); result_cache: Arc<Mutex<HashMap<u64, Value>>>,
Box::new(move |args| -> Result<Value> { }
let data_source = get_data_source_from_args(&content_path, &args)?; impl LoadData {
pub fn new(base_path: PathBuf) -> Self {
let client = Arc::new(Mutex::new(Client::builder().build().expect("reqwest client build")));
let result_cache = Arc::new(Mutex::new(HashMap::new()));
Self { base_path, client, result_cache }
}
}
impl TeraFn for LoadData {
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
let data_source = get_data_source_from_args(&self.base_path, &args)?;
let file_format = get_output_format_from_args(&args, &data_source)?; let file_format = get_output_format_from_args(&args, &data_source)?;
let cache_key = data_source.get_cache_key(&file_format); let cache_key = data_source.get_cache_key(&file_format);
let mut cache = result_cache.lock().expect("result cache lock"); let mut cache = self.result_cache.lock().expect("result cache lock");
let response_client = client.lock().expect("response client lock"); let response_client = self.client.lock().expect("response client lock");
if let Some(cached_result) = cache.get(&cache_key) { if let Some(cached_result) = cache.get(&cache_key) {
return Ok(cached_result.clone()); return Ok(cached_result.clone());
} }
let data = match data_source { let data = match data_source {
DataSource::Path(path) => read_data_file(&base_path, path), DataSource::Path(path) => read_data_file(&self.base_path, path),
DataSource::Url(url) => { DataSource::Url(url) => {
let mut response = response_client let mut response = response_client
.get(url.as_str()) .get(url.as_str())
@ -223,7 +232,7 @@ pub fn make_load_data(content_path: PathBuf, base_path: PathBuf) -> GlobalFn {
} }
result_value result_value
}) }
} }
/// Parse a JSON string and convert it to a Tera Value /// Parse a JSON string and convert it to a Tera Value
@ -282,7 +291,16 @@ fn load_csv(csv_data: String) -> Result<Value> {
let mut records_array: Vec<Value> = Vec::new(); let mut records_array: Vec<Value> = Vec::new();
for result in records { for result in records {
let record = result.unwrap(); let record = match result {
Ok(r) => r,
Err(e) => {
return Err(tera::Error::chain(
String::from("Error encountered when parsing csv records"),
e,
));
}
};
let mut elements_array: Vec<Value> = Vec::new(); let mut elements_array: Vec<Value> = Vec::new();
for e in record.into_iter() { for e in record.into_iter() {
@ -301,12 +319,12 @@ fn load_csv(csv_data: String) -> Result<Value> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{make_load_data, DataSource, OutputFormat}; use super::{DataSource, LoadData, OutputFormat};
use std::collections::HashMap; use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use tera::to_value; use tera::{to_value, Function};
fn get_test_file(filename: &str) -> PathBuf { fn get_test_file(filename: &str) -> PathBuf {
let test_files = PathBuf::from("../utils/test-files").canonicalize().unwrap(); let test_files = PathBuf::from("../utils/test-files").canonicalize().unwrap();
@ -315,27 +333,25 @@ mod tests {
#[test] #[test]
fn fails_when_missing_file() { fn fails_when_missing_file() {
let static_fn = let static_fn = LoadData::new(PathBuf::from("../utils"));
make_load_data(PathBuf::from("../utils/test-files"), PathBuf::from("../utils"));
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("path".to_string(), to_value("../../../READMEE.md").unwrap()); args.insert("path".to_string(), to_value("../../../READMEE.md").unwrap());
let result = static_fn(args); let result = static_fn.call(&args);
assert!(result.is_err()); assert!(result.is_err());
assert!(result.unwrap_err().description().contains("READMEE.md doesn't exist")); assert!(result.unwrap_err().to_string().contains("READMEE.md doesn't exist"));
} }
#[test] #[test]
fn cant_load_outside_content_dir() { fn cant_load_outside_content_dir() {
let static_fn = let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")));
make_load_data(PathBuf::from("../utils/test-files"), PathBuf::from("../utils"));
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("path".to_string(), to_value("../../../README.md").unwrap()); args.insert("path".to_string(), to_value("../../README.md").unwrap());
args.insert("format".to_string(), to_value("plain").unwrap()); args.insert("format".to_string(), to_value("plain").unwrap());
let result = static_fn(args); let result = static_fn.call(&args);
assert!(result.is_err()); assert!(result.is_err());
assert!(result assert!(result
.unwrap_err() .unwrap_err()
.description() .to_string()
.contains("README.md is not inside the base site directory")); .contains("README.md is not inside the base site directory"));
} }
@ -377,11 +393,11 @@ mod tests {
#[test] #[test]
fn can_load_remote_data() { fn can_load_remote_data() {
let static_fn = make_load_data(PathBuf::new(), PathBuf::new()); let static_fn = LoadData::new(PathBuf::new());
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("url".to_string(), to_value("https://httpbin.org/json").unwrap()); args.insert("url".to_string(), to_value("https://httpbin.org/json").unwrap());
args.insert("format".to_string(), to_value("json").unwrap()); args.insert("format".to_string(), to_value("json").unwrap());
let result = static_fn(args).unwrap(); let result = static_fn.call(&args).unwrap();
assert_eq!( assert_eq!(
result.get("slideshow").unwrap().get("title").unwrap(), result.get("slideshow").unwrap().get("title").unwrap(),
&to_value("Sample Slide Show").unwrap() &to_value("Sample Slide Show").unwrap()
@ -390,29 +406,26 @@ mod tests {
#[test] #[test]
fn fails_when_request_404s() { fn fails_when_request_404s() {
let static_fn = make_load_data(PathBuf::new(), PathBuf::new()); let static_fn = LoadData::new(PathBuf::new());
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("url".to_string(), to_value("https://httpbin.org/status/404/").unwrap()); args.insert("url".to_string(), to_value("https://httpbin.org/status/404/").unwrap());
args.insert("format".to_string(), to_value("json").unwrap()); args.insert("format".to_string(), to_value("json").unwrap());
let result = static_fn(args); let result = static_fn.call(&args);
assert!(result.is_err()); assert!(result.is_err());
assert_eq!( assert_eq!(
result.unwrap_err().description(), result.unwrap_err().to_string(),
"Failed to request https://httpbin.org/status/404/: 404 Not Found" "Failed to request https://httpbin.org/status/404/: 404 Not Found"
); );
} }
#[test] #[test]
fn can_load_toml() { fn can_load_toml() {
let static_fn = make_load_data( let static_fn = LoadData::new(PathBuf::from("../utils/test-files"));
PathBuf::from("../utils/test-files"),
PathBuf::from("../utils/test-files"),
);
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("path".to_string(), to_value("test.toml").unwrap()); args.insert("path".to_string(), to_value("test.toml").unwrap());
let result = static_fn(args.clone()).unwrap(); let result = static_fn.call(&args.clone()).unwrap();
//TOML does not load in order // TOML does not load in order
assert_eq!( assert_eq!(
result, result,
json!({ json!({
@ -425,14 +438,52 @@ mod tests {
} }
#[test] #[test]
fn can_load_csv() { fn unknown_extension_defaults_to_plain() {
let static_fn = make_load_data( let static_fn = LoadData::new(PathBuf::from("../utils/test-files"));
PathBuf::from("../utils/test-files"), let mut args = HashMap::new();
PathBuf::from("../utils/test-files"), args.insert("path".to_string(), to_value("test.css").unwrap());
let result = static_fn.call(&args.clone()).unwrap();
assert_eq!(
result,
".hello {}\n",
); );
}
#[test]
fn can_override_known_extension_with_format() {
let static_fn = LoadData::new(PathBuf::from("../utils/test-files"));
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("path".to_string(), to_value("test.csv").unwrap()); args.insert("path".to_string(), to_value("test.csv").unwrap());
let result = static_fn(args.clone()).unwrap(); args.insert("format".to_string(), to_value("plain").unwrap());
let result = static_fn.call(&args.clone()).unwrap();
assert_eq!(
result,
"Number,Title\n1,Gutenberg\n2,Printing",
);
}
#[test]
fn will_use_format_on_unknown_extension() {
let static_fn = LoadData::new(PathBuf::from("../utils/test-files"));
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("test.css").unwrap());
args.insert("format".to_string(), to_value("plain").unwrap());
let result = static_fn.call(&args.clone()).unwrap();
assert_eq!(
result,
".hello {}\n",
);
}
#[test]
fn can_load_csv() {
let static_fn = LoadData::new(PathBuf::from("../utils/test-files"));
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("test.csv").unwrap());
let result = static_fn.call(&args.clone()).unwrap();
assert_eq!( assert_eq!(
result, result,
@ -446,15 +497,33 @@ mod tests {
) )
} }
// Test points to bad csv file with uneven row lengths
#[test]
fn bad_csv_should_result_in_error() {
let static_fn = LoadData::new(PathBuf::from("../utils/test-files"));
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("uneven_rows.csv").unwrap());
let result = static_fn.call(&args.clone());
assert!(result.is_err());
let error_kind = result.err().unwrap().kind;
match error_kind {
tera::ErrorKind::Msg(msg) => {
if msg != String::from("Error encountered when parsing csv records") {
panic!("Error message is wrong. Perhaps wrong error is being returned?");
}
}
_ => panic!("Error encountered was not expected CSV error"),
}
}
#[test] #[test]
fn can_load_json() { fn can_load_json() {
let static_fn = make_load_data( let static_fn = LoadData::new(PathBuf::from("../utils/test-files"));
PathBuf::from("../utils/test-files"),
PathBuf::from("../utils/test-files"),
);
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("path".to_string(), to_value("test.json").unwrap()); args.insert("path".to_string(), to_value("test.json").unwrap());
let result = static_fn(args.clone()).unwrap(); let result = static_fn.call(&args.clone()).unwrap();
assert_eq!( assert_eq!(
result, result,

View file

@ -1,9 +1,8 @@
extern crate error_chain;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::{Arc, Mutex}; use std::path::PathBuf;
use std::sync::{Arc, Mutex, RwLock};
use tera::{from_value, to_value, GlobalFn, Result, Value}; use tera::{from_value, to_value, Function as TeraFn, Result, Value};
use config::Config; use config::Config;
use library::{Library, Taxonomy}; use library::{Library, Taxonomy};
@ -16,82 +15,39 @@ mod macros;
mod load_data; mod load_data;
pub use self::load_data::make_load_data; pub use self::load_data::LoadData;
pub fn make_trans(config: Config) -> GlobalFn { #[derive(Debug)]
let translations_config = config.translations; pub struct Trans {
let default_lang = config.default_language.clone(); config: Config,
}
Box::new(move |args| -> Result<Value> { impl Trans {
pub fn new(config: Config) -> Self {
Self { config }
}
}
impl TeraFn for Trans {
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
let key = required_arg!(String, args.get("key"), "`trans` requires a `key` argument."); let key = required_arg!(String, args.get("key"), "`trans` requires a `key` argument.");
let lang = optional_arg!(String, args.get("lang"), "`trans`: `lang` must be a string.") let lang = optional_arg!(String, args.get("lang"), "`trans`: `lang` must be a string.")
.unwrap_or_else(|| default_lang.clone()); .unwrap_or_else(|| self.config.default_language.clone());
let translations = &translations_config[lang.as_str()]; let translations = &self.config.translations[lang.as_str()];
Ok(to_value(&translations[key.as_str()]).unwrap()) Ok(to_value(&translations[key.as_str()]).unwrap())
})
}
pub fn make_get_page(library: &Library) -> GlobalFn {
let mut pages = HashMap::new();
for page in library.pages_values() {
pages.insert(
page.file.relative.clone(),
to_value(library.get_page(&page.file.path).unwrap().to_serialized(library)).unwrap(),
);
} }
Box::new(move |args| -> Result<Value> {
let path = required_arg!(
String,
args.get("path"),
"`get_page` requires a `path` argument with a string value"
);
match pages.get(&path) {
Some(p) => Ok(p.clone()),
None => Err(format!("Page `{}` not found.", path).into()),
}
})
} }
pub fn make_get_section(library: &Library) -> GlobalFn { #[derive(Debug)]
let mut sections = HashMap::new(); pub struct GetUrl {
let mut sections_basic = HashMap::new(); config: Config,
for section in library.sections_values() { permalinks: HashMap<String, String>,
sections.insert( }
section.file.relative.clone(), impl GetUrl {
to_value(library.get_section(&section.file.path).unwrap().to_serialized(library)) pub fn new(config: Config, permalinks: HashMap<String, String>) -> Self {
.unwrap(), Self { config, permalinks }
);
sections_basic.insert(
section.file.relative.clone(),
to_value(library.get_section(&section.file.path).unwrap().to_serialized_basic(library))
.unwrap(),
);
} }
Box::new(move |args| -> Result<Value> {
let path = required_arg!(
String,
args.get("path"),
"`get_section` requires a `path` argument with a string value"
);
let metadata_only = args
.get("metadata_only")
.map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));
let container = if metadata_only { &sections_basic } else { &sections };
match container.get(&path) {
Some(p) => Ok(p.clone()),
None => Err(format!("Section `{}` not found.", path).into()),
}
})
} }
impl TeraFn for GetUrl {
pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> GlobalFn { fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
Box::new(move |args| -> Result<Value> {
let cachebust = let cachebust =
args.get("cachebust").map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false)); args.get("cachebust").map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));
@ -105,7 +61,7 @@ pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> Glob
"`get_url` requires a `path` argument with a string value" "`get_url` requires a `path` argument with a string value"
); );
if path.starts_with("./") { if path.starts_with("./") {
match resolve_internal_link(&path, &permalinks) { match resolve_internal_link(&path, &self.permalinks) {
Ok(url) => Ok(to_value(url).unwrap()), Ok(url) => Ok(to_value(url).unwrap()),
Err(_) => { Err(_) => {
Err(format!("Could not resolve URL for link `{}` not found.", path).into()) Err(format!("Could not resolve URL for link `{}` not found.", path).into())
@ -113,93 +69,35 @@ pub fn make_get_url(permalinks: HashMap<String, String>, config: Config) -> Glob
} }
} else { } else {
// anything else // anything else
let mut permalink = config.make_permalink(&path); let mut permalink = self.config.make_permalink(&path);
if !trailing_slash && permalink.ends_with('/') { if !trailing_slash && permalink.ends_with('/') {
permalink.pop(); // Removes the slash permalink.pop(); // Removes the slash
} }
if cachebust { if cachebust {
permalink = format!("{}?t={}", permalink, config.build_timestamp.unwrap()); permalink = format!("{}?t={}", permalink, self.config.build_timestamp.unwrap());
} }
Ok(to_value(permalink).unwrap()) Ok(to_value(permalink).unwrap())
} }
})
}
pub fn make_get_taxonomy(all_taxonomies: &[Taxonomy], library: &Library) -> GlobalFn {
let mut taxonomies = HashMap::new();
for taxonomy in all_taxonomies {
taxonomies
.insert(taxonomy.kind.name.clone(), to_value(taxonomy.to_serialized(library)).unwrap());
} }
Box::new(move |args| -> Result<Value> {
let kind = required_arg!(
String,
args.get("kind"),
"`get_taxonomy` requires a `kind` argument with a string value"
);
let container = match taxonomies.get(&kind) {
Some(c) => c,
None => {
return Err(format!(
"`get_taxonomy` received an unknown taxonomy as kind: {}",
kind
)
.into());
}
};
Ok(to_value(container).unwrap())
})
} }
pub fn make_get_taxonomy_url(all_taxonomies: &[Taxonomy]) -> GlobalFn { #[derive(Debug)]
let mut taxonomies = HashMap::new(); pub struct ResizeImage {
for taxonomy in all_taxonomies { imageproc: Arc<Mutex<imageproc::Processor>>,
let mut items = HashMap::new(); }
for item in &taxonomy.items { impl ResizeImage {
items.insert(item.name.clone(), item.permalink.clone()); pub fn new(imageproc: Arc<Mutex<imageproc::Processor>>) -> Self {
} Self { imageproc }
taxonomies.insert(taxonomy.kind.name.clone(), items);
} }
Box::new(move |args| -> Result<Value> {
let kind = required_arg!(
String,
args.get("kind"),
"`get_taxonomy_url` requires a `kind` argument with a string value"
);
let name = required_arg!(
String,
args.get("name"),
"`get_taxonomy_url` requires a `name` argument with a string value"
);
let container = match taxonomies.get(&kind) {
Some(c) => c,
None => {
return Err(format!(
"`get_taxonomy_url` received an unknown taxonomy as kind: {}",
kind
)
.into());
}
};
if let Some(permalink) = container.get(&name) {
return Ok(to_value(permalink).unwrap());
}
Err(format!("`get_taxonomy_url`: couldn't find `{}` in `{}` taxonomy", name, kind).into())
})
} }
pub fn make_resize_image(imageproc: Arc<Mutex<imageproc::Processor>>) -> GlobalFn { static DEFAULT_OP: &'static str = "fill";
static DEFAULT_OP: &'static str = "fill"; static DEFAULT_FMT: &'static str = "auto";
static DEFAULT_FMT: &'static str = "auto"; const DEFAULT_Q: u8 = 75;
const DEFAULT_Q: u8 = 75;
Box::new(move |args| -> Result<Value> { impl TeraFn for ResizeImage {
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
let path = required_arg!( let path = required_arg!(
String, String,
args.get("path"), args.get("path"),
@ -229,7 +127,7 @@ pub fn make_resize_image(imageproc: Arc<Mutex<imageproc::Processor>>) -> GlobalF
return Err("`resize_image`: `quality` must be in range 1-100".to_string().into()); return Err("`resize_image`: `quality` must be in range 1-100".to_string().into());
} }
let mut imageproc = imageproc.lock().unwrap(); let mut imageproc = self.imageproc.lock().unwrap();
if !imageproc.source_exists(&path) { if !imageproc.source_exists(&path) {
return Err(format!("`resize_image`: Cannot find path: {}", path).into()); return Err(format!("`resize_image`: Cannot find path: {}", path).into());
} }
@ -239,16 +137,160 @@ pub fn make_resize_image(imageproc: Arc<Mutex<imageproc::Processor>>) -> GlobalF
let url = imageproc.insert(imageop); let url = imageproc.insert(imageop);
to_value(url).map_err(|err| err.into()) to_value(url).map_err(|err| err.into())
}) }
}
#[derive(Debug)]
pub struct GetTaxonomyUrl {
taxonomies: HashMap<String, HashMap<String, String>>,
}
impl GetTaxonomyUrl {
pub fn new(all_taxonomies: &[Taxonomy]) -> Self {
let mut taxonomies = HashMap::new();
for taxonomy in all_taxonomies {
let mut items = HashMap::new();
for item in &taxonomy.items {
items.insert(item.name.clone(), item.permalink.clone());
}
taxonomies.insert(taxonomy.kind.name.clone(), items);
}
Self { taxonomies }
}
}
impl TeraFn for GetTaxonomyUrl {
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
let kind = required_arg!(
String,
args.get("kind"),
"`get_taxonomy_url` requires a `kind` argument with a string value"
);
let name = required_arg!(
String,
args.get("name"),
"`get_taxonomy_url` requires a `name` argument with a string value"
);
let container = match self.taxonomies.get(&kind) {
Some(c) => c,
None => {
return Err(format!(
"`get_taxonomy_url` received an unknown taxonomy as kind: {}",
kind
)
.into());
}
};
if let Some(permalink) = container.get(&name) {
return Ok(to_value(permalink).unwrap());
}
Err(format!("`get_taxonomy_url`: couldn't find `{}` in `{}` taxonomy", name, kind).into())
}
}
#[derive(Debug)]
pub struct GetPage {
base_path: PathBuf,
library: Arc<RwLock<Library>>,
}
impl GetPage {
pub fn new(base_path: PathBuf, library: Arc<RwLock<Library>>) -> Self {
Self { base_path: base_path.join("content"), library }
}
}
impl TeraFn for GetPage {
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
let path = required_arg!(
String,
args.get("path"),
"`get_page` requires a `path` argument with a string value"
);
let full_path = self.base_path.join(&path);
let library = self.library.read().unwrap();
match library.get_page(&full_path) {
Some(p) => Ok(to_value(p.to_serialized(&library)).unwrap()),
None => Err(format!("Page `{}` not found.", path).into()),
}
}
}
#[derive(Debug)]
pub struct GetSection {
base_path: PathBuf,
library: Arc<RwLock<Library>>,
}
impl GetSection {
pub fn new(base_path: PathBuf, library: Arc<RwLock<Library>>) -> Self {
Self { base_path: base_path.join("content"), library }
}
}
impl TeraFn for GetSection {
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
let path = required_arg!(
String,
args.get("path"),
"`get_section` requires a `path` argument with a string value"
);
let metadata_only = args
.get("metadata_only")
.map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));
let full_path = self.base_path.join(&path);
let library = self.library.read().unwrap();
match library.get_section(&full_path) {
Some(s) => {
if metadata_only {
Ok(to_value(s.to_serialized_basic(&library)).unwrap())
} else {
Ok(to_value(s.to_serialized(&library)).unwrap())
}
}
None => Err(format!("Section `{}` not found.", path).into()),
}
}
}
#[derive(Debug)]
pub struct GetTaxonomy {
library: Arc<RwLock<Library>>,
taxonomies: HashMap<String, Taxonomy>,
}
impl GetTaxonomy {
pub fn new(all_taxonomies: Vec<Taxonomy>, library: Arc<RwLock<Library>>) -> Self {
let mut taxonomies = HashMap::new();
for taxo in all_taxonomies {
taxonomies.insert(taxo.kind.name.clone(), taxo);
}
Self { taxonomies, library }
}
}
impl TeraFn for GetTaxonomy {
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
let kind = required_arg!(
String,
args.get("kind"),
"`get_taxonomy` requires a `kind` argument with a string value"
);
match self.taxonomies.get(&kind) {
Some(t) => Ok(to_value(t.to_serialized(&self.library.read().unwrap())).unwrap()),
None => {
Err(format!("`get_taxonomy` received an unknown taxonomy as kind: {}", kind).into())
}
}
}
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{make_get_taxonomy, make_get_taxonomy_url, make_get_url, make_trans}; use super::{GetTaxonomy, GetTaxonomyUrl, GetUrl, Trans};
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use tera::{to_value, Value}; use tera::{to_value, Function, Value};
use config::{Config, Taxonomy as TaxonomyConfig}; use config::{Config, Taxonomy as TaxonomyConfig};
use library::{Library, Taxonomy, TaxonomyItem}; use library::{Library, Taxonomy, TaxonomyItem};
@ -256,56 +298,67 @@ mod tests {
#[test] #[test]
fn can_add_cachebust_to_url() { fn can_add_cachebust_to_url() {
let config = Config::default(); let config = Config::default();
let static_fn = make_get_url(HashMap::new(), config); let static_fn = GetUrl::new(config, HashMap::new());
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("path".to_string(), to_value("app.css").unwrap()); args.insert("path".to_string(), to_value("app.css").unwrap());
args.insert("cachebust".to_string(), to_value(true).unwrap()); args.insert("cachebust".to_string(), to_value(true).unwrap());
assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css?t=1"); assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css?t=1");
} }
#[test] #[test]
fn can_add_trailing_slashes() { fn can_add_trailing_slashes() {
let config = Config::default(); let config = Config::default();
let static_fn = make_get_url(HashMap::new(), config); let static_fn = GetUrl::new(config, HashMap::new());
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("path".to_string(), to_value("app.css").unwrap()); args.insert("path".to_string(), to_value("app.css").unwrap());
args.insert("trailing_slash".to_string(), to_value(true).unwrap()); args.insert("trailing_slash".to_string(), to_value(true).unwrap());
assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css/"); assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css/");
} }
#[test] #[test]
fn can_add_slashes_and_cachebust() { fn can_add_slashes_and_cachebust() {
let config = Config::default(); let config = Config::default();
let static_fn = make_get_url(HashMap::new(), config); let static_fn = GetUrl::new(config, HashMap::new());
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("path".to_string(), to_value("app.css").unwrap()); args.insert("path".to_string(), to_value("app.css").unwrap());
args.insert("trailing_slash".to_string(), to_value(true).unwrap()); args.insert("trailing_slash".to_string(), to_value(true).unwrap());
args.insert("cachebust".to_string(), to_value(true).unwrap()); args.insert("cachebust".to_string(), to_value(true).unwrap());
assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css/?t=1"); assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css/?t=1");
} }
#[test] #[test]
fn can_link_to_some_static_file() { fn can_link_to_some_static_file() {
let config = Config::default(); let config = Config::default();
let static_fn = make_get_url(HashMap::new(), config); let static_fn = GetUrl::new(config, HashMap::new());
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("path".to_string(), to_value("app.css").unwrap()); args.insert("path".to_string(), to_value("app.css").unwrap());
assert_eq!(static_fn(args).unwrap(), "http://a-website.com/app.css"); assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css");
} }
#[test] #[test]
fn can_get_taxonomy() { fn can_get_taxonomy() {
let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; let config = Config::default();
let library = Library::new(0, 0, false); let taxo_config = TaxonomyConfig {
let tag = TaxonomyItem::new("Programming", "tags", &Config::default(), vec![], &library); name: "tags".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
};
let library = Arc::new(RwLock::new(Library::new(0, 0, false)));
let tag = TaxonomyItem::new(
"Programming",
&taxo_config,
&config,
vec![],
&library.read().unwrap(),
);
let tags = Taxonomy { kind: taxo_config, items: vec![tag] }; let tags = Taxonomy { kind: taxo_config, items: vec![tag] };
let taxonomies = vec![tags.clone()]; let taxonomies = vec![tags.clone()];
let static_fn = make_get_taxonomy(&taxonomies, &library); let static_fn = GetTaxonomy::new(taxonomies.clone(), library.clone());
// can find it correctly // can find it correctly
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("kind".to_string(), to_value("tags").unwrap()); args.insert("kind".to_string(), to_value("tags").unwrap());
let res = static_fn(args).unwrap(); let res = static_fn.call(&args).unwrap();
let res_obj = res.as_object().unwrap(); let res_obj = res.as_object().unwrap();
assert_eq!(res_obj["kind"], to_value(tags.kind).unwrap()); assert_eq!(res_obj["kind"], to_value(tags.kind).unwrap());
assert_eq!(res_obj["items"].clone().as_array().unwrap().len(), 1); assert_eq!(res_obj["items"].clone().as_array().unwrap().len(), 1);
@ -329,31 +382,36 @@ mod tests {
// and errors if it can't find it // and errors if it can't find it
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("kind".to_string(), to_value("something-else").unwrap()); args.insert("kind".to_string(), to_value("something-else").unwrap());
assert!(static_fn(args).is_err()); assert!(static_fn.call(&args).is_err());
} }
#[test] #[test]
fn can_get_taxonomy_url() { fn can_get_taxonomy_url() {
let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() }; let config = Config::default();
let taxo_config = TaxonomyConfig {
name: "tags".to_string(),
lang: config.default_language.clone(),
..TaxonomyConfig::default()
};
let library = Library::new(0, 0, false); let library = Library::new(0, 0, false);
let tag = TaxonomyItem::new("Programming", "tags", &Config::default(), vec![], &library); let tag = TaxonomyItem::new("Programming", &taxo_config, &config, vec![], &library);
let tags = Taxonomy { kind: taxo_config, items: vec![tag] }; let tags = Taxonomy { kind: taxo_config, items: vec![tag] };
let taxonomies = vec![tags.clone()]; let taxonomies = vec![tags.clone()];
let static_fn = make_get_taxonomy_url(&taxonomies); let static_fn = GetTaxonomyUrl::new(&taxonomies);
// can find it correctly // can find it correctly
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("kind".to_string(), to_value("tags").unwrap()); args.insert("kind".to_string(), to_value("tags").unwrap());
args.insert("name".to_string(), to_value("Programming").unwrap()); args.insert("name".to_string(), to_value("Programming").unwrap());
assert_eq!( assert_eq!(
static_fn(args).unwrap(), static_fn.call(&args).unwrap(),
to_value("http://a-website.com/tags/programming/").unwrap() to_value("http://a-website.com/tags/programming/").unwrap()
); );
// and errors if it can't find it // and errors if it can't find it
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("kind".to_string(), to_value("tags").unwrap()); args.insert("kind".to_string(), to_value("tags").unwrap());
args.insert("name".to_string(), to_value("random").unwrap()); args.insert("name".to_string(), to_value("random").unwrap());
assert!(static_fn(args).is_err()); assert!(static_fn.call(&args).is_err());
} }
#[test] #[test]
@ -372,16 +430,16 @@ title = "A title"
"#; "#;
let config = Config::parse(trans_config).unwrap(); let config = Config::parse(trans_config).unwrap();
let static_fn = make_trans(config); let static_fn = Trans::new(config);
let mut args = HashMap::new(); let mut args = HashMap::new();
args.insert("key".to_string(), to_value("title").unwrap()); args.insert("key".to_string(), to_value("title").unwrap());
assert_eq!(static_fn(args.clone()).unwrap(), "Un titre"); assert_eq!(static_fn.call(&args).unwrap(), "Un titre");
args.insert("lang".to_string(), to_value("en").unwrap()); args.insert("lang".to_string(), to_value("en").unwrap());
assert_eq!(static_fn(args.clone()).unwrap(), "A title"); assert_eq!(static_fn.call(&args).unwrap(), "A title");
args.insert("lang".to_string(), to_value("fr").unwrap()); args.insert("lang".to_string(), to_value("fr").unwrap());
assert_eq!(static_fn(args.clone()).unwrap(), "Un titre"); assert_eq!(static_fn.call(&args).unwrap(), "Un titre");
} }
} }

View file

@ -25,21 +25,34 @@ pub mod global_fns;
use tera::{Context, Tera}; use tera::{Context, Tera};
use errors::{Result, ResultExt}; use errors::{Error, Result};
lazy_static! { lazy_static! {
pub static ref ZOLA_TERA: Tera = { pub static ref ZOLA_TERA: Tera = {
let mut tera = Tera::default(); let mut tera = Tera::default();
tera.add_raw_templates(vec![ tera.add_raw_templates(vec![
("404.html", include_str!("builtins/404.html")), ("__zola_builtins/404.html", include_str!("builtins/404.html")),
("rss.xml", include_str!("builtins/rss.xml")), ("__zola_builtins/rss.xml", include_str!("builtins/rss.xml")),
("sitemap.xml", include_str!("builtins/sitemap.xml")), ("__zola_builtins/sitemap.xml", include_str!("builtins/sitemap.xml")),
("robots.txt", include_str!("builtins/robots.txt")), ("__zola_builtins/robots.txt", include_str!("builtins/robots.txt")),
("anchor-link.html", include_str!("builtins/anchor-link.html")), (
("shortcodes/youtube.html", include_str!("builtins/shortcodes/youtube.html")), "__zola_builtins/split_sitemap_index.xml",
("shortcodes/vimeo.html", include_str!("builtins/shortcodes/vimeo.html")), include_str!("builtins/split_sitemap_index.xml"),
("shortcodes/gist.html", include_str!("builtins/shortcodes/gist.html")), ),
("shortcodes/streamable.html", include_str!("builtins/shortcodes/streamable.html")), ("__zola_builtins/anchor-link.html", include_str!("builtins/anchor-link.html")),
(
"__zola_builtins/shortcodes/youtube.html",
include_str!("builtins/shortcodes/youtube.html"),
),
(
"__zola_builtins/shortcodes/vimeo.html",
include_str!("builtins/shortcodes/vimeo.html"),
),
("__zola_builtins/shortcodes/gist.html", include_str!("builtins/shortcodes/gist.html")),
(
"__zola_builtins/shortcodes/streamable.html",
include_str!("builtins/shortcodes/streamable.html"),
),
("internal/alias.html", include_str!("builtins/internal/alias.html")), ("internal/alias.html", include_str!("builtins/internal/alias.html")),
]) ])
.unwrap(); .unwrap();
@ -56,6 +69,6 @@ pub fn render_redirect_template(url: &str, tera: &Tera) -> Result<String> {
let mut context = Context::new(); let mut context = Context::new();
context.insert("url", &url); context.insert("url", &url);
tera.render("internal/alias.html", &context) tera.render("internal/alias.html", context)
.chain_err(|| format!("Failed to render alias for '{}'", url)) .map_err(|e| Error::chain(format!("Failed to render alias for '{}'", url), e))
} }

View file

@ -5,7 +5,7 @@ authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
[dependencies] [dependencies]
errors = { path = "../errors" } errors = { path = "../errors" }
tera = "0.11" tera = "1.0.0-alpha.3"
unicode-segmentation = "1.2" unicode-segmentation = "1.2"
walkdir = "2" walkdir = "2"
toml = "0.4" toml = "0.4"

View file

@ -4,7 +4,7 @@ use std::path::{Path, PathBuf};
use std::time::SystemTime; use std::time::SystemTime;
use walkdir::WalkDir; use walkdir::WalkDir;
use errors::{Result, ResultExt}; use errors::{Error, Result};
pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result<bool> { pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result<bool> {
let canonical_path = path let canonical_path = path
@ -19,7 +19,8 @@ pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result<bool> {
/// Create a file with the content given /// Create a file with the content given
pub fn create_file(path: &Path, content: &str) -> Result<()> { pub fn create_file(path: &Path, content: &str) -> Result<()> {
let mut file = File::create(&path).chain_err(|| format!("Failed to create {:?}", path))?; let mut file =
File::create(&path).map_err(|e| Error::chain(format!("Failed to create {:?}", path), e))?;
file.write_all(content.as_bytes())?; file.write_all(content.as_bytes())?;
Ok(()) Ok(())
} }
@ -36,8 +37,9 @@ pub fn ensure_directory_exists(path: &Path) -> Result<()> {
/// exists before creating it /// exists before creating it
pub fn create_directory(path: &Path) -> Result<()> { pub fn create_directory(path: &Path) -> Result<()> {
if !path.exists() { if !path.exists() {
create_dir_all(path) create_dir_all(path).map_err(|e| {
.chain_err(|| format!("Was not able to create folder {}", path.display()))?; Error::chain(format!("Was not able to create folder {}", path.display()), e)
})?;
} }
Ok(()) Ok(())
} }
@ -46,7 +48,7 @@ pub fn create_directory(path: &Path) -> Result<()> {
pub fn read_file(path: &Path) -> Result<String> { pub fn read_file(path: &Path) -> Result<String> {
let mut content = String::new(); let mut content = String::new();
File::open(path) File::open(path)
.chain_err(|| format!("Failed to open '{:?}'", path.display()))? .map_err(|e| Error::chain(format!("Failed to open '{:?}'", path.display()), e))?
.read_to_string(&mut content)?; .read_to_string(&mut content)?;
// Remove utf-8 BOM if any. // Remove utf-8 BOM if any.
@ -57,6 +59,19 @@ pub fn read_file(path: &Path) -> Result<String> {
Ok(content) Ok(content)
} }
/// Return the content of a file, with error handling added.
/// The default error message is overwritten by the message given.
/// That means it is allocation 2 strings, oh well
pub fn read_file_with_error(path: &Path, message: &str) -> Result<String> {
let res = read_file(&path);
if res.is_ok() {
return res;
}
let mut err = Error::msg(message);
err.source = res.unwrap_err().source;
Err(err)
}
/// Looks into the current folder for the path and see if there's anything that is not a .md /// Looks into the current folder for the path and see if there's anything that is not a .md
/// file. Those will be copied next to the rendered .html file /// file. Those will be copied next to the rendered .html file
pub fn find_related_assets(path: &Path) -> Vec<PathBuf> { pub fn find_related_assets(path: &Path) -> Vec<PathBuf> {

View file

@ -14,3 +14,4 @@ pub mod fs;
pub mod net; pub mod net;
pub mod site; pub mod site;
pub mod templates; pub mod templates;
pub mod vec;

View file

@ -11,7 +11,7 @@ macro_rules! render_default_tpl {
let mut context = Context::new(); let mut context = Context::new();
context.insert("filename", $filename); context.insert("filename", $filename);
context.insert("url", $url); context.insert("url", $url);
Tera::one_off(DEFAULT_TPL, &context, true).map_err(|e| e.into()) Tera::one_off(DEFAULT_TPL, context, true).map_err(|e| e.into())
}}; }};
} }
@ -22,15 +22,26 @@ macro_rules! render_default_tpl {
pub fn render_template( pub fn render_template(
name: &str, name: &str,
tera: &Tera, tera: &Tera,
context: &Context, context: Context,
theme: &Option<String>, theme: &Option<String>,
) -> Result<String> { ) -> Result<String> {
// check if it is in the templates
if tera.templates.contains_key(name) { if tera.templates.contains_key(name) {
return tera.render(name, context).map_err(|e| e.into()); return tera.render(name, context).map_err(|e| e.into());
} }
// check if it is part of a theme
if let Some(ref t) = *theme { if let Some(ref t) = *theme {
return tera.render(&format!("{}/templates/{}", t, name), context).map_err(|e| e.into()); let theme_template_name = format!("{}/templates/{}", t, name);
if tera.templates.contains_key(&theme_template_name) {
return tera.render(&theme_template_name, context).map_err(|e| e.into());
}
}
// check if it is part of ZOLA_TERA defaults
let default_name = format!("__zola_builtins/{}", name);
if tera.templates.contains_key(&default_name) {
return tera.render(&default_name, context).map_err(|e| e.into());
} }
// maybe it's a default one? // maybe it's a default one?

View file

@ -0,0 +1,44 @@
pub trait InsertMany {
type Element;
fn insert_many(&mut self, elem_to_insert: Vec<(usize, Self::Element)>);
}
impl<T> InsertMany for Vec<T> {
type Element = T;
/// Efficiently insert multiple element in their specified index.
/// The elements should sorted in ascending order by their index.
///
/// This is done in O(n) time.
fn insert_many(&mut self, elem_to_insert: Vec<(usize, T)>) {
let mut inserted = vec![];
let mut last_idx = 0;
for (idx, elem) in elem_to_insert.into_iter() {
let head_len = idx - last_idx;
inserted.extend(self.splice(0..head_len, std::iter::empty()));
inserted.push(elem);
last_idx = idx;
}
let len = self.len();
inserted.extend(self.drain(0..len));
*self = inserted;
}
}
#[cfg(test)]
mod test {
use super::InsertMany;
#[test]
fn insert_many_works() {
let mut v = vec![1, 2, 3, 4, 5];
v.insert_many(vec![(0, 0), (2, -1), (5, 6)]);
assert_eq!(v, &[0, 1, 2, -1, 3, 4, 5, 6]);
let mut v2 = vec![1, 2, 3, 4, 5];
v2.insert_many(vec![(0, 0), (2, -1)]);
assert_eq!(v2, &[0, 1, 2, -1, 3, 4, 5]);
}
}

View file

@ -0,0 +1 @@
.hello {}

View file

@ -0,0 +1,4 @@
Number,Title
1,Gutenberg
2,Printing
3,Typewriter,ExtraBadColumn
1 Number,Title
2 1,Gutenberg
3 2,Printing
4 3,Typewriter,ExtraBadColumn

View file

@ -16,6 +16,9 @@ languages = [
] ]
``` ```
If you want to use per-language taxonomies, ensure you set the `lang` field in their
configuration.
## Content ## Content
Once the languages are added in, you can start to translate your content. Zola Once the languages are added in, you can start to translate your content. Zola
uses the filename to detect the language: uses the filename to detect the language:

View file

@ -102,6 +102,6 @@ where you want the summary to end and the content up to that point will be also
available separately in the available separately in the
[template](./documentation/templates/pages-sections.md#page-variables). [template](./documentation/templates/pages-sections.md#page-variables).
An anchor link to this position named `continue-reading` is created so you can link An anchor link to this position named `continue-reading` is created, wrapped in a paragraph
directly to it if needed for example: with a `zola-continue-reading` id, so you can link directly to it if needed for example:
`<a href="{{ page.permalink }}#continue-reading">Continue Reading</a>` `<a href="{{ page.permalink }}#continue-reading">Continue Reading</a>`

View file

@ -36,6 +36,10 @@ That's it, Zola will now recognise this template as a shortcode named `youtube`
The markdown renderer will wrap an inline HTML node like `<a>` or `<span>` into a paragraph. If you want to disable that, The markdown renderer will wrap an inline HTML node like `<a>` or `<span>` into a paragraph. If you want to disable that,
simply wrap your shortcode in a `div`. simply wrap your shortcode in a `div`.
Shortcodes are rendered before parsing the markdown so it doesn't have access to the table of contents. Because of that,
you also cannot use the `get_page`/`get_section`/`get_taxonomy` global function. It might work while running `zola serve` because
it has been loaded but it will fail during `zola build`.
## Using shortcodes ## Using shortcodes
There are two kinds of shortcodes: There are two kinds of shortcodes:

View file

@ -105,6 +105,7 @@ Here is a full list of the supported languages and the short names you can use:
- Textile -> ["textile"] - Textile -> ["textile"]
- XML -> ["xml", "xsd", "xslt", "tld", "dtml", "rss", "opml", "svg"] - XML -> ["xml", "xsd", "xslt", "tld", "dtml", "rss", "opml", "svg"]
- YAML -> ["yaml", "yml", "sublime-syntax"] - YAML -> ["yaml", "yml", "sublime-syntax"]
- PowerShell -> ["ps1", "psm1", "psd1"]
- SWI-Prolog -> ["pro"] - SWI-Prolog -> ["pro"]
- Reason -> ["re", "rei"] - Reason -> ["re", "rei"]
- CMake C Header -> ["h.in"] - CMake C Header -> ["h.in"]

View file

@ -5,7 +5,7 @@ weight = 60
Each page/section will automatically generate a table of contents for itself based on the headers present. Each page/section will automatically generate a table of contents for itself based on the headers present.
It is available in the template through `section.toc` and `page.toc`. It is available in the template through the `toc` variable.
You can view the [template variables](./documentation/templates/pages-sections.md#table-of-contents) You can view the [template variables](./documentation/templates/pages-sections.md#table-of-contents)
documentation for information on its structure. documentation for information on its structure.
@ -13,7 +13,7 @@ Here is an example of using that field to render a 2-level table of contents:
```jinja2 ```jinja2
<ul> <ul>
{% for h1 in page.toc %} {% for h1 in toc %}
<li> <li>
<a href="{{h1.permalink | safe}}">{{ h1.title }}</a> <a href="{{h1.permalink | safe}}">{{ h1.title }}</a>
{% if h1.children %} {% if h1.children %}

View file

@ -7,13 +7,14 @@ Zola has built-in support for taxonomies.
The first step is to define the taxonomies in your [config.toml](./documentation/getting-started/configuration.md). The first step is to define the taxonomies in your [config.toml](./documentation/getting-started/configuration.md).
A taxonomy has 4 variables: A taxonomy has 5 variables:
- `name`: a required string that will be used in the URLs, usually the plural version (i.e. tags, categories etc) - `name`: a required string that will be used in the URLs, usually the plural version (i.e. tags, categories etc)
- `paginate_by`: if this is set to a number, each term page will be paginated by this much. - `paginate_by`: if this is set to a number, each term page will be paginated by this much.
- `paginate_path`: if set, will be the path used by paginated page and the page number will be appended after it. - `paginate_path`: if set, will be the path used by paginated page and the page number will be appended after it.
For example the default would be page/1 For example the default would be page/1
- `rss`: if set to `true`, a RSS feed will be generated for each individual term. - `rss`: if set to `true`, a RSS feed will be generated for each individual term.
- `lang`: only set this if you are making a multilingual site and want to indicate which language this taxonomy is for
Once this is done, you can then set taxonomies in your content and Zola will pick Once this is done, you can then set taxonomies in your content and Zola will pick
them up: them up:

View file

@ -7,6 +7,13 @@ By default, GitHub Pages uses Jekyll (A ruby based static site generator),
but you can use whatever you want provided you have an `index.html` file in the root of a branch called `gh-pages`. but you can use whatever you want provided you have an `index.html` file in the root of a branch called `gh-pages`.
That branch name can also be manually changed in the settings of a repository. That branch name can also be manually changed in the settings of a repository.
We can use any CI server to build and deploy our site. For example:
* [Github Actions](https://github.com/shalzz/zola-deploy-action)
* [Travis CI](#travis-ci)
## Travis CI
We are going to use [TravisCI](https://travis-ci.org) to automatically publish the site. If you are not using Travis already, We are going to use [TravisCI](https://travis-ci.org) to automatically publish the site. If you are not using Travis already,
you will need to login with the GitHub OAuth and activate Travis for the repository. you will need to login with the GitHub OAuth and activate Travis for the repository.
Don't forget to also check if your repository allows GitHub Pages in its settings. Don't forget to also check if your repository allows GitHub Pages in its settings.

View file

@ -36,6 +36,14 @@ $ zola build --base-url $DEPLOY_URL
This is useful for example when you want to deploy previews of a site to a dynamic URL, such as Netlify This is useful for example when you want to deploy previews of a site to a dynamic URL, such as Netlify
deploy previews. deploy previews.
You can override the default `base_path` by passing a new directory to the `base-path` flag. If no `base-path` flag
is provided, zola defaults to your current working directory. This is useful if your zola project is located in
a different directory from where you're executing zola.
```bash
$ zola build --base-path /path/to/zola/site
```
You can override the default output directory 'public' by passing another value to the `output-dir` flag. You can override the default output directory 'public' by passing another value to the `output-dir` flag.
```bash ```bash
@ -58,6 +66,8 @@ if you are running zola in a Docker container.
In the event you don't want zola to run a local webserver, you can use the `--watch-only` flag. In the event you don't want zola to run a local webserver, you can use the `--watch-only` flag.
Before starting, it will delete the public directory to ensure it starts from a clean slate.
```bash ```bash
$ zola serve $ zola serve
$ zola serve --port 2000 $ zola serve --port 2000
@ -65,6 +75,7 @@ $ zola serve --interface 0.0.0.0
$ zola serve --interface 0.0.0.0 --port 2000 $ zola serve --interface 0.0.0.0 --port 2000
$ zola serve --interface 0.0.0.0 --base-url 127.0.0.1 $ zola serve --interface 0.0.0.0 --base-url 127.0.0.1
$ zola serve --interface 0.0.0.0 --port 2000 --output-dir www/public $ zola serve --interface 0.0.0.0 --port 2000 --output-dir www/public
$ zola serve --interface 0.0.0.0 --port 2000 --base-path mysite/ --output-dir mysite/www/public
$ zola serve --watch-only $ zola serve --watch-only
``` ```

View file

@ -108,6 +108,7 @@ Zola currently has the following highlight themes available:
- [classic-modified](https://tmtheme-editor.herokuapp.com/#!/editor/theme/Classic%20Modified) - [classic-modified](https://tmtheme-editor.herokuapp.com/#!/editor/theme/Classic%20Modified)
- [demain](https://tmtheme-editor.herokuapp.com/#!/editor/theme/Demain) - [demain](https://tmtheme-editor.herokuapp.com/#!/editor/theme/Demain)
- [dimmed-fluid](https://tmtheme-editor.herokuapp.com/#!/editor/theme/Dimmed%20Fluid) - [dimmed-fluid](https://tmtheme-editor.herokuapp.com/#!/editor/theme/Dimmed%20Fluid)
- [dracula](https://draculatheme.com/)
- [gray-matter-dark](https://tmtheme-editor.herokuapp.com/#!/editor/theme/Gray%20Matter%20Dark) - [gray-matter-dark](https://tmtheme-editor.herokuapp.com/#!/editor/theme/Gray%20Matter%20Dark)
- [gruvbox-dark](https://github.com/morhetz/gruvbox) - [gruvbox-dark](https://github.com/morhetz/gruvbox)
- [gruvbox-light](https://github.com/morhetz/gruvbox) - [gruvbox-light](https://github.com/morhetz/gruvbox)

View file

@ -27,7 +27,7 @@ $ yay -S zola-bin
Zola is available on snapcraft: Zola is available on snapcraft:
```bash ```bash
$ snap install --edge --classic zola $ snap install --edge zola
``` ```
## Windows ## Windows
@ -44,11 +44,26 @@ And [Chocolatey](https://chocolatey.org/):
$ choco install zola $ choco install zola
``` ```
Zola does not work in PowerShell ISE.
## From source ## From source
To build it from source, you will need to have Git, [Rust (at least 1.31) and Cargo](https://www.rust-lang.org/) To build it from source, you will need to have Git, [Rust (at least 1.31) and Cargo](https://www.rust-lang.org/)
installed. You will also need additional dependencies to compile [libsass](https://github.com/sass/libsass): installed. You will also need additional dependencies to compile [libsass](https://github.com/sass/libsass):
- OSX, Linux and other Unix: `make` (`gmake` on BSDs), `g++`, `libssl-dev` - OSX, Linux and other Unix: `make` (`gmake` on BSDs), `g++`, `libssl-dev`
- NixOS: Create a `shell.nix` file in the root of the cloned project with the following contents:
```nix
with import <nixpkgs> {};
pkgs.mkShell {
buildInputs = [
libsass
openssl
pkgconfig
];
}
```
- Then invoke `nix-shell`. This opens a shell with the above dependencies. You then run `cargo build --release` to build the project.
- Windows (a bit trickier): updated `MSVC` and overall updated VS installation - Windows (a bit trickier): updated `MSVC` and overall updated VS installation
From a terminal, you can now run the following command: From a terminal, you can now run the following command:

View file

@ -142,38 +142,41 @@ the value should be the same as the one in the front-matter, not the slugified v
Gets the whole taxonomy of a specific kind. Gets the whole taxonomy of a specific kind.
```jinja2 ```jinja2
{% set categories = get_taxonomy_url(kind="categories") %} {% set categories = get_taxonomy(kind="categories") %}
``` ```
### `load_data` ### `load_data`
Loads data from a file or URL. Supported file types include *toml*, *json* and *csv*. Loads data from a file or URL. Supported file types include *toml*, *json* and *csv*.
Any other file type will be loaded as plain text.
The `path` argument specifies the path to the data file relative to your content directory. The `path` argument specifies the path to the data file relative to your base directory, where your `config.toml` is.
As a security precaution, if this file is outside of the main site directory, your site will fail to build. As a security precaution, if this file is outside of the main site directory, your site will fail to build.
```jinja2 ```jinja2
{% set data = load_data(path="blog/story/data.toml") %} {% set data = load_data(path="content/blog/story/data.toml") %}
``` ```
The optional `format` argument allows you to specify and override which data type is contained The optional `format` argument allows you to specify and override which data type is contained
within the file specified in the `path` argument. Valid entries are *"toml"*, *"json"*, *"csv"* within the file specified in the `path` argument. Valid entries are `toml`, `json`, `csv`
or *"plain"*. If the `format` argument isn't specified, then the paths extension is used. or `plain`. If the `format` argument isn't specified, then the paths extension is used.
```jinja2 ```jinja2
{% set data = load_data(path="blog/story/data.txt", format="json") %} {% set data = load_data(path="content/blog/story/data.txt", format="json") %}
``` ```
Use the `plain` format for when your file has a toml/json/csv extension but you want to load it as plain text.
For *toml* and *json* the data is loaded into a structure matching the original data file, For *toml* and *json* the data is loaded into a structure matching the original data file,
however for *csv* there is no native notion of such a structure. Instead the data is seperated however for *csv* there is no native notion of such a structure. Instead the data is separated
into a data structure containing *headers* and *records*. See the example below to see into a data structure containing *headers* and *records*. See the example below to see
how this works. how this works.
In the template: In the template:
```jinja2 ```jinja2
{% set data = load_data(path="blog/story/data.csv") %} {% set data = load_data(path="content/blog/story/data.csv") %}
``` ```
In the *blog/story/data.csv* file: In the *content/blog/story/data.csv* file:
```csv ```csv
Number, Title Number, Title
1,Gutenberg 1,Gutenberg
@ -194,14 +197,14 @@ template:
#### Remote content #### Remote content
Instead of using a file, you can load data from a remote URL. This can be done by specifying a `url` parameter to `load_data` rather than `file`. Instead of using a file, you can load data from a remote URL. This can be done by specifying a `url` parameter to `load_data` rather than `path`.
```jinja2 ```jinja2
{% set response = load_data(url="https://api.github.com/repos/getzola/zola") %} {% set response = load_data(url="https://api.github.com/repos/getzola/zola") %}
{{ response }} {{ response }}
``` ```
By default, the response body will be returned with no parsing. This can be changed by using the `format` argument as above. By default, the response body will be returned with no parsing. This can be changed by using the `format` argument as below.
```jinja2 ```jinja2
@ -211,10 +214,12 @@ By default, the response body will be returned with no parsing. This can be chan
#### Data Caching #### Data Caching
Data file loading and remote requests are cached in memory during build, so multiple requests aren't made to the same endpoint. URLs are cached based on the URL, and data files are cached based on the files modified time. The format is also taken into account when caching, so a request will be sent twice if it's loaded with 2 different formats. Data file loading and remote requests are cached in memory during build, so multiple requests aren't made to the same endpoint.
URLs are cached based on the URL, and data files are cached based on the files modified time.
The format is also taken into account when caching, so a request will be sent twice if it's loaded with 2 different formats.
### `trans` ### `trans`
Gets the translation of the given `key`, for the `default_language` or the `language given Gets the translation of the given `key`, for the `default_language` or the `lang`uage given
```jinja2 ```jinja2
{{/* trans(key="title") */}} {{/* trans(key="title") */}}

View file

@ -32,13 +32,13 @@ word_count: Number;
// Based on https://help.medium.com/hc/en-us/articles/214991667-Read-time // Based on https://help.medium.com/hc/en-us/articles/214991667-Read-time
reading_time: Number; reading_time: Number;
// `earlier` and `later` are only populated if the section variable `sort_by` is set to `date` // `earlier` and `later` are only populated if the section variable `sort_by` is set to `date`
// and only set when rendering the page itself
earlier: Page?; earlier: Page?;
later: Page?; later: Page?;
// `heavier` and `lighter` are only populated if the section variable `sort_by` is set to `weight` // `heavier` and `lighter` are only populated if the section variable `sort_by` is set to `weight`
// and only set when rendering the page itself
heavier: Page?; heavier: Page?;
lighter: Page?; lighter: Page?;
// See the Table of contents section below for more details
toc: Array<Header>;
// Year/month/day is only set if the page has a date and month/day are 1-indexed // Year/month/day is only set if the page has a date and month/day are 1-indexed
year: Number?; year: Number?;
month: Number?; month: Number?;
@ -51,8 +51,8 @@ assets: Array<String>;
ancestors: Array<String>; ancestors: Array<String>;
// The relative path from the `content` directory to the markdown file // The relative path from the `content` directory to the markdown file
relative_path: String; relative_path: String;
// The language for the page if there is one // The language for the page if there is one. Default to the config `default_language`
lang: String?; lang: String;
// Information about all the available languages for that content // Information about all the available languages for that content
translations: Array<TranslatedContent>; translations: Array<TranslatedContent>;
``` ```
@ -70,8 +70,6 @@ with the following fields:
content: String; content: String;
title: String?; title: String?;
description: String?; description: String?;
date: String?;
slug: String;
path: String; path: String;
// the path, split on '/' // the path, split on '/'
components: Array<String>; components: Array<String>;
@ -87,8 +85,6 @@ subsections: Array<String>;
word_count: Number; word_count: Number;
// Based on https://help.medium.com/hc/en-us/articles/214991667-Read-time // Based on https://help.medium.com/hc/en-us/articles/214991667-Read-time
reading_time: Number; reading_time: Number;
// See the Table of contents section below for more details
toc: Array<Header>;
// Paths of colocated assets, relative to the content directory // Paths of colocated assets, relative to the content directory
assets: Array<String>; assets: Array<String>;
// The relative paths of the parent sections until the index one, for use with the `get_section` Tera function // The relative paths of the parent sections until the index one, for use with the `get_section` Tera function
@ -97,15 +93,15 @@ assets: Array<String>;
ancestors: Array<String>; ancestors: Array<String>;
// The relative path from the `content` directory to the markdown file // The relative path from the `content` directory to the markdown file
relative_path: String; relative_path: String;
// The language for the section if there is one // The language for the section if there is one. Default to the config `default_language`
lang: String?; lang: String;
// Information about all the available languages for that content // Information about all the available languages for that content
translations: Array<TranslatedContent>; translations: Array<TranslatedContent>;
``` ```
## Table of contents ## Table of contents
Both page and section have a `toc` field which corresponds to an array of `Header`. Both page and section templates have a `toc` variable which corresponds to an array of `Header`.
A `Header` has the following fields: A `Header` has the following fields:
```ts ```ts

View file

@ -6,20 +6,28 @@ weight = 60
Zola will look for a `sitemap.xml` file in the `templates` directory or Zola will look for a `sitemap.xml` file in the `templates` directory or
use the built-in one. use the built-in one.
If your site has more than 30 000 pages, it will automatically split
the links into multiple sitemaps as recommended by [Google](https://support.google.com/webmasters/answer/183668?hl=en):
The sitemap template gets four variables in addition of the config: > All formats limit a single sitemap to 50MB (uncompressed) and 50,000 URLs.
> If you have a larger file or more URLs, you will have to break your list into multiple sitemaps.
> You can optionally create a sitemap index file (a file that points to a list of sitemaps) and submit that single index file to Google.
- `pages`: all pages of the site In such a case, Zola will use a template called `split_sitemap_index.xml` to render the index sitemap.
- `sections`: all sections of the site, including an index section
- `tags`: links the tags page and individual tag page, empty if no tags
- `categories`: links the categories page and individual category page, empty if no categories
As the sitemap only requires a link and an optional date for the `lastmod` field,
all the variables above are arrays of `SitemapEntry` with the following type: The `sitemap.xml` template gets a single variable:
- `entries`: all pages of the site, as a list of `SitemapEntry`
A `SitemapEntry` has the following fields:
```ts ```ts
permalink: String; permalink: String;
date: String?; date: String?;
extra: Hashmap<String, Any>?;
``` ```
All `SitemapEntry` are sorted in each variable by their permalink. The `split_sitemap_index.xml` also gets a single variable:
- `sitemaps`: a list of permalinks to the sitemaps

View file

@ -27,7 +27,6 @@ paginate_path: String?;
rss: Bool; rss: Bool;
``` ```
```
### Taxonomy list (`list.html`) ### Taxonomy list (`list.html`)

View file

@ -6,7 +6,7 @@ weight = 20
## Installing a theme ## Installing a theme
The easiest way to install to theme is to clone its repository in the `themes` The easiest way to install a theme is to clone its repository in the `themes`
directory. directory.
```bash ```bash

View file

@ -30,6 +30,11 @@ pub fn build_cli() -> App<'static, 'static> {
.long("base-url") .long("base-url")
.takes_value(true) .takes_value(true)
.help("Force the base URL to be that value (default to the one in config.toml)"), .help("Force the base URL to be that value (default to the one in config.toml)"),
Arg::with_name("base_path")
.short("b")
.long("base-path")
.takes_value(true)
.help("Force the base site path to a certain directory [default: the current working directory]"),
Arg::with_name("output_dir") Arg::with_name("output_dir")
.short("o") .short("o")
.long("output-dir") .long("output-dir")
@ -56,6 +61,11 @@ pub fn build_cli() -> App<'static, 'static> {
.default_value("public") .default_value("public")
.takes_value(true) .takes_value(true)
.help("Outputs the generated site in the given path"), .help("Outputs the generated site in the given path"),
Arg::with_name("base_path")
.short("b")
.long("base-path")
.takes_value(true)
.help("Force the base site path to a certain directory [default: the current working directory]"),
Arg::with_name("base_url") Arg::with_name("base_url")
.short("u") .short("u")
.long("base-url") .long("base-url")

View file

@ -1,12 +1,19 @@
use std::env; use std::env;
use std::path::PathBuf;
use errors::Result; use errors::Result;
use site::Site; use site::Site;
use console; use console;
pub fn build(config_file: &str, base_url: Option<&str>, output_dir: &str) -> Result<()> { pub fn build(
let mut site = Site::new(env::current_dir().unwrap(), config_file)?; config_file: &str,
base_path: Option<&str>,
base_url: Option<&str>,
output_dir: &str,
) -> Result<()> {
let bp = base_path.map(PathBuf::from).unwrap_or(env::current_dir().unwrap());
let mut site = Site::new(bp, config_file)?;
site.set_output_path(output_dir); site.set_output_path(output_dir);
if let Some(b) = base_url { if let Some(b) = base_url {
site.set_base_url(b.to_string()); site.set_base_url(b.to_string());

View file

@ -41,7 +41,7 @@ pub fn create_new_project(name: &str) -> Result<()> {
let search = ask_bool("> Do you want to build a search index of the content?", false)?; let search = ask_bool("> Do you want to build a search index of the content?", false)?;
let config = CONFIG let config = CONFIG
.trim_left() .trim_start()
.replace("%BASE_URL%", &base_url) .replace("%BASE_URL%", &base_url)
.replace("%COMPILE_SASS%", &format!("{}", compile_sass)) .replace("%COMPILE_SASS%", &format!("{}", compile_sass))
.replace("%SEARCH%", &format!("{}", search)) .replace("%SEARCH%", &format!("{}", search))

View file

@ -36,7 +36,7 @@ use ctrlc;
use notify::{watcher, RecursiveMode, Watcher}; use notify::{watcher, RecursiveMode, Watcher};
use ws::{Message, Sender, WebSocket}; use ws::{Message, Sender, WebSocket};
use errors::{Result, ResultExt}; use errors::{Error as ZolaError, Result};
use site::Site; use site::Site;
use utils::fs::copy_file; use utils::fs::copy_file;
@ -90,36 +90,39 @@ fn livereload_handler(_: &HttpRequest) -> &'static str {
LIVE_RELOAD LIVE_RELOAD
} }
fn rebuild_done_handling(broadcaster: &Sender, res: Result<()>, reload_path: &str) { fn rebuild_done_handling(broadcaster: &Option<Sender>, res: Result<()>, reload_path: &str) {
match res { match res {
Ok(_) => { Ok(_) => {
broadcaster if let Some(broadcaster) = broadcaster.as_ref() {
.send(format!( broadcaster
r#" .send(format!(
{{ r#"
"command": "reload", {{
"path": "{}", "command": "reload",
"originalPath": "", "path": "{}",
"liveCSS": true, "originalPath": "",
"liveImg": true, "liveCSS": true,
"protocol": ["http://livereload.com/protocols/official-7"] "liveImg": true,
}}"#, "protocol": ["http://livereload.com/protocols/official-7"]
reload_path }}"#,
)) reload_path
.unwrap(); ))
.unwrap();
}
} }
Err(e) => console::unravel_errors("Failed to build the site", &e), Err(e) => console::unravel_errors("Failed to build the site", &e),
} }
} }
fn create_new_site( fn create_new_site<P: AsRef<Path>>(
interface: &str, interface: &str,
port: u16, port: u16,
output_dir: &str, output_dir: &str,
base_path: P,
base_url: &str, base_url: &str,
config_file: &str, config_file: &str,
) -> Result<(Site, String)> { ) -> Result<(Site, String)> {
let mut site = Site::new(env::current_dir().unwrap(), config_file)?; let mut site = Site::new(base_path, config_file)?;
let base_address = format!("{}:{}", base_url, port); let base_address = format!("{}:{}", base_url, port);
let address = format!("{}:{}", interface, port); let address = format!("{}:{}", interface, port);
@ -164,12 +167,15 @@ pub fn serve(
interface: &str, interface: &str,
port: u16, port: u16,
output_dir: &str, output_dir: &str,
base_path: Option<&str>,
base_url: &str, base_url: &str,
config_file: &str, config_file: &str,
watch_only: bool, watch_only: bool,
) -> Result<()> { ) -> Result<()> {
let start = Instant::now(); let start = Instant::now();
let (mut site, address) = create_new_site(interface, port, output_dir, base_url, config_file)?; let bp = base_path.map(PathBuf::from).unwrap_or(env::current_dir().unwrap());
let (mut site, address) =
create_new_site(interface, port, output_dir, bp.clone(), base_url, config_file)?;
console::report_elapsed_time(start); console::report_elapsed_time(start);
// Setup watchers // Setup watchers
@ -178,28 +184,28 @@ pub fn serve(
let (tx, rx) = channel(); let (tx, rx) = channel();
let mut watcher = watcher(tx, Duration::from_secs(1)).unwrap(); let mut watcher = watcher(tx, Duration::from_secs(1)).unwrap();
watcher watcher
.watch("content/", RecursiveMode::Recursive) .watch(bp.join("content/"), RecursiveMode::Recursive)
.chain_err(|| "Can't watch the `content` folder. Does it exist?")?; .map_err(|e| ZolaError::chain("Can't watch the `content` folder. Does it exist?", e))?;
watcher watcher
.watch(config_file, RecursiveMode::Recursive) .watch(bp.join(config_file), RecursiveMode::Recursive)
.chain_err(|| "Can't watch the `config` file. Does it exist?")?; .map_err(|e| ZolaError::chain("Can't watch the `config` file. Does it exist?", e))?;
if Path::new("static").exists() { if bp.join("static").exists() {
watching_static = true; watching_static = true;
watcher watcher
.watch("static/", RecursiveMode::Recursive) .watch(bp.join("static/"), RecursiveMode::Recursive)
.chain_err(|| "Can't watch the `static` folder.")?; .map_err(|e| ZolaError::chain("Can't watch the `static` folder.", e))?;
} }
if Path::new("templates").exists() { if bp.join("templates").exists() {
watching_templates = true; watching_templates = true;
watcher watcher
.watch("templates/", RecursiveMode::Recursive) .watch(bp.join("templates/"), RecursiveMode::Recursive)
.chain_err(|| "Can't watch the `templates` folder.")?; .map_err(|e| ZolaError::chain("Can't watch the `templates` folder.", e))?;
} }
// Sass support is optional so don't make it an error to no have a sass folder // Sass support is optional so don't make it an error to no have a sass folder
let _ = watcher.watch("sass/", RecursiveMode::Recursive); let _ = watcher.watch(bp.join("sass/"), RecursiveMode::Recursive);
let ws_address = format!("{}:{}", interface, site.live_reload.unwrap()); let ws_address = format!("{}:{}", interface, site.live_reload.unwrap());
let output_path = Path::new(output_dir).to_path_buf(); let output_path = Path::new(output_dir).to_path_buf();
@ -256,8 +262,6 @@ pub fn serve(
None None
}; };
let pwd = env::current_dir().unwrap();
let mut watchers = vec!["content", "config.toml"]; let mut watchers = vec!["content", "config.toml"];
if watching_static { if watching_static {
watchers.push("static"); watchers.push("static");
@ -271,7 +275,7 @@ pub fn serve(
println!( println!(
"Listening for changes in {}{}{{{}}}", "Listening for changes in {}{}{{{}}}",
pwd.display(), bp.display(),
MAIN_SEPARATOR, MAIN_SEPARATOR,
watchers.join(", ") watchers.join(", ")
); );
@ -293,14 +297,8 @@ pub fn serve(
format!("-> Template changed {}", path.display()) format!("-> Template changed {}", path.display())
}; };
console::info(&msg); console::info(&msg);
if let Some(ref broadcaster) = broadcaster { // Force refresh
// Force refresh rebuild_done_handling(&broadcaster, rebuild::after_template_change(site, &path), "/x.js");
rebuild_done_handling(
broadcaster,
rebuild::after_template_change(site, &path),
"/x.js",
);
}
}; };
let reload_sass = |site: &Site, path: &Path, partial_path: &Path| { let reload_sass = |site: &Site, path: &Path, partial_path: &Path| {
@ -310,13 +308,11 @@ pub fn serve(
format!("-> Sass file changed {}", path.display()) format!("-> Sass file changed {}", path.display())
}; };
console::info(&msg); console::info(&msg);
if let Some(ref broadcaster) = broadcaster { rebuild_done_handling(
rebuild_done_handling( &broadcaster,
&broadcaster, site.compile_sass(&site.base_path),
site.compile_sass(&site.base_path), &partial_path.to_string_lossy(),
&partial_path.to_string_lossy(), );
);
}
}; };
let copy_static = |site: &Site, path: &Path, partial_path: &Path| { let copy_static = |site: &Site, path: &Path, partial_path: &Path| {
@ -332,20 +328,18 @@ pub fn serve(
}; };
console::info(&msg); console::info(&msg);
if let Some(ref broadcaster) = broadcaster { if path.is_dir() {
if path.is_dir() { rebuild_done_handling(
rebuild_done_handling( &broadcaster,
broadcaster, site.copy_static_directories(),
site.copy_static_directories(), &path.to_string_lossy(),
&path.to_string_lossy(), );
); } else {
} else { rebuild_done_handling(
rebuild_done_handling( &broadcaster,
broadcaster, copy_file(&path, &site.output_path, &site.static_path),
copy_file(&path, &site.output_path, &site.static_path), &partial_path.to_string_lossy(),
&partial_path.to_string_lossy(), );
);
}
} }
}; };
@ -357,7 +351,8 @@ pub fn serve(
if path.is_file() && is_temp_file(&path) { if path.is_file() && is_temp_file(&path) {
continue; continue;
} }
let (change_kind, partial_path) = detect_change_kind(&pwd, &path); let (change_kind, partial_path) =
detect_change_kind(&bp.canonicalize().unwrap(), &path);
// We only care about changes in non-empty folders // We only care about changes in non-empty folders
if path.is_dir() && is_folder_empty(&path) { if path.is_dir() && is_folder_empty(&path) {
@ -373,14 +368,12 @@ pub fn serve(
match change_kind { match change_kind {
ChangeKind::Content => { ChangeKind::Content => {
console::info(&format!("-> Content renamed {}", path.display())); console::info(&format!("-> Content renamed {}", path.display()));
if let Some(ref broadcaster) = broadcaster { // Force refresh
// Force refresh rebuild_done_handling(
rebuild_done_handling( &broadcaster,
broadcaster, rebuild::after_content_rename(&mut site, &old_path, &path),
rebuild::after_content_rename(&mut site, &old_path, &path), "/x.js",
"/x.js", );
);
}
} }
ChangeKind::Templates => reload_templates(&mut site, &path), ChangeKind::Templates => reload_templates(&mut site, &path),
ChangeKind::StaticFiles => copy_static(&site, &path, &partial_path), ChangeKind::StaticFiles => copy_static(&site, &path, &partial_path),
@ -391,6 +384,7 @@ pub fn serve(
interface, interface,
port, port,
output_dir, output_dir,
bp.clone(),
base_url, base_url,
config_file, config_file,
) )
@ -411,17 +405,15 @@ pub fn serve(
); );
let start = Instant::now(); let start = Instant::now();
match detect_change_kind(&pwd, &path) { match detect_change_kind(&bp.canonicalize().unwrap(), &path) {
(ChangeKind::Content, _) => { (ChangeKind::Content, _) => {
console::info(&format!("-> Content changed {}", path.display())); console::info(&format!("-> Content changed {}", path.display()));
if let Some(ref broadcaster) = broadcaster { // Force refresh
// Force refresh rebuild_done_handling(
rebuild_done_handling( &broadcaster,
broadcaster, rebuild::after_content_change(&mut site, &path),
rebuild::after_content_change(&mut site, &path), "/x.js",
"/x.js", );
);
}
} }
(ChangeKind::Templates, _) => reload_templates(&mut site, &path), (ChangeKind::Templates, _) => reload_templates(&mut site, &path),
(ChangeKind::StaticFiles, p) => copy_static(&site, &path, &p), (ChangeKind::StaticFiles, p) => copy_static(&site, &path, &p),
@ -432,6 +424,7 @@ pub fn serve(
interface, interface,
port, port,
output_dir, output_dir,
bp.clone(),
base_url, base_url,
config_file, config_file,
) )

View file

@ -1,4 +1,5 @@
use std::env; use std::env;
use std::error::Error as StdError;
use std::io::Write; use std::io::Write;
use std::time::Instant; use std::time::Instant;
@ -47,24 +48,23 @@ fn colorize(message: &str, color: &ColorSpec) {
/// Display in the console the number of pages/sections in the site /// Display in the console the number of pages/sections in the site
pub fn notify_site_size(site: &Site) { pub fn notify_site_size(site: &Site) {
let library = site.library.read().unwrap();
println!( println!(
"-> Creating {} pages ({} orphan), {} sections, and processing {} images", "-> Creating {} pages ({} orphan), {} sections, and processing {} images",
site.library.pages().len(), library.pages().len(),
site.get_all_orphan_pages().len(), site.get_number_orphan_pages(),
site.library.sections().len() - 1, // -1 since we do not the index as a section library.sections().len() - 1, // -1 since we do not the index as a section
site.num_img_ops(), site.num_img_ops(),
); );
} }
/// Display a warning in the console if there are ignored pages in the site /// Display a warning in the console if there are ignored pages in the site
pub fn warn_about_ignored_pages(site: &Site) { pub fn warn_about_ignored_pages(site: &Site) {
let ignored_pages: Vec<_> = site let library = site.library.read().unwrap();
.library let ignored_pages: Vec<_> = library
.sections_values() .sections_values()
.iter() .iter()
.flat_map(|s| { .flat_map(|s| s.ignored_pages.iter().map(|k| library.get_page_by_key(*k).file.path.clone()))
s.ignored_pages.iter().map(|k| site.library.get_page_by_key(*k).file.path.clone())
})
.collect(); .collect();
if !ignored_pages.is_empty() { if !ignored_pages.is_empty() {
@ -96,8 +96,10 @@ pub fn unravel_errors(message: &str, error: &Error) {
self::error(message); self::error(message);
} }
self::error(&format!("Error: {}", error)); self::error(&format!("Error: {}", error));
for e in error.iter().skip(1) { let mut cause = error.source();
while let Some(e) = cause {
self::error(&format!("Reason: {}", e)); self::error(&format!("Reason: {}", e));
cause = e.source();
} }
} }

View file

@ -46,7 +46,12 @@ fn main() {
console::info("Building site..."); console::info("Building site...");
let start = Instant::now(); let start = Instant::now();
let output_dir = matches.value_of("output_dir").unwrap(); let output_dir = matches.value_of("output_dir").unwrap();
match cmd::build(config_file, matches.value_of("base_url"), output_dir) { match cmd::build(
config_file,
matches.value_of("base_path"),
matches.value_of("base_url"),
output_dir,
) {
Ok(()) => console::report_elapsed_time(start), Ok(()) => console::report_elapsed_time(start),
Err(e) => { Err(e) => {
console::unravel_errors("Failed to build the site", &e); console::unravel_errors("Failed to build the site", &e);
@ -79,9 +84,18 @@ fn main() {
} }
let watch_only = matches.is_present("watch_only"); let watch_only = matches.is_present("watch_only");
let output_dir = matches.value_of("output_dir").unwrap(); let output_dir = matches.value_of("output_dir").unwrap();
let base_path = matches.value_of("base_path");
let base_url = matches.value_of("base_url").unwrap(); let base_url = matches.value_of("base_url").unwrap();
console::info("Building site..."); console::info("Building site...");
match cmd::serve(interface, port, output_dir, base_url, config_file, watch_only) { match cmd::serve(
interface,
port,
output_dir,
base_path,
base_url,
config_file,
watch_only,
) {
Ok(()) => (), Ok(()) => (),
Err(e) => { Err(e) => {
console::unravel_errors("", &e); console::unravel_errors("", &e);

View file

@ -0,0 +1,466 @@
%YAML 1.2
---
# http://www.sublimetext.com/docs/3/syntax.html
name: PowerShell
file_extensions:
- ps1
- psm1
- psd1
scope: source.powershell
contexts:
main:
- match: "<#"
captures:
0: punctuation.definition.comment.block.begin.powershell
push:
- meta_scope: comment.block.powershell
- match: "#>"
captures:
0: punctuation.definition.comment.block.end.powershell
pop: true
- include: commentEmbeddedDocs
- match: '[2-6]>&1|>>|>|<<|<|>|>\||[1-6]>|[1-6]>>'
scope: keyword.operator.redirection.powershell
- include: commands
- include: commentLine
- include: variable
- include: interpolatedStringContent
- include: function
- include: attribute
- include: UsingDirective
- include: type
- include: hashtable
- include: doubleQuotedString
- include: scriptblock
- include: doubleQuotedStringEscapes
- match: (?<!')'
captures:
0: punctuation.definition.string.begin.powershell
push:
- meta_scope: string.quoted.single.powershell
- match: "'(?!')"
captures:
0: punctuation.definition.string.end.powershell
pop: true
- match: "''"
scope: constant.character.escape.powershell
- match: \@"(?=$)
push:
- meta_scope: string.quoted.double.heredoc.powershell
- match: ^"@
pop: true
- include: variableNoProperty
- include: doubleQuotedStringEscapes
- include: interpolation
- match: \@'(?=$)
push:
- meta_scope: string.quoted.single.heredoc.powershell
- match: ^'@
pop: true
- match: "''"
scope: constant.character.escape.powershell
- include: numericConstant
- match: (@)(\()
captures:
1: keyword.other.array.begin.powershell
2: punctuation.section.group.begin.powershell
push:
- meta_scope: meta.group.array-expression.powershell
- match: \)
captures:
0: punctuation.section.group.end.powershell
pop: true
- include: main
- match: (\$)(\()
comment: "TODO: move to repo; make recursive."
captures:
1: punctuation.definition.variable.powershell
2: punctuation.section.group.begin.powershell
push:
- meta_scope: meta.group.complex.subexpression.powershell
- match: \)
captures:
0: punctuation.section.group.end.powershell
pop: true
- include: main
- match: '(\b(([A-Za-z0-9\-_\.]+)\.(?i:exe|com|cmd|bat))\b)'
scope: support.function.powershell
- match: (?<!\w|-|\.)((?i:begin|break|catch|continue|data|default|define|do|dynamicparam|else|elseif|end|exit|finally|for|from|if|in|inlinescript|parallel|param|process|return|sequence|switch|throw|trap|try|until|var|while)|%|\?)(?!\w)
scope: keyword.control.powershell
- match: '(?<!\w|-|[^\)]\.)((?i:(foreach|where)(?!-object))|%|\?)(?!\w)'
scope: keyword.control.powershell
- match: (?<!\w)(--%)(?!\w)
comment: This should be moved to the repository at some point.
captures:
1: keyword.control.powershell
push:
- match: $
pop: true
- match: .+
scope: string.unquoted.powershell
- match: (?<!\w)((?i:hidden|static))(?!\w)
comment: This should only be relevant inside a class but will require a rework of how classes are matched. This is a temp fix.
scope: storage.modifier.powershell
- match: '(?<!\w|-)((?i:class)|%|\?)(?:\s)+((?:\p{L}|\d|_|-|)+)\b'
comment: capture should be entity.name.type, but it doesn't provide a good color in the default schema.
captures:
1: storage.type.powershell
2: entity.name.function
- match: (?<!\w)-(?i:is(?:not)?|as)\b
scope: keyword.operator.comparison.powershell
- match: '(?<!\w)-(?i:[ic]?(?:eq|ne|[gl][te]|(?:not)?(?:like|match|contains|in)|replace))(?!\p{L})'
scope: keyword.operator.comparison.powershell
- match: '(?<!\w)-(?i:join|split)(?!\p{L})|!'
scope: keyword.operator.unary.powershell
- match: '(?<!\w)-(?i:and|or|not|xor)(?!\p{L})|!'
scope: keyword.operator.logical.powershell
- match: '(?<!\w)-(?i:band|bor|bnot|bxor|shl|shr)(?!\p{L})'
scope: keyword.operator.bitwise.powershell
- match: '(?<!\w)-(?i:f)(?!\p{L})'
scope: keyword.operator.string-format.powershell
- match: "[+%*/-]?=|[+/*%-]"
scope: keyword.operator.assignment.powershell
- match: '\|{2}|&{2}|;'
scope: punctuation.terminator.statement.powershell
- match: '&|(?<!\w)\.(?= )|`|,|\|'
scope: keyword.operator.other.powershell
- match: (?<!\s|^)\.\.(?=\-?\d|\(|\$)
comment: This is very imprecise, is there a syntax for 'must come after...'
scope: keyword.operator.range.powershell
RequiresDirective:
- match: (?<=#)(?i:(requires))\s
captures:
0: keyword.control.requires.powershell
push:
- meta_scope: meta.requires.powershell
- match: $
pop: true
- match: \-(?i:Modules|PSSnapin|RunAsAdministrator|ShellId|Version)
scope: keyword.other.powershell
- match: '(?<!-)\b\p{L}+|\d+(?:\.\d+)*'
scope: variable.parameter.powershell
- include: hashtable
UsingDirective:
- match: (?<!\w)(?i:(using))\s+(?i:(namespace|module))\s+(?i:((?:\w+(?:\.)?)+))
captures:
1: keyword.control.using.powershell
2: keyword.other.powershell
3: variable.parameter.powershell
attribute:
- match: '(\[)\s*\b(?i)(cmdletbinding|alias|outputtype|parameter|validatenotnull|validatenotnullorempty|validatecount|validateset|allownull|allowemptycollection|allowemptystring|validatescript|validaterange|validatepattern|validatelength)\b'
captures:
1: punctuation.section.bracket.begin.powershell
2: support.function.attribute.powershell
push:
- meta_scope: meta.attribute.powershell
- match: '(\])'
captures:
1: punctuation.section.bracket.end.powershell
pop: true
- match: \(
captures:
0: punctuation.section.group.begin.powershell
push:
- match: \)
captures:
0: punctuation.section.group.end.powershell
pop: true
- include: variable
- include: variableNoProperty
- include: hashtable
- include: scriptblock
- include: doubleQuotedStringEscapes
- include: doubleQuotedString
- include: type
- include: numericConstant
- include: doubleQuotedString
- include: main
- match: (?i)\b(mandatory|valuefrompipeline|valuefrompipelinebypropertyname|valuefromremainingarguments|position|parametersetname|defaultparametersetname|supportsshouldprocess|supportspaging|positionalbinding|helpuri|confirmimpact|helpmessage)\b(?:\s+)?(=)?
captures:
1: variable.parameter.attribute.powershell
2: keyword.operator.assignment.powershell
- match: (?<!')'
captures:
0: punctuation.definition.string.begin.powershell
push:
- meta_scope: string.quoted.single.powershell
- match: "'(?!')"
captures:
0: punctuation.definition.string.end.powershell
pop: true
- match: "''"
scope: constant.character.escape.powershell
commands:
- match: '(?:(\p{L}|\d|_|-|\\|\:)*\\)?\b(?i:Add|Approve|Assert|Backup|Block|Build|Checkpoint|Clear|Close|Compare|Complete|Compress|Confirm|Connect|Convert|ConvertFrom|ConvertTo|Copy|Debug|Deny|Deploy|Disable|Disconnect|Dismount|Edit|Enable|Enter|Exit|Expand|Export|Find|Format|Get|Grant|Group|Hide|Import|Initialize|Install|Invoke|Join|Limit|Lock|Measure|Merge|Mount|Move|New|Open|Optimize|Out|Ping|Pop|Protect|Publish|Push|Read|Receive|Redo|Register|Remove|Rename|Repair|Request|Reset|Resize|Resolve|Restart|Restore|Resume|Revoke|Save|Search|Select|Send|Set|Show|Skip|Split|Start|Step|Stop|Submit|Suspend|Switch|Sync|Test|Trace|Unblock|Undo|Uninstall|Unlock|Unprotect|Unpublish|Unregister|Update|Use|Wait|Watch|Write)\-.+?(?:\.(?i:exe|cmd|bat|ps1))?\b'
comment: "Verb-Noun pattern:"
scope: support.function.powershell
- match: (?<!\w)(?i:foreach-object)(?!\w)
comment: Builtin cmdlets with reserved verbs
scope: support.function.powershell
- match: (?<!\w)(?i:where-object)(?!\w)
comment: Builtin cmdlets with reserved verbs
scope: support.function.powershell
- match: (?<!\w)(?i:sort-object)(?!\w)
comment: Builtin cmdlets with reserved verbs
scope: support.function.powershell
- match: (?<!\w)(?i:tee-object)(?!\w)
comment: Builtin cmdlets with reserved verbs
scope: support.function.powershell
commentEmbeddedDocs:
- match: ^(?i:(?:\s?|#)+(\.)(COMPONENT|DESCRIPTION|EXAMPLE|EXTERNALHELP|FORWARDHELPCATEGORY|FORWARDHELPTARGETNAME|FUNCTIONALITY|INPUTS|LINK|NOTES|OUTPUTS|REMOTEHELPRUNSPACE|ROLE|SYNOPSIS))
scope: comment.documentation.embedded.powershell
captures:
1: constant.string.documentation.powershell
2: keyword.operator.documentation.powershell
- match: '(?i:\s?(\.)(PARAMETER|FORWARDHELPTARGETNAME|FORWARDHELPCATEGORY|REMOTEHELPRUNSPACE|EXTERNALHELP)\s+([a-z0-9-_]+))'
scope: comment.documentation.embedded.powershell
captures:
1: constant.string.documentation.powershell
2: keyword.operator.documentation.powershell
3: keyword.operator.documentation.powershell
commentLine:
- match: '(?<![`\\-])#'
captures:
0: punctuation.definition.comment.powershell
push:
- meta_scope: comment.line.powershell
- match: $\n?
captures:
0: punctuation.definition.comment.powershell
pop: true
- include: commentEmbeddedDocs
- include: RequiresDirective
doubleQuotedString:
- match: (?<!(?<!`)")"
captures:
0: punctuation.definition.string.begin.powershell
push:
- meta_scope: string.quoted.double.powershell
- match: '"(?!")'
captures:
0: punctuation.definition.string.end.powershell
pop: true
- match: '(?i)\b[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,64}\b'
- include: variableNoProperty
- include: variable
- include: doubleQuotedStringEscapes
- include: interpolation
- match: '`\s*$'
scope: keyword.other.powershell
doubleQuotedStringEscapes:
- match: '`[0abnfrvt"''$`]'
scope: constant.character.escape.powershell
- match: '""'
scope: constant.character.escape.powershell
function:
- match: '^(?:\s*+)(?i)(function|filter|configuration|workflow)\s+(?:(global|local|script|private):)?((?:\p{L}|\d|_|-|\.)+)'
captures:
0: meta.function.powershell
1: storage.type.powershell
2: storage.modifier.scope.powershell
3: entity.name.function.powershell
push:
- match: '(?=\{|\()'
pop: true
- include: commentLine
hashtable:
- match: '(@)(\{)'
captures:
1: keyword.other.hashtable.begin.powershell
2: punctuation.section.braces.begin.powershell
push:
- meta_scope: meta.hashtable.powershell
- match: '(\})'
captures:
1: punctuation.section.braces.end.powershell
pop: true
- match: \b((?:\'|\")?)(\w+)((?:\'|\")?)(?:\s+)?(=)(?:\s+)?
scope: meta.hashtable.assignment.powershell
captures:
1: punctuation.definition.string.begin.powershell
2: variable.other.readwrite.powershell
3: punctuation.definition.string.end.powershell
4: keyword.operator.assignment.powershell
- include: scriptblock
- include: main
interpolatedStringContent:
- match: \(
captures:
0: punctuation.section.group.begin.powershell
push:
- meta_content_scope: interpolated.simple.source.powershell
- match: \)
captures:
0: punctuation.section.group.end.powershell
pop: true
- include: main
- include: interpolation
- include: interpolatedStringContent
interpolation:
- match: (\$)(\()
captures:
1: punctuation.definition.variable.powershell
2: punctuation.section.group.begin.powershell
push:
- meta_content_scope: interpolated.complex.source.powershell
- match: \)
captures:
0: punctuation.section.group.end.powershell
pop: true
- include: main
- include: interpolation
- include: interpolatedStringContent
numericConstant:
- match: '(?<!\w)([-+]?0(?:x|X)[0-9a-fA-F_]+(?:U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?)((?i:[kmgtp]b)?)\b'
captures:
1: constant.numeric.hex.powershell
2: keyword.other.powershell
- match: '(?<!\w)([-+]?(?:[0-9_]+)?\.[0-9_]+(?:(?:e|E)[0-9]+)?(?:F|f|D|d|M|m)?)((?i:[kmgtp]b)?)\b'
captures:
1: constant.numeric.integer.powershell
2: keyword.other.powershell
- match: '(?<!\w)([-+]?0(?:b|B)[01_]+(?:U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?)((?i:[kmgtp]b)?)\b'
captures:
1: constant.numeric.octal.powershell
2: keyword.other.powershell
- match: '(?<!\w)([-+]?[0-9_]+(?:e|E)(?:[0-9_])?+(?:F|f|D|d|M|m)?)((?i:[kmgtp]b)?)\b'
captures:
1: constant.numeric.integer.powershell
2: keyword.other.powershell
- match: '(?<!\w)([-+]?[0-9_]+\.(?:e|E)(?:[0-9_])?+(?:F|f|D|d|M|m)?)((?i:[kmgtp]b)?)\b'
captures:
1: constant.numeric.integer.powershell
2: keyword.other.powershell
- match: '(?<!\w)([-+]?[0-9_]+[\.]?(?:F|f|D|d|M|m))((?i:[kmgtp]b)?)\b'
captures:
1: constant.numeric.integer.powershell
2: keyword.other.powershell
- match: '(?<!\w)([-+]?[0-9_]+[\.]?(?:U|u|L|l|UL|Ul|uL|ul|LU|Lu|lU|lu)?)((?i:[kmgtp]b)?)\b'
captures:
1: constant.numeric.integer.powershell
2: keyword.other.powershell
scriptblock:
- match: '\{'
captures:
0: punctuation.section.braces.begin.powershell
push:
- meta_scope: meta.scriptblock.powershell
- match: '\}'
captures:
0: punctuation.section.braces.end.powershell
pop: true
- include: main
type:
- match: '\['
captures:
0: punctuation.section.bracket.begin.powershell
push:
- match: '\]'
captures:
0: punctuation.section.bracket.end.powershell
pop: true
- match: '(?!\d+|\.)(?:\p{L}|\p{N}|\.)+'
scope: storage.type.powershell
- include: main
variable:
- match: (\$)(?i:(False|Null|True))\b
comment: These are special constants.
captures:
0: constant.language.powershell
1: punctuation.definition.variable.powershell
- match: '(\$)(?i:(Error|ExecutionContext|Host|Home|PID|PsHome|PsVersionTable|ShellID))((?:\.(?:\p{L}|\d|_)+)*\b)?\b'
comment: These are the other built-in constants.
captures:
0: support.constant.variable.powershell
1: punctuation.definition.variable.powershell
3: variable.other.member.powershell
- match: '(\$)(?i:(\$|\^|\?|_|Args|ConsoleFileName|Event|EventArgs|EventSubscriber|ForEach|Input|LastExitCode|Matches|MyInvocation|NestedPromptLevel|Profile|PSBoundParameters|PsCmdlet|PsCulture|PSDebugContext|PSItem|PSCommandPath|PSScriptRoot|PsUICulture|Pwd|Sender|SourceArgs|SourceEventArgs|StackTrace|Switch|This))((?:\.(?:\p{L}|\d|_)+)*\b)?\b'
comment: Automatic variables are not constants, but they are read-only. In monokai (default) color schema support.variable doesn't have color, so we use constant.
captures:
0: support.constant.automatic.powershell
1: punctuation.definition.variable.powershell
3: variable.other.member.powershell
- match: '(\$)(?i:(ConfirmPreference|DebugPreference|ErrorActionPreference|ErrorView|FormatEnumerationLimit|MaximumAliasCount|MaximumDriveCount|MaximumErrorCount|MaximumFunctionCount|MaximumHistoryCount|MaximumVariableCount|OFS|OutputEncoding|ProgressPreference|PsCulture|PSDebugContext|PSDefaultParameterValues|PSEmailServer|PSItem|PSModuleAutoloadingPreference|PSSenderInfo|PSSessionApplicationName|PSSessionConfigurationName|PSSessionOption|VerbosePreference|WarningPreference|WhatIfPreference))((?:\.(?:\p{L}|\d|_)+)*\b)?\b'
comment: Style preference variables as language variables so that they stand out.
captures:
0: variable.language.powershell
1: punctuation.definition.variable.powershell
3: variable.other.member.powershell
- match: '(?i:(\$|@)(global|local|private|script|using|workflow):((?:\p{L}|\d|_)+))((?:\.(?:\p{L}|\d|_)+)*\b)?'
captures:
0: variable.other.readwrite.powershell
1: punctuation.definition.variable.powershell
2: storage.modifier.scope.powershell
4: variable.other.member.powershell
- match: '(?i:(\$)(\{)(global|local|private|script|using|workflow):([^}]*[^}`])(\}))((?:\.(?:\p{L}|\d|_)+)*\b)?'
captures:
0: variable.other.readwrite.powershell
1: punctuation.definition.variable.powershell
2: punctuation.section.braces.begin.powershell
3: storage.modifier.scope.powershell
5: punctuation.section.braces.end.powershell
6: variable.other.member.powershell
- match: '(?i:(\$|@)((?:\p{L}|\d|_)+:)?((?:\p{L}|\d|_)+))((?:\.(?:\p{L}|\d|_)+)*\b)?'
captures:
0: variable.other.readwrite.powershell
1: punctuation.definition.variable.powershell
2: support.variable.drive.powershell
4: variable.other.member.powershell
- match: '(?i:(\$)(\{)((?:\p{L}|\d|_)+:)?([^}]*[^}`])(\}))((?:\.(?:\p{L}|\d|_)+)*\b)?'
captures:
0: variable.other.readwrite.powershell
1: punctuation.definition.variable.powershell
2: punctuation.section.braces.begin.powershell
3: support.variable.drive.powershell
5: punctuation.section.braces.end.powershell
6: variable.other.member.powershell
variableNoProperty:
- match: (\$)(?i:(False|Null|True))\b
comment: These are special constants.
captures:
0: constant.language.powershell
1: punctuation.definition.variable.powershell
- match: (\$)(?i:(Error|ExecutionContext|Host|Home|PID|PsHome|PsVersionTable|ShellID))\b
comment: These are the other built-in constants.
captures:
0: support.constant.variable.powershell
1: punctuation.definition.variable.powershell
3: variable.other.member.powershell
- match: (\$)(?i:(\$|\^|\?|_|Args|ConsoleFileName|Event|EventArgs|EventSubscriber|ForEach|Input|LastExitCode|Matches|MyInvocation|NestedPromptLevel|Profile|PSBoundParameters|PsCmdlet|PsCulture|PSDebugContext|PSItem|PSCommandPath|PSScriptRoot|PsUICulture|Pwd|Sender|SourceArgs|SourceEventArgs|StackTrace|Switch|This))\b
comment: Automatic variables are not constants, but they are read-only...
captures:
0: support.variable.automatic.powershell
1: punctuation.definition.variable.powershell
3: variable.other.member.powershell
- match: (\$)(?i:(ConfirmPreference|DebugPreference|ErrorActionPreference|ErrorView|FormatEnumerationLimit|MaximumAliasCount|MaximumDriveCount|MaximumErrorCount|MaximumFunctionCount|MaximumHistoryCount|MaximumVariableCount|OFS|OutputEncoding|ProgressPreference|PsCulture|PSDebugContext|PSDefaultParameterValues|PSEmailServer|PSItem|PSModuleAutoloadingPreference|PSSenderInfo|PSSessionApplicationName|PSSessionConfigurationName|PSSessionOption|VerbosePreference|WarningPreference|WhatIfPreference))\b
comment: Style preference variables as language variables so that they stand out.
captures:
0: variable.language.powershell
1: punctuation.definition.variable.powershell
3: variable.other.member.powershell
- match: '(?i:(\$|@)(global|local|private|script|using|workflow):((?:\p{L}|\d|_)+))'
captures:
0: variable.other.readwrite.powershell
1: punctuation.definition.variable.powershell
2: storage.modifier.scope.powershell
4: variable.other.member.powershell
- match: '(?i:(\$)(\{)(global|local|private|script|using|workflow):([^}]*[^}`])(\}))'
captures:
0: variable.other.readwrite.powershell
1: punctuation.definition.variable.powershell
2: storage.modifier.scope.powershell
4: keyword.other.powershell
5: variable.other.member.powershell
- match: '(?i:(\$)((?:\p{L}|\d|_)+:)?((?:\p{L}|\d|_)+))'
captures:
0: variable.other.readwrite.powershell
1: punctuation.definition.variable.powershell
2: support.variable.drive.powershell
4: variable.other.member.powershell
- match: '(?i:(\$)(\{)((?:\p{L}|\d|_)+:)?([^}]*[^}`])(\}))'
captures:
0: variable.other.readwrite.powershell
1: punctuation.definition.variable.powershell
2: punctuation.section.braces.begin
3: support.variable.drive.powershell
5: punctuation.section.braces.end

Binary file not shown.

Binary file not shown.

938
sublime_themes/dracula.tmTheme Executable file
View file

@ -0,0 +1,938 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Dracula Theme v1.4.2
#
# https://github.com/dracula/sublime
#
# Copyright 2013-present, All rights reserved
#
# Code licensed under the MIT license
#
# @author Zeno Rocha <hi@zenorocha.com>
-->
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>name</key>
<string>Dracula</string>
<key>settings</key>
<array>
<dict>
<key>settings</key>
<dict>
<key>background</key>
<string>#282a36</string>
<key>caret</key>
<string>#f8f8f0</string>
<key>foreground</key>
<string>#f8f8f2</string>
<key>invisibles</key>
<string>#3B3A32</string>
<key>lineHighlight</key>
<string>#44475a</string>
<key>selection</key>
<string>#44475a</string>
<key>findHighlight</key>
<string>#effb7b</string>
<key>findHighlightForeground</key>
<string>#000000</string>
<key>selectionBorder</key>
<string>#222218</string>
<key>activeGuide</key>
<string>#9D550FB0</string>
<key>bracketsForeground</key>
<string>#F8F8F2A5</string>
<key>bracketsOptions</key>
<string>underline</string>
<key>bracketContentsForeground</key>
<string>#F8F8F2A5</string>
<key>bracketContentsOptions</key>
<string>underline</string>
<key>tagsOptions</key>
<string>stippled_underline</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Comment</string>
<key>scope</key>
<string>comment</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#6272a4</string>
<key>fontStyle</key>
<string></string>
</dict>
</dict>
<dict>
<key>name</key>
<string>String</string>
<key>scope</key>
<string>string</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#f1fa8c</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Number</string>
<key>scope</key>
<string>constant.numeric</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#bd93f9</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Built-in constant</string>
<key>scope</key>
<string>constant.language</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#bd93f9</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>User-defined constant</string>
<key>scope</key>
<string>constant.character, constant.other</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#bd93f9</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Variable</string>
<key>scope</key>
<string>variable</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Ruby's @variable</string>
<key>scope</key>
<string>variable.other.readwrite.instance</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
<key>foreground</key>
<string>#ffb86c</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>String interpolation</string>
<key>scope</key>
<string>constant.character.escaped, constant.character.escape, string source, string source.ruby</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
<key>foreground</key>
<string>#ff79c6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Ruby Regexp</string>
<key>scope</key>
<string>source.ruby string.regexp.classic.ruby,source.ruby string.regexp.mod-r.ruby</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
<key>foreground</key>
<string>#ff5555</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Keyword</string>
<key>scope</key>
<string>keyword</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ff79c6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Storage</string>
<key>scope</key>
<string>storage</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
<key>foreground</key>
<string>#ff79c6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Storage type</string>
<key>scope</key>
<string>storage.type</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>foreground</key>
<string>#8be9fd</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Storage Type Namespace</string>
<key>scope</key>
<string>storage.type.namespace</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>foreground</key>
<string>#8be9fd</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Storage Type Class</string>
<key>scope</key>
<string>storage.type.class</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>foreground</key>
<string>#ff79c6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Class name</string>
<key>scope</key>
<string>entity.name.class</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>underline</string>
<key>foreground</key>
<string>#8be9fd</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Meta Path</string>
<key>scope</key>
<string>meta.path</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>underline</string>
<key>foreground</key>
<string>#66d9ef</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Inherited class</string>
<key>scope</key>
<string>entity.other.inherited-class</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic underline</string>
<key>foreground</key>
<string>#8be9fd</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Function name</string>
<key>scope</key>
<string>entity.name.function</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
<key>foreground</key>
<string>#50fa7b</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Function argument</string>
<key>scope</key>
<string>variable.parameter</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>foreground</key>
<string>#ffb86c</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Tag name</string>
<key>scope</key>
<string>entity.name.tag</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
<key>foreground</key>
<string>#ff79c6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Tag attribute</string>
<key>scope</key>
<string>entity.other.attribute-name</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
<key>foreground</key>
<string>#50fa7b</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Library function</string>
<key>scope</key>
<string>support.function</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
<key>foreground</key>
<string>#8be9fd</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Library constant</string>
<key>scope</key>
<string>support.constant</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
<key>foreground</key>
<string>#6be5fd</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Library class&#x2f;type</string>
<key>scope</key>
<string>support.type, support.class</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>foreground</key>
<string>#66d9ef</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Library variable</string>
<key>scope</key>
<string>support.other.variable</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string></string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Support Other Namespace</string>
<key>scope</key>
<string>support.other.namespace</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>foreground</key>
<string>#66d9ef</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Invalid</string>
<key>scope</key>
<string>invalid</string>
<key>settings</key>
<dict>
<key>background</key>
<string>#ff79c6</string>
<key>fontStyle</key>
<string></string>
<key>foreground</key>
<string>#F8F8F0</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Invalid deprecated</string>
<key>scope</key>
<string>invalid.deprecated</string>
<key>settings</key>
<dict>
<key>background</key>
<string>#bd93f9</string>
<key>foreground</key>
<string>#F8F8F0</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>JSON String</string>
<key>scope</key>
<string>meta.structure.dictionary.json string.quoted.double.json</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#CFCFC2</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>diff.header</string>
<key>scope</key>
<string>meta.diff, meta.diff.header</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#6272a4</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>diff.deleted</string>
<key>scope</key>
<string>markup.deleted</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ff79c6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>diff.inserted</string>
<key>scope</key>
<string>markup.inserted</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#50fa7b</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>diff.changed</string>
<key>scope</key>
<string>markup.changed</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#E6DB74</string>
</dict>
</dict>
<dict>
<key>scope</key>
<string>constant.numeric.line-number.find-in-files - match</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#bd93f9</string>
</dict>
</dict>
<dict>
<key>scope</key>
<string>entity.name.filename</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#E6DB74</string>
</dict>
</dict>
<dict>
<key>scope</key>
<string>message.error</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#F83333</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>JSON Punctuation</string>
<key>scope</key>
<string>punctuation.definition.string.begin.json - meta.structure.dictionary.value.json, punctuation.definition.string.end.json - meta.structure.dictionary.value.json</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#EEEEEE</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>JSON Structure</string>
<key>scope</key>
<string>meta.structure.dictionary.json string.quoted.double.json</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#8be9fd</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>JSON String</string>
<key>scope</key>
<string>meta.structure.dictionary.value.json string.quoted.double.json</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#f1fa8c</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>JSON: 6 deep</string>
<key>scope</key>
<string>meta meta meta meta meta meta meta.structure.dictionary.value string</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#50fa7b</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>JSON: 5 deep</string>
<key>scope</key>
<string>meta meta meta meta meta meta.structure.dictionary.value string</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ffb86c</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>JSON: 4 deep</string>
<key>scope</key>
<string>meta meta meta meta meta.structure.dictionary.value string</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ff79c6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>JSON: 3 deep</string>
<key>scope</key>
<string>meta meta meta meta.structure.dictionary.value string</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#bd93f9</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>JSON: 2 deep</string>
<key>scope</key>
<string>meta meta meta.structure.dictionary.value string</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#50fa7b</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>JSON: 1 deep</string>
<key>scope</key>
<string>meta meta.structure.dictionary.value string</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ffb86c</string>
</dict>
</dict>
<!-- Markdown Tweaks -->
<dict>
<key>name</key>
<string>Markup: strike</string>
<key>scope</key>
<string>markup.strike</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>foreground</key>
<string>#FFB86C</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markup: bold</string>
<key>scope</key>
<string>markup.bold</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>bold</string>
<key>foreground</key>
<string>#FFB86C</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markup: italic</string>
<key>scope</key>
<string>markup.italic</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>foreground</key>
<string>#FFB86C</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markdown: heading</string>
<key>scope</key>
<string>markup.heading</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#8BE9FD</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markdown: List Items Punctuation</string>
<key>scope</key>
<string>punctuation.definition.list_item.markdown</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#FF79C6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markdown: Blockquote</string>
<key>scope</key>
<string>markup.quote</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>foreground</key>
<string>#6272A4</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markdown: Blockquote Punctuation</string>
<key>scope</key>
<string>punctuation.definition.blockquote.markdown</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>background</key>
<string>#6272A4</string>
<key>foreground</key>
<string>#6272A4</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markdown: Separator</string>
<key>scope</key>
<string>meta.separator</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#6272A4</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markup: raw inline</string>
<key>scope</key>
<string>text.html.markdown markup.raw.inline</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#50FA7B</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markup: underline</string>
<key>scope</key>
<string>markup.underline</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>underline</string>
<key>foreground</key>
<string>#BD93F9</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markup: Raw block</string>
<key>scope</key>
<string>markup.raw.block</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#CFCFC2</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markdown: Raw Block fenced source</string>
<key>scope</key>
<string>markup.raw.block.fenced.markdown source</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#F8F8F2</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markdown: Fenced Code Block</string>
<key>scope</key>
<string>punctuation.definition.fenced.markdown, variable.language.fenced.markdown</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>foreground</key>
<string>#6272A4</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Markdown: Fenced Language</string>
<key>scope</key>
<string>variable.language.fenced.markdown</string>
<key>settings</key>
<dict>
<key>fontStyle</key>
<string>italic</string>
<key>foreground</key>
<string>#6272A4</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Punctuation Accessor</string>
<key>scope</key>
<string>punctuation.accessor</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#FF79C6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Meta Function Return Type</string>
<key>scope</key>
<string>meta.function.return-type</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#FF79C6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Punctuation Section Block Begin</string>
<key>scope</key>
<string>punctuation.section.block.begin</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ffffff</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Punctuation Section Block End</string>
<key>scope</key>
<string>punctuation.section.block.end</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ffffff</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Punctuation Section Embedded Begin</string>
<key>scope</key>
<string>punctuation.section.embedded.begin</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ff79c6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Punctuation Section Embedded End</string>
<key>scope</key>
<string>punctuation.section.embedded.end</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ff79c6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Punctuation Separator Namespace</string>
<key>scope</key>
<string>punctuation.separator.namespace</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ff79c6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Variable Function</string>
<key>scope</key>
<string>variable.function</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#50fa7b</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Variable Other</string>
<key>scope</key>
<string>variable.other</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ffffff</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Variable Language</string>
<key>scope</key>
<string>variable.language</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#bd93f9</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Entity Name Module Ruby</string>
<key>scope</key>
<string>entity.name.module.ruby</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#8be9fd</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Entity Name Constant Ruby</string>
<key>scope</key>
<string>entity.name.constant.ruby</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#bd93f9</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Support Function Builtin Ruby</string>
<key>scope</key>
<string>support.function.builtin.ruby</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ffffff</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Storage Type Namespace CS</string>
<key>scope</key>
<string>storage.type.namespace.cs</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#ff79c6</string>
</dict>
</dict>
<dict>
<key>name</key>
<string>Entity Name Namespace CS</string>
<key>scope</key>
<string>entity.name.namespace.cs</string>
<key>settings</key>
<dict>
<key>foreground</key>
<string>#8be9fd</string>
</dict>
</dict>
</array>
<key>uuid</key>
<string>83091B89-765E-4F0D-9275-0EC6CB084126</string>
<key>colorSpaceName</key>
<string>sRGB</string>
<key>semanticClass</key>
<string>theme.dracula</string>
<key>author</key>
<string>Zeno Rocha</string>
</dict>
</plist>

View file

@ -0,0 +1,2 @@
+++
+++

View file

@ -3,6 +3,7 @@
{% block content %} {% block content %}
{{ page.content | safe }} {{ page.content | safe }}
{{ page.relative_path | safe }} {{ page.relative_path | safe }}
{{ toc }}
{% if page.earlier %}Previous article: {{ page.earlier.permalink }}{% endif %} {% if page.earlier %}Previous article: {{ page.earlier.permalink }}{% endif %}
{% if page.later %}Next article: {{ page.later.permalink }}{% endif %} {% if page.later %}Next article: {{ page.later.permalink }}{% endif %}

View file

@ -0,0 +1 @@
Oops

View file

@ -13,6 +13,11 @@ build_search_index = false
generate_rss = true generate_rss = true
taxonomies = [
{name = "authors", rss = true},
{name = "auteurs", lang = "fr"},
]
languages = [ languages = [
{code = "fr", rss = true}, {code = "fr", rss = true},
{code = "it", rss = false}, {code = "it", rss = false},

View file

@ -1,6 +1,9 @@
+++ +++
title = "Quelque chose" title = "Quelque chose"
date = 2018-10-09 date = 2018-10-09
[taxonomies]
auteurs = ["Vincent Prouillet"]
+++ +++
Un article Un article

View file

@ -1,6 +1,9 @@
+++ +++
title = "Something" title = "Something"
date = 2018-10-09 date = 2018-10-09
[taxonomies]
authors = ["Queen Elizabeth"]
+++ +++
A blog post A blog post

View file

@ -0,0 +1,3 @@
{% for author in terms %}
{{ author.name }} {{ author.slug }} {{ author.pages | length }}
{% endfor %}

View file

@ -0,0 +1,21 @@
{% if not paginator %}
Tag: {{ term.name }}
{% for page in term.pages %}
<article>
<h3 class="post__title"><a href="{{ page.permalink | safe }}">{{ page.title | safe }}</a></h3>
</article>
{% endfor %}
{% else %}
Tag: {{ term.name }}
{% for page in paginator.pages %}
{{page.title|safe}}
{% endfor %}
Num pagers: {{ paginator.number_pagers }}
Page size: {{ paginator.paginate_by }}
Current index: {{ paginator.current_index }}
First: {{ paginator.first | safe }}
Last: {{ paginator.last | safe }}
{% if paginator.previous %}has_prev{% endif%}
{% if paginator.next %}has_next{% endif%}
{% endif %}

View file

@ -0,0 +1,3 @@
{% for term in terms %}
{{ term.name }} {{ term.slug }} {{ term.pages | length }}
{% endfor %}

View file

@ -0,0 +1,21 @@
{% if not paginator %}
Tag: {{ term.name }}
{% for page in term.pages %}
<article>
<h3 class="post__title"><a href="{{ page.permalink | safe }}">{{ page.title | safe }}</a></h3>
</article>
{% endfor %}
{% else %}
Tag: {{ term.name }}
{% for page in paginator.pages %}
{{page.title|safe}}
{% endfor %}
Num pagers: {{ paginator.number_pagers }}
Page size: {{ paginator.paginate_by }}
Current index: {{ paginator.current_index }}
First: {{ paginator.first | safe }}
Last: {{ paginator.last | safe }}
{% if paginator.previous %}has_prev{% endif%}
{% if paginator.next %}has_next{% endif%}
{% endif %}