Merge pull request #1189 from getzola/next

Next version
Vincent Prouillet 2021-01-09 14:51:23 +01:00 committed by GitHub
commit 1ef8c85f53
185 changed files with 3455 additions and 1677 deletions

.gitmodules (vendored): 43 changed lines

@ -2,68 +2,71 @@
path = sublime/syntaxes/Packages
url = https://github.com/sublimehq/Packages.git
[submodule "sublime/syntaxes/awk-sublime"]
path = sublime/syntaxes/awk-sublime
path = sublime/syntaxes/extra/awk-sublime
url = https://github.com/JohnNilsson/awk-sublime.git
[submodule "sublime/syntaxes/AsciiDoc"]
path = sublime/syntaxes/AsciiDoc
url = https://github.com/SublimeText/AsciiDoc.git
[submodule "sublime/syntaxes/Sublime-CMakeLists"]
path = sublime/syntaxes/Sublime-CMakeLists
path = sublime/syntaxes/extra/Sublime-CMakeLists
url = https://github.com/zyxar/Sublime-CMakeLists.git
[submodule "sublime/syntaxes/SublimeTextLinkerSyntax"]
path = sublime/syntaxes/SublimeTextLinkerSyntax
path = sublime/syntaxes/extra/SublimeTextLinkerSyntax
url = https://github.com/jbw3/SublimeTextLinkerSyntax
[submodule "sublime/syntaxes/Docker.tmbundle"]
path = sublime/syntaxes/Docker.tmbundle
path = sublime/syntaxes/extra/Docker.tmbundle
url = https://github.com/asbjornenge/Docker.tmbundle.git
[submodule "sublime/syntaxes/Sublime-VimL"]
path = sublime/syntaxes/Sublime-VimL
url = https://github.com/SalGnt/Sublime-VimL.git
[submodule "sublime/syntaxes/elixir-sublime-syntax"]
path = sublime/syntaxes/elixir-sublime-syntax
path = sublime/syntaxes/extra/elixir-sublime-syntax
url = https://github.com/princemaple/elixir-sublime-syntax.git
[submodule "sublime/syntaxes/SublimeElmLanguageSupport"]
path = sublime/syntaxes/SublimeElmLanguageSupport
path = sublime/syntaxes/extra/SublimeElmLanguageSupport
url = https://github.com/elm-community/SublimeElmLanguageSupport.git
[submodule "sublime/syntaxes/sublimetext-fsharp"]
path = sublime/syntaxes/sublimetext-fsharp
path = sublime/syntaxes/extra/sublimetext-fsharp
url = https://github.com/hoest/sublimetext-fsharp.git
[submodule "sublime/syntaxes/sublime-fish"]
path = sublime/syntaxes/sublime-fish
path = sublime/syntaxes/extra/sublime-fish
url = https://github.com/Phidica/sublime-fish.git
[submodule "sublime/syntaxes/SublimeFortran"]
path = sublime/syntaxes/SublimeFortran
path = sublime/syntaxes/extra/SublimeFortran
url = https://github.com/315234/SublimeFortran.git
[submodule "sublime/syntaxes/GraphQL-SublimeText3"]
path = sublime/syntaxes/GraphQL-SublimeText3
path = sublime/syntaxes/extra/GraphQL-SublimeText3
url = https://github.com/dncrews/GraphQL-SublimeText3.git
[submodule "sublime/syntaxes/Sublime-GenericConfig"]
path = sublime/syntaxes/Sublime-GenericConfig
path = sublime/syntaxes/extra/Sublime-GenericConfig
url = https://github.com/skozlovf/Sublime-GenericConfig.git
[submodule "sublime/syntaxes/sublime-jinja2"]
path = sublime/syntaxes/sublime-jinja2
path = sublime/syntaxes/extra/sublime-jinja2
url = https://github.com/Martin819/sublime-jinja2.git
[submodule "sublime/syntaxes/Julia-sublime"]
path = sublime/syntaxes/Julia-sublime
path = sublime/syntaxes/extra/Julia-sublime
url = https://github.com/JuliaEditorSupport/Julia-sublime.git
[submodule "sublime/syntaxes/LESS-sublime"]
path = sublime/syntaxes/LESS-sublime
path = sublime/syntaxes/extra/LESS-sublime
url = https://github.com/danro/LESS-sublime.git
[submodule "sublime/syntaxes/sublime-purescript-syntax"]
path = sublime/syntaxes/sublime-purescript-syntax
path = sublime/syntaxes/extra/sublime-purescript-syntax
url = https://github.com/tellnobody1/sublime-purescript-syntax.git
[submodule "sublime/syntaxes/SublimeSass"]
path = sublime/syntaxes/SublimeSass
path = sublime/syntaxes/extra/SublimeSass
url = https://github.com/braver/SublimeSass.git
[submodule "sublime/syntaxes/sublime_toml_highlighting"]
path = sublime/syntaxes/sublime_toml_highlighting
path = sublime/syntaxes/extra/sublime_toml_highlighting
url = https://github.com/jasonwilliams/sublime_toml_highlighting.git
[submodule "sublime/syntaxes/vue-syntax-highlight"]
path = sublime/syntaxes/vue-syntax-highlight
path = sublime/syntaxes/extra/vue-syntax-highlight
url = https://github.com/vuejs/vue-syntax-highlight.git
[submodule "sublime/syntaxes/sublime-glsl"]
path = sublime/syntaxes/sublime-glsl
path = sublime/syntaxes/extra/sublime-glsl
url = https://github.com/euler0/sublime-glsl.git
[submodule "sublime/syntaxes/GDScript-sublime"]
path = sublime/syntaxes/GDScript-sublime
path = sublime/syntaxes/extra/GDScript-sublime
url = https://github.com/beefsack/GDScript-sublime.git
[submodule "sublime/syntaxes/extra/sublime-clojure"]
path = sublime/syntaxes/extra/sublime-clojure
url = https://github.com/tonsky/sublime-clojure.git


@ -1,5 +1,29 @@
# Changelog
## 0.13.0 (2021-01-09)
- Enable HTML minification
- Support `output_dir` in `config.toml`
- Allow sections to be drafted
- Allow specifying default language in filenames
- Render emoji in Markdown content if the `render_emoji` option is enabled
- Enable YouTube privacy mode for the YouTube shortcode
- Add the language as a class on the `<code>` block and as a `data-lang` attribute
- Add bibtex to `load_data`
- Add a `[markdown]` section to `config.toml` to configure rendering (see the sketch after this list)
- Add `highlight_code` and `highlight_theme` to a `[markdown]` section in `config.toml`
- Add `external_links_target_blank`, `external_links_no_follow` and `external_links_no_referrer`
- Add a `smart_punctuation` option in the `[markdown]` section in `config.toml` to turn elements like dots and dashes
into their typographic forms
- Add an iteration count variable `nth` for shortcodes, so a shortcode can know how many times it has been invoked in a given piece of content
- Update some highlighting syntaxes; the TS syntax is now used instead of the JS one because of issues with the latter
- Remove `zola serve --watch-only`: since we build the HTML in memory and not on disk, it doesn't make sense anymore
- Update clojure syntax
- Prefer extra syntaxes to the default ones when there is a match for the language
- Fix `zola serve` having issues with non-ASCII paths
- 404 page now gets the site default language as `lang`
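For reference, a minimal sketch of the new `[markdown]` table in action, written in the same test style as the config tests later in this diff; the option values are illustrative, the test name is hypothetical, and `Config` from the config component is assumed to be in scope:

```rust
#[test]
fn sketch_markdown_section() {
    // Illustrative values only; the option names come from the new `markup::Markdown` struct.
    let raw = r#"
title = "My site"
base_url = "https://replace-this-with-your-url.com"

[markdown]
highlight_code = true
render_emoji = true
external_links_target_blank = true
smart_punctuation = true
"#;
    let config = Config::parse(raw).unwrap();
    assert!(config.markdown.highlight_code);
    assert!(config.markdown.render_emoji);
    assert!(config.markdown.smart_punctuation);
}
```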
## 0.12.2 (2020-09-28)
- Fix `zola serve` being broken on reload

Cargo.lock (generated): 1013 changed lines (diff suppressed because it is too large)


@ -1,6 +1,6 @@
[package]
name = "zola"
version = "0.12.2"
version = "0.13.0"
authors = ["Vincent Prouillet <hello@vincentprouillet.com>"]
edition = "2018"
license = "MIT"
@ -27,15 +27,16 @@ termcolor = "1.0.4"
# Used in init to ensure the url given as base_url is a valid one
url = "2"
# Below is for the serve cmd
hyper = { version = "0.13", default-features = false, features = ["runtime"] }
hyper-staticfile = "0.5"
tokio = { version = "0.2", default-features = false, features = [] }
hyper = { version = "0.14.1", default-features = false, features = ["runtime", "server", "http2", "http1"] }
tokio = { version = "1.0.1", default-features = false, features = ["rt", "fs"] }
percent-encoding = "2"
notify = "4"
ws = "0.9"
ctrlc = "3"
open = "1.2"
globset = "0.4"
relative-path = "1"
serde_json = "1.0"
site = { path = "components/site" }
errors = { path = "components/errors" }


@ -21,7 +21,7 @@ stages:
rustup_toolchain: stable
linux-pinned:
imageName: 'ubuntu-20.04'
rustup_toolchain: 1.43.0
rustup_toolchain: 1.45.2
pool:
vmImage: $(imageName)
steps:


@ -7,6 +7,7 @@ use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::iter::FromIterator;
use std::path::Path;
use syntect::dumps::*;
use syntect::highlighting::ThemeSet;
use syntect::parsing::SyntaxSetBuilder;
@ -26,10 +27,24 @@ fn main() {
(Some(ref cmd), Some(ref package_dir), Some(ref packpath_newlines)) if cmd == "synpack" => {
let mut builder = SyntaxSetBuilder::new();
builder.add_plain_text_syntax();
match builder.add_from_folder(package_dir, true) {
let base_path = Path::new(&package_dir).to_path_buf();
// First the official Sublime packages
let mut default = base_path.clone();
default.push("Packages");
match builder.add_from_folder(&default, true) {
Ok(_) => (),
Err(e) => println!("Loading error: {:?}", e),
};
// and then the ones we add
let mut extra = base_path.clone();
extra.push("extra");
match builder.add_from_folder(&extra, true) {
Ok(_) => (),
Err(e) => println!("Loading error: {:?}", e),
};
let ss = builder.build();
dump_to_file(&ss, packpath_newlines).unwrap();
let mut syntaxes: HashMap<String, HashSet<String>> = HashMap::new();


@ -0,0 +1,80 @@
use serde_derive::{Deserialize, Serialize};
use syntect::parsing::SyntaxSet;
pub const DEFAULT_HIGHLIGHT_THEME: &str = "base16-ocean-dark";
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(default)]
pub struct Markdown {
/// Whether to highlight all code blocks found in markdown files. Defaults to false
pub highlight_code: bool,
/// Which themes to use for code highlighting. See Readme for supported themes
/// Defaults to "base16-ocean-dark"
pub highlight_theme: String,
/// Whether to render emoji aliases (e.g.: :smile: => 😄) in the markdown files
pub render_emoji: bool,
/// Whether external links are to be opened in a new tab
/// If this is true, a `rel="noopener"` will always automatically be added for security reasons
pub external_links_target_blank: bool,
/// Whether to set rel="nofollow" for all external links
pub external_links_no_follow: bool,
/// Whether to set rel="noreferrer" for all external links
pub external_links_no_referrer: bool,
/// Whether smart punctuation is enabled (changing quotes, dashes, dots etc in their typographic form)
pub smart_punctuation: bool,
/// A list of directories to search for additional `.sublime-syntax` files in.
pub extra_syntaxes: Vec<String>,
/// The compiled extra syntaxes into a syntax set
#[serde(skip_serializing, skip_deserializing)] // not a typo, both are needed
pub extra_syntax_set: Option<SyntaxSet>,
}
impl Markdown {
pub fn has_external_link_tweaks(&self) -> bool {
self.external_links_target_blank
|| self.external_links_no_follow
|| self.external_links_no_referrer
}
pub fn construct_external_link_tag(&self, url: &str, title: &str) -> String {
let mut rel_opts = Vec::new();
let mut target = "".to_owned();
let title = if title == "" { "".to_owned() } else { format!("title=\"{}\" ", title) };
if self.external_links_target_blank {
// Security risk otherwise
rel_opts.push("noopener");
target = "target=\"_blank\" ".to_owned();
}
if self.external_links_no_follow {
rel_opts.push("nofollow");
}
if self.external_links_no_referrer {
rel_opts.push("noreferrer");
}
let rel = if rel_opts.is_empty() {
"".to_owned()
} else {
format!("rel=\"{}\" ", rel_opts.join(" "))
};
format!("<a {}{}{}href=\"{}\">", rel, target, title, url)
}
}
impl Default for Markdown {
fn default() -> Markdown {
Markdown {
highlight_code: false,
highlight_theme: DEFAULT_HIGHLIGHT_THEME.to_owned(),
render_emoji: false,
external_links_target_blank: false,
external_links_no_follow: false,
external_links_no_referrer: false,
smart_punctuation: false,
extra_syntaxes: vec![],
extra_syntax_set: None,
}
}
}
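To make the external-link handling concrete, here is a small hedged sketch of the anchor tag that `construct_external_link_tag` builds for one combination of options, worked out from the function above and assuming the `Markdown` struct is in scope:

```rust
// `noopener` is always added when target="_blank" is requested, for security.
let md = Markdown {
    external_links_target_blank: true,
    external_links_no_follow: true,
    ..Markdown::default()
};
assert_eq!(
    md.construct_external_link_tag("https://example.com/", ""),
    r#"<a rel="noopener nofollow" target="_blank" href="https://example.com/">"#
);
```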


@ -1,5 +1,6 @@
pub mod languages;
pub mod link_checker;
pub mod markup;
pub mod search;
pub mod slugify;
pub mod taxonomies;
@ -9,7 +10,7 @@ use std::path::{Path, PathBuf};
use globset::{Glob, GlobSet, GlobSetBuilder};
use serde_derive::{Deserialize, Serialize};
use syntect::parsing::{SyntaxSet, SyntaxSetBuilder};
use syntect::parsing::SyntaxSetBuilder;
use toml::Value as Toml;
use crate::highlighting::THEME_SET;
@ -55,10 +56,10 @@ pub struct Config {
translations: HashMap<String, languages::TranslateTerm>,
/// Whether to highlight all code blocks found in markdown files. Defaults to false
pub highlight_code: bool,
highlight_code: bool,
/// Which themes to use for code highlighting. See Readme for supported themes
/// Defaults to "base16-ocean-dark"
pub highlight_theme: String,
highlight_theme: String,
/// Whether to generate a feed. Defaults to false.
pub generate_feed: bool,
@ -92,9 +93,8 @@ pub struct Config {
/// A list of directories to search for additional `.sublime-syntax` files in.
pub extra_syntaxes: Vec<String>,
/// The compiled extra syntaxes into a syntax set
#[serde(skip_serializing, skip_deserializing)] // not a typo, both are needed
pub extra_syntax_set: Option<SyntaxSet>,
pub output_dir: String,
pub link_checker: link_checker::LinkChecker,
@ -104,6 +104,9 @@ pub struct Config {
/// The search config, telling what to include in the search index
pub search: search::Search,
/// The config for the Markdown rendering: syntax highlighting and everything
pub markdown: markup::Markdown,
/// All user params set in [extra] in the config
pub extra: HashMap<String, Toml>,
}
@ -153,8 +156,12 @@ impl Config {
}
}
// TODO: re-enable once it's a bit more tested
config.minify_html = false;
if config.highlight_code {
println!("`highlight_code` has been moved to a [markdown] section. Top level `highlight_code` and `highlight_theme` will stop working in 0.14.");
}
if !config.extra_syntaxes.is_empty() {
println!("`extra_syntaxes` has been moved to a [markdown] section. Top level `extra_syntaxes` will stop working in 0.14.");
}
Ok(config)
}
@ -170,17 +177,56 @@ impl Config {
Config::parse(&content)
}
/// Temporary, while we have the settings in 2 places
/// TODO: remove me in 0.14
pub fn highlight_code(&self) -> bool {
if !self.highlight_code && !self.markdown.highlight_code {
return false;
}
if self.highlight_code {
true
} else {
self.markdown.highlight_code
}
}
/// Temporary, while we have the settings in 2 places
/// TODO: remove me in 0.14
pub fn highlight_theme(&self) -> &str {
if self.highlight_theme != markup::DEFAULT_HIGHLIGHT_THEME {
&self.highlight_theme
} else {
&self.markdown.highlight_theme
}
}
/// TODO: remove me in 0.14
pub fn extra_syntaxes(&self) -> Vec<String> {
if !self.markdown.extra_syntaxes.is_empty() {
return self.markdown.extra_syntaxes.clone();
}
if !self.extra_syntaxes.is_empty() {
return self.extra_syntaxes.clone();
}
Vec::new()
}
/// Attempt to load any extra syntax found in the extra syntaxes of the config
/// TODO: move to markup.rs in 0.14
pub fn load_extra_syntaxes(&mut self, base_path: &Path) -> Result<()> {
if self.extra_syntaxes.is_empty() {
let extra_syntaxes = self.extra_syntaxes();
if extra_syntaxes.is_empty() {
return Ok(());
}
let mut ss = SyntaxSetBuilder::new();
for dir in &self.extra_syntaxes {
for dir in &extra_syntaxes {
ss.add_from_folder(base_path.join(dir), true)?;
}
self.extra_syntax_set = Some(ss.build());
self.markdown.extra_syntax_set = Some(ss.build());
Ok(())
}
@ -332,10 +378,11 @@ impl Default for Config {
ignored_content_globset: None,
translations: HashMap::new(),
extra_syntaxes: Vec::new(),
extra_syntax_set: None,
output_dir: "public".to_string(),
link_checker: link_checker::LinkChecker::default(),
slugify: slugify::Slugify::default(),
search: search::Search::default(),
markdown: markup::Markdown::default(),
extra: HashMap::new(),
}
}
@ -654,4 +701,27 @@ bar = "baz"
// We expect an error here
assert_eq!(false, config.add_theme_extra(&theme).is_ok());
}
#[test]
fn default_output_dir() {
let config = r#"
title = "My site"
base_url = "https://replace-this-with-your-url.com"
"#;
let config = Config::parse(config).unwrap();
assert_eq!(config.output_dir, "public".to_string());
}
#[test]
fn can_add_output_dir() {
let config = r#"
title = "My site"
base_url = "https://replace-this-with-your-url.com"
output_dir = "docs"
"#;
let config = Config::parse(config).unwrap();
assert_eq!(config.output_dir, "docs".to_string());
}
}
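The `highlight_code()` and `highlight_theme()` shims above keep the deprecated top-level keys working until 0.14. A hedged sketch of the precedence they imply, reusing the `Config::parse` pattern from the tests above (config snippets are illustrative):

```rust
// The deprecated top-level key still takes effect during the transition...
let old_style = Config::parse(r#"
title = "My site"
base_url = "https://replace-this-with-your-url.com"
highlight_code = true
"#).unwrap();
assert!(old_style.highlight_code());

// ...and the new [markdown] table is used once the old key is gone.
let new_style = Config::parse(r#"
title = "My site"
base_url = "https://replace-this-with-your-url.com"

[markdown]
highlight_code = true
"#).unwrap();
assert!(new_style.highlight_code());
```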


@ -18,23 +18,24 @@ lazy_static! {
/// Returns the highlighter and whether it was found in the extra or not
pub fn get_highlighter(language: Option<&str>, config: &Config) -> (HighlightLines<'static>, bool) {
let theme = &THEME_SET.themes[&config.highlight_theme];
let theme = &THEME_SET.themes[config.highlight_theme()];
let mut in_extra = false;
if let Some(ref lang) = language {
let syntax = SYNTAX_SET
.find_syntax_by_token(lang)
.or_else(|| {
if let Some(ref extra) = config.extra_syntax_set {
let syntax = if let Some(ref extra) = config.markdown.extra_syntax_set {
let s = extra.find_syntax_by_token(lang);
if s.is_some() {
in_extra = true;
}
s
} else {
None
// The JS syntax hangs a lot... the TS syntax is probably better anyway.
// https://github.com/getzola/zola/issues/1241
// https://github.com/getzola/zola/issues/1211
// https://github.com/getzola/zola/issues/1174
let hacked_lang = if *lang == "js" || *lang == "javascript" { "ts" } else { lang };
SYNTAX_SET.find_syntax_by_token(hacked_lang)
}
})
.unwrap_or_else(|| SYNTAX_SET.find_syntax_plain_text());
(HighlightLines::new(syntax, theme), in_extra)
} else {


@ -57,7 +57,7 @@ impl Error {
}
/// Create an error from a list of path collisions, formatting the output
pub fn from_collisions(collisions: Vec<(&str, Vec<String>)>) -> Self {
pub fn from_collisions(collisions: Vec<(String, Vec<String>)>) -> Self {
let mut msg = String::from("Found path collisions:\n");
for (path, filepaths) in collisions {


@ -9,9 +9,17 @@ tera = "1"
chrono = "0.4"
serde = "1"
serde_derive = "1"
serde_yaml = "0.8"
toml = "0.5"
regex = "1"
lazy_static = "1"
errors = { path = "../errors" }
utils = { path = "../utils" }
[dev-dependencies]
# Remove from git patch when 1.0.1 is released
# https://github.com/frondeus/test-case/issues/62
# test-case = "1.0"
test-case = { git = "https://github.com/frondeus/test-case" }


@ -3,7 +3,9 @@ use serde_derive::{Deserialize, Serialize};
use errors::{bail, Error, Result};
use regex::Regex;
use serde_yaml;
use std::path::Path;
use toml;
mod page;
mod section;
@ -12,8 +14,31 @@ pub use page::PageFrontMatter;
pub use section::SectionFrontMatter;
lazy_static! {
static ref PAGE_RE: Regex =
static ref TOML_RE: Regex =
Regex::new(r"^[[:space:]]*\+\+\+(\r?\n(?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
static ref YAML_RE: Regex =
Regex::new(r"^[[:space:]]*---(\r?\n(?s).*?(?-s))---\r?\n?((?s).*(?-s))$").unwrap();
}
pub enum RawFrontMatter<'a> {
Toml(&'a str),
Yaml(&'a str),
}
impl RawFrontMatter<'_> {
fn deserialize<T>(&self) -> Result<T>
where
T: serde::de::DeserializeOwned,
{
let f: T = match self {
RawFrontMatter::Toml(s) => toml::from_str(s)?,
RawFrontMatter::Yaml(s) => match serde_yaml::from_str(s) {
Ok(d) => d,
Err(e) => bail!(format!("YAML deserialize error: {:?}", e)),
},
};
Ok(f)
}
}
#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
@ -37,20 +62,30 @@ pub enum InsertAnchor {
/// Split a file between the front matter and its content
/// Will return an error if the front matter wasn't found
fn split_content<'c>(file_path: &Path, content: &'c str) -> Result<(&'c str, &'c str)> {
if !PAGE_RE.is_match(content) {
fn split_content<'c>(file_path: &Path, content: &'c str) -> Result<(RawFrontMatter<'c>, &'c str)> {
let (re, is_toml) = if TOML_RE.is_match(content) {
(&TOML_RE as &Regex, true)
} else if YAML_RE.is_match(content) {
(&YAML_RE as &Regex, false)
} else {
bail!(
"Couldn't find front matter in `{}`. Did you forget to add `+++`?",
"Couldn't find front matter in `{}`. Did you forget to add `+++` or `---`?",
file_path.to_string_lossy()
);
}
};
// 2. extract the front matter and the content
let caps = PAGE_RE.captures(content).unwrap();
let caps = re.captures(content).unwrap();
// caps[0] is the full match
// caps[1] => front matter
// caps[2] => content
Ok((caps.get(1).unwrap().as_str(), caps.get(2).unwrap().as_str()))
let front_matter = caps.get(1).unwrap().as_str();
let content = caps.get(2).unwrap().as_str();
if is_toml {
Ok((RawFrontMatter::Toml(front_matter), content))
} else {
Ok((RawFrontMatter::Yaml(front_matter), content))
}
}
/// Split a file between the front matter and its content.
@ -88,71 +123,125 @@ pub fn split_page_content<'c>(
#[cfg(test)]
mod tests {
use std::path::Path;
use test_case::test_case;
use super::{split_page_content, split_section_content};
#[test]
fn can_split_page_content_valid() {
let content = r#"
#[test_case(r#"
+++
title = "Title"
description = "hey there"
date = 2002-10-12
+++
Hello
"#;
"#; "toml")]
#[test_case(r#"
---
title: Title
description: hey there
date: 2002-10-12
---
Hello
"#; "yaml")]
fn can_split_page_content_valid(content: &str) {
let (front_matter, content) = split_page_content(Path::new(""), content).unwrap();
assert_eq!(content, "Hello\n");
assert_eq!(front_matter.title.unwrap(), "Title");
}
#[test]
fn can_split_section_content_valid() {
let content = r#"
#[test_case(r#"
+++
paginate_by = 10
+++
Hello
"#;
"#; "toml")]
#[test_case(r#"
---
paginate_by: 10
---
Hello
"#; "yaml")]
fn can_split_section_content_valid(content: &str) {
let (front_matter, content) = split_section_content(Path::new(""), content).unwrap();
assert_eq!(content, "Hello\n");
assert!(front_matter.is_paginated());
}
#[test]
fn can_split_content_with_only_frontmatter_valid() {
let content = r#"
#[test_case(r#"
+++
title = "Title"
description = "hey there"
date = 2002-10-12
+++"#;
+++"#; "toml")]
#[test_case(r#"
---
title: Title
description: hey there
date: 2002-10-12
---"#; "yaml")]
fn can_split_content_with_only_frontmatter_valid(content: &str) {
let (front_matter, content) = split_page_content(Path::new(""), content).unwrap();
assert_eq!(content, "");
assert_eq!(front_matter.title.unwrap(), "Title");
}
#[test]
fn can_split_content_lazily() {
let content = r#"
#[test_case(r#"
+++
title = "Title"
description = "hey there"
date = 2002-10-02T15:00:00Z
+++
+++"#;
let (front_matter, content) = split_page_content(Path::new(""), content).unwrap();
assert_eq!(content, "+++");
assert_eq!(front_matter.title.unwrap(), "Title");
}
#[test]
fn errors_if_cannot_locate_frontmatter() {
let content = r#"
+++"#, "+++"; "toml with pluses in content")]
#[test_case(r#"
+++
title = "Title"
description = "hey there"
date = 2002-10-12"#;
date = 2002-10-02T15:00:00Z
+++
---"#, "---"; "toml with minuses in content")]
#[test_case(r#"
---
title: Title
description: hey there
date: 2002-10-02T15:00:00Z
---
+++"#, "+++"; "yaml with pluses in content")]
#[test_case(r#"
---
title: Title
description: hey there
date: 2002-10-02T15:00:00Z
---
---"#, "---"; "yaml with minuses in content")]
fn can_split_content_lazily(content: &str, expected: &str) {
let (front_matter, content) = split_page_content(Path::new(""), content).unwrap();
assert_eq!(content, expected);
assert_eq!(front_matter.title.unwrap(), "Title");
}
#[test_case(r#"
+++
title = "Title"
description = "hey there"
date = 2002-10-12"#; "toml")]
#[test_case(r#"
+++
title = "Title"
description = "hey there"
date = 2002-10-12
---"#; "toml unmatched")]
#[test_case(r#"
---
title: Title
description: hey there
date: 2002-10-12"#; "yaml")]
#[test_case(r#"
---
title: Title
description: hey there
date: 2002-10-12
+++"#; "yaml unmatched")]
fn errors_if_cannot_locate_frontmatter(content: &str) {
let res = split_page_content(Path::new(""), content);
assert!(res.is_err());
}


@ -7,6 +7,8 @@ use tera::{Map, Value};
use errors::{bail, Result};
use utils::de::{fix_toml_dates, from_toml_datetime};
use crate::RawFrontMatter;
/// The front matter of every page
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(default)]
@ -69,11 +71,8 @@ fn parse_datetime(d: &str) -> Option<NaiveDateTime> {
}
impl PageFrontMatter {
pub fn parse(toml: &str) -> Result<PageFrontMatter> {
let mut f: PageFrontMatter = match toml::from_str(toml) {
Ok(d) => d,
Err(e) => bail!(e),
};
pub fn parse(raw: &RawFrontMatter) -> Result<PageFrontMatter> {
let mut f: PageFrontMatter = raw.deserialize()?;
if let Some(ref slug) = f.slug {
if slug == "" {
@ -140,21 +139,27 @@ impl Default for PageFrontMatter {
#[cfg(test)]
mod tests {
use super::PageFrontMatter;
use super::RawFrontMatter;
use tera::to_value;
use test_case::test_case;
#[test]
fn can_have_empty_front_matter() {
let content = r#" "#;
#[test_case(&RawFrontMatter::Toml(r#" "#); "toml")]
#[test_case(&RawFrontMatter::Toml(r#" "#); "yaml")]
fn can_have_empty_front_matter(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
println!("{:?}", res);
assert!(res.is_ok());
}
#[test]
fn can_parse_valid_front_matter() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there""#;
description = "hey there"
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
"#); "yaml")]
fn can_parse_valid_front_matter(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
assert!(res.is_ok());
let res = res.unwrap();
@ -162,160 +167,237 @@ mod tests {
assert_eq!(res.description.unwrap(), "hey there".to_string())
}
#[test]
fn errors_with_invalid_front_matter() {
let content = r#"title = 1\n"#;
#[test_case(&RawFrontMatter::Toml(r#"title = |\n"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"title: |\n"#); "yaml")]
fn errors_with_invalid_front_matter(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
assert!(res.is_err());
}
#[test]
fn errors_on_present_but_empty_slug() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
slug = """#;
slug = ""
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
slug: ""
"#); "yaml")]
fn errors_on_present_but_empty_slug(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
assert!(res.is_err());
}
#[test]
fn errors_on_present_but_empty_path() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
path = """#;
path = ""
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
path: ""
"#); "yaml")]
fn errors_on_present_but_empty_path(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
assert!(res.is_err());
}
#[test]
fn can_parse_date_yyyy_mm_dd() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
date = 2016-10-10
"#;
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
date: 2016-10-10
"#); "yaml")]
fn can_parse_date_yyyy_mm_dd(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content).unwrap();
assert!(res.datetime.is_some());
}
#[test]
fn can_parse_date_rfc3339() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
date = 2002-10-02T15:00:00Z
"#;
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
date: 2002-10-02T15:00:00Z
"#); "yaml")]
fn can_parse_date_rfc3339(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content).unwrap();
assert!(res.datetime.is_some());
}
#[test]
fn can_parse_date_rfc3339_without_timezone() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
date = 2002-10-02T15:00:00
"#;
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
date: 2002-10-02T15:00:00
"#); "yaml")]
fn can_parse_date_rfc3339_without_timezone(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content).unwrap();
assert!(res.datetime.is_some());
}
#[test]
fn can_parse_date_rfc3339_with_space() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
date = 2002-10-02 15:00:00+02:00
"#;
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
date: 2002-10-02 15:00:00+02:00
"#); "yaml")]
fn can_parse_date_rfc3339_with_space(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content).unwrap();
assert!(res.datetime.is_some());
}
#[test]
fn can_parse_date_rfc3339_with_space_without_timezone() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
date = 2002-10-02 15:00:00
"#;
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
date: 2002-10-02 15:00:00
"#); "yaml")]
fn can_parse_date_rfc3339_with_space_without_timezone(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content).unwrap();
assert!(res.datetime.is_some());
}
#[test]
fn can_parse_date_rfc3339_with_microseconds() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
date = 2002-10-02T15:00:00.123456Z
"#;
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
date: 2002-10-02T15:00:00.123456Z
"#); "yaml")]
fn can_parse_date_rfc3339_with_microseconds(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content).unwrap();
assert!(res.datetime.is_some());
}
#[test]
fn cannot_parse_random_date_format() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
date = 2002/10/12"#;
date = 2002/10/12
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
date: 2002/10/12
"#); "yaml")]
fn cannot_parse_random_date_format(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
assert!(res.is_err());
}
#[test]
fn cannot_parse_invalid_date_format() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
date = 2002-14-01"#;
date = 2002-14-01
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
date: 2002-14-01
"#); "yaml")]
fn cannot_parse_invalid_date_format(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
assert!(res.is_err());
}
#[test]
fn cannot_parse_date_as_string() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
date = "2002-14-01""#;
date = "2016-10-10"
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
date: "2016-10-10"
"#); "yaml")]
fn can_parse_valid_date_as_string(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content).unwrap();
assert!(res.date.is_some());
}
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
date = "2002-14-01"
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
date: "2002-14-01"
"#); "yaml")]
fn cannot_parse_invalid_date_as_string(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
assert!(res.is_err());
}
#[test]
fn can_parse_dates_in_extra() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
[extra]
some-date = 2002-14-01"#;
some-date = 2002-14-01
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
extra:
some-date: 2002-14-01
"#); "yaml")]
fn can_parse_dates_in_extra(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
println!("{:?}", res);
assert!(res.is_ok());
assert_eq!(res.unwrap().extra["some-date"], to_value("2002-14-01").unwrap());
}
#[test]
fn can_parse_nested_dates_in_extra() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
[extra.something]
some-date = 2002-14-01"#;
some-date = 2002-14-01
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
extra:
something:
some-date: 2002-14-01
"#); "yaml")]
fn can_parse_nested_dates_in_extra(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
println!("{:?}", res);
assert!(res.is_ok());
assert_eq!(res.unwrap().extra["something"]["some-date"], to_value("2002-14-01").unwrap());
}
#[test]
fn can_parse_fully_nested_dates_in_extra() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello"
description = "hey there"
@ -323,22 +405,43 @@ mod tests {
date_example = 2020-05-04
[[extra.questions]]
date = 2020-05-03
name = "Who is the prime minister of Uganda?""#;
name = "Who is the prime minister of Uganda?"
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello
description: hey there
extra:
date_example: 2020-05-04
questions:
- date: 2020-05-03
name: "Who is the prime minister of Uganda?"
"#); "yaml")]
fn can_parse_fully_nested_dates_in_extra(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
println!("{:?}", res);
assert!(res.is_ok());
assert_eq!(res.unwrap().extra["questions"][0]["date"], to_value("2020-05-03").unwrap());
}
#[test]
fn can_parse_taxonomies() {
let content = r#"
#[test_case(&RawFrontMatter::Toml(r#"
title = "Hello World"
[taxonomies]
tags = ["Rust", "JavaScript"]
categories = ["Dev"]
"#;
"#); "toml")]
#[test_case(&RawFrontMatter::Yaml(r#"
title: Hello World
taxonomies:
tags:
- Rust
- JavaScript
categories:
- Dev
"#); "yaml")]
fn can_parse_taxonomies(content: &RawFrontMatter) {
let res = PageFrontMatter::parse(content);
println!("{:?}", res);
assert!(res.is_ok());


@ -2,9 +2,11 @@ use serde_derive::{Deserialize, Serialize};
use tera::{Map, Value};
use super::{InsertAnchor, SortBy};
use errors::{bail, Result};
use errors::Result;
use utils::de::fix_toml_dates;
use crate::RawFrontMatter;
static DEFAULT_PAGINATE_PATH: &str = "page";
/// The front matter of every section
@ -22,6 +24,8 @@ pub struct SectionFrontMatter {
/// Higher values means it will be at the end. Defaults to `0`
#[serde(skip_serializing)]
pub weight: usize,
/// whether the section is a draft
pub draft: bool,
/// Optional template, if we want to specify which template to render for that section
#[serde(skip_serializing)]
pub template: Option<String>,
@ -71,11 +75,8 @@ pub struct SectionFrontMatter {
}
impl SectionFrontMatter {
pub fn parse(toml: &str) -> Result<SectionFrontMatter> {
let mut f: SectionFrontMatter = match toml::from_str(toml) {
Ok(d) => d,
Err(e) => bail!(e),
};
pub fn parse(raw: &RawFrontMatter) -> Result<SectionFrontMatter> {
let mut f: SectionFrontMatter = raw.deserialize()?;
f.extra = match fix_toml_dates(f.extra) {
Value::Object(o) => o,
@ -114,6 +115,7 @@ impl Default for SectionFrontMatter {
aliases: Vec::new(),
generate_feed: false,
extra: Map::new(),
draft: false,
}
}
}
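A hedged illustration of the new `draft` flag for sections; it assumes `SectionFrontMatter` keeps a struct-level `#[serde(default)]` so the other keys can be omitted, and that `RawFrontMatter` is in scope:

```rust
// A section's _index.md can now opt out of the build with `draft = true`.
let fm = SectionFrontMatter::parse(&RawFrontMatter::Toml("draft = true")).unwrap();
assert!(fm.draft);
// With the default front matter, sections stay undrafted.
assert!(!SectionFrontMatter::default().draft);
```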


@ -129,6 +129,11 @@ impl FileInfo {
// We can document that
let mut parts: Vec<String> = self.name.splitn(2, '.').map(|s| s.to_string()).collect();
// If language code is same as default language, go for default
if config.default_language == parts[1].as_str() {
return Ok(config.default_language.clone());
}
// The language code is not present in the config: typo or the user forgot to add it to the
// config
if !config.languages_codes().contains(&parts[1].as_ref()) {
@ -189,6 +194,19 @@ mod tests {
assert_eq!(res.unwrap(), "fr");
}
#[test]
fn can_find_valid_language_with_default_locale() {
let mut config = Config::default();
config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
let mut file = FileInfo::new_page(
&Path::new("/home/vincent/code/site/content/posts/tutorials/python.en.md"),
&PathBuf::new(),
);
let res = file.find_language(&config);
assert!(res.is_ok());
assert_eq!(res.unwrap(), config.default_language);
}
#[test]
fn can_find_valid_language_in_page_with_assets() {
let mut config = Config::default();


@ -56,8 +56,6 @@ pub struct Section {
/// The language of that section. Equal to the default lang if the user doesn't setup `languages` in config.
/// Corresponds to the lang in the _index.{lang}.md file scheme
pub lang: String,
/// Contains all the translated version of that section
pub translations: Vec<DefaultKey>,
/// Contains the internal links that have an anchor: we can only check the anchor
/// after all pages have been built and their ToC compiled. The page itself should exist otherwise
/// it would have errored before getting there


@ -1,5 +1,6 @@
//! What we are sending to the templates when rendering them
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use serde_derive::Serialize;
@ -24,7 +25,13 @@ impl<'a> TranslatedContent<'a> {
pub fn find_all_sections(section: &'a Section, library: &'a Library) -> Vec<Self> {
let mut translations = vec![];
for key in &section.translations {
for key in library
.translations
.get(&section.file.canonical)
.or(Some(&HashSet::new()))
.unwrap()
.iter()
{
let other = library.get_section_by_key(*key);
translations.push(TranslatedContent {
lang: &other.lang,
@ -40,7 +47,9 @@ impl<'a> TranslatedContent<'a> {
pub fn find_all_pages(page: &'a Page, library: &'a Library) -> Vec<Self> {
let mut translations = vec![];
for key in &page.translations {
for key in
library.translations.get(&page.file.canonical).or(Some(&HashSet::new())).unwrap().iter()
{
let other = library.get_page_by_key(*key);
translations.push(TranslatedContent {
lang: &other.lang,


@ -41,6 +41,12 @@ pub struct Library {
pub paths_to_sections: HashMap<PathBuf, DefaultKey>,
/// Whether we need to look for translations
is_multilingual: bool,
// aliases -> files,
// so we can easily check for conflicts
pub reverse_aliases: HashMap<String, HashSet<String>>,
pub translations: HashMap<PathBuf, HashSet<DefaultKey>>,
}
impl Library {
@ -51,22 +57,52 @@ impl Library {
paths_to_pages: HashMap::with_capacity(cap_pages),
paths_to_sections: HashMap::with_capacity(cap_sections),
is_multilingual,
reverse_aliases: HashMap::new(),
translations: HashMap::new(),
}
}
fn insert_reverse_aliases(&mut self, entries: Vec<String>, file_rel_path: &str) {
for entry in entries {
self.reverse_aliases
.entry(entry)
.and_modify(|s| {
s.insert(file_rel_path.to_owned());
})
.or_insert_with(|| {
let mut s = HashSet::new();
s.insert(file_rel_path.to_owned());
s
});
}
}
/// Add a section and return its Key
pub fn insert_section(&mut self, section: Section) -> DefaultKey {
let path = section.file.path.clone();
let file_path = section.file.path.clone();
let rel_path = section.path.clone();
let mut entries = vec![rel_path.clone()];
entries.extend(section.meta.aliases.iter().map(|a| a.clone()).collect::<Vec<String>>());
self.insert_reverse_aliases(entries, &section.file.relative);
let key = self.sections.insert(section);
self.paths_to_sections.insert(path, key);
self.paths_to_sections.insert(file_path, key);
key
}
/// Add a page and return its Key
pub fn insert_page(&mut self, page: Page) -> DefaultKey {
let path = page.file.path.clone();
let file_path = page.file.path.clone();
let rel_path = page.path.clone();
let mut entries = vec![rel_path.clone()];
entries.extend(page.meta.aliases.iter().map(|a| a.clone()).collect::<Vec<String>>());
self.insert_reverse_aliases(entries, &page.file.relative);
let key = self.pages.insert(page);
self.paths_to_pages.insert(path, key);
self.paths_to_pages.insert(file_path, key);
key
}
@ -103,7 +139,7 @@ impl Library {
let mut ancestors: HashMap<PathBuf, Vec<_>> = HashMap::new();
let mut subsections: HashMap<PathBuf, Vec<_>> = HashMap::new();
for section in self.sections.values_mut() {
for (key, section) in self.sections.iter_mut() {
// Make sure the pages of a section are empty since we can call that many times on `serve`
section.pages = vec![];
section.ignored_pages = vec![];
@ -112,7 +148,7 @@ impl Library {
subsections
// Using the original filename to work for multi-lingual sections
.entry(grand_parent.join(&section.file.filename))
.or_insert_with(|| vec![])
.or_insert_with(Vec::new)
.push(section.file.path.clone());
}
@ -139,6 +175,16 @@ impl Library {
}
}
ancestors.insert(section.file.path.clone(), parents);
// populate translations if necessary
if self.is_multilingual {
self.translations
.entry(section.file.canonical.clone())
.and_modify(|trans| {
trans.insert(key);
})
.or_insert(set![key]);
};
}
for (key, page) in &mut self.pages {
@ -157,7 +203,7 @@ impl Library {
parent_is_transparent = section.meta.transparent;
}
page.ancestors =
ancestors.get(&parent_section_path).cloned().unwrap_or_else(|| vec![]);
ancestors.get(&parent_section_path).cloned().unwrap_or_else(Vec::new);
// Don't forget to push the actual parent
page.ancestors.push(*section_key);
@ -184,9 +230,18 @@ impl Library {
None => break,
}
}
// populate translations if necessary
if self.is_multilingual {
self.translations
.entry(page.file.canonical.clone())
.and_modify(|trans| {
trans.insert(key);
})
.or_insert(set![key]);
};
}
self.populate_translations();
self.sort_sections_pages();
let sections = self.paths_to_sections.clone();
@ -201,8 +256,7 @@ impl Library {
children.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b]));
section.subsections = children;
}
section.ancestors =
ancestors.get(&section.file.path).cloned().unwrap_or_else(|| vec![]);
section.ancestors = ancestors.get(&section.file.path).cloned().unwrap_or_else(Vec::new);
}
}
@ -276,51 +330,6 @@ impl Library {
}
}
/// Finds all the translations for each section/page and set the `translations`
/// field of each as needed
/// A no-op for sites without multiple languages
fn populate_translations(&mut self) {
if !self.is_multilingual {
return;
}
// Sections first
let mut sections_translations = HashMap::new();
for (key, section) in &self.sections {
sections_translations
.entry(section.file.canonical.clone()) // TODO: avoid this clone
.or_insert_with(Vec::new)
.push(key);
}
for (key, section) in self.sections.iter_mut() {
let translations = &sections_translations[&section.file.canonical];
if translations.len() == 1 {
section.translations = vec![];
continue;
}
section.translations = translations.iter().filter(|k| **k != key).cloned().collect();
}
// Same thing for pages
let mut pages_translations = HashMap::new();
for (key, page) in &self.pages {
pages_translations
.entry(page.file.canonical.clone()) // TODO: avoid this clone
.or_insert_with(Vec::new)
.push(key);
}
for (key, page) in self.pages.iter_mut() {
let translations = &pages_translations[&page.file.canonical];
if translations.len() == 1 {
page.translations = vec![];
continue;
}
page.translations = translations.iter().filter(|k| **k != key).cloned().collect();
}
}
/// Find all the orphan pages: pages that are in a folder without an `_index.md`
pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
let pages_in_sections =
@ -415,56 +424,17 @@ impl Library {
/// This will check every section/page paths + the aliases and ensure none of them
/// are colliding.
/// Returns (path colliding, [list of files causing that collision])
pub fn check_for_path_collisions(&self) -> Vec<(&str, Vec<String>)> {
let mut paths: HashMap<&str, HashSet<DefaultKey>> = HashMap::new();
for (key, page) in &self.pages {
paths
.entry(&page.path)
.and_modify(|s| {
s.insert(key);
})
.or_insert_with(|| set!(key));
for alias in &page.meta.aliases {
paths
.entry(&alias)
.and_modify(|s| {
s.insert(key);
})
.or_insert_with(|| set!(key));
}
}
for (key, section) in &self.sections {
if !section.meta.render {
continue;
}
paths
.entry(&section.path)
.and_modify(|s| {
s.insert(key);
})
.or_insert_with(|| set!(key));
}
let mut collisions = vec![];
for (p, keys) in paths {
if keys.len() > 1 {
let file_paths: Vec<String> = keys
pub fn check_for_path_collisions(&self) -> Vec<(String, Vec<String>)> {
self.reverse_aliases
.iter()
.map(|k| {
self.pages.get(*k).map(|p| p.file.relative.clone()).unwrap_or_else(|| {
self.sections.get(*k).map(|s| s.file.relative.clone()).unwrap()
})
})
.collect();
collisions.push((p, file_paths));
.filter_map(|(alias, files)| {
if files.len() > 1 {
Some((alias.clone(), files.clone().into_iter().collect::<Vec<_>>()))
} else {
None
}
}
collisions
})
.collect()
}
}
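The `reverse_aliases` map above replaces the old ad-hoc path bookkeeping: every output path and alias points to the set of source files that claim it, and a collision is simply any entry with more than one file. A self-contained sketch of that idea using plain standard-library types (not the actual `Library` API):

```rust
use std::collections::{HashMap, HashSet};

let mut reverse_aliases: HashMap<String, HashSet<String>> = HashMap::new();
for &(path, file) in &[
    ("/blog/hello/", "blog/hello.md"),
    ("/blog/hello/", "blog/old-name.md"), // an alias colliding with a real page
    ("/blog/other/", "blog/other.md"),
] {
    reverse_aliases.entry(path.to_owned()).or_default().insert(file.to_owned());
}

// Same filter as `check_for_path_collisions` above: keep paths with more than one claimant.
let collisions: Vec<_> =
    reverse_aliases.iter().filter(|(_, files)| files.len() > 1).collect();
assert_eq!(collisions.len(), 1);
```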


@ -237,7 +237,7 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonom
.get_mut(&taxo_key)
.unwrap()
.entry(term.to_string())
.or_insert_with(|| vec![])
.or_insert_with(Vec::new)
.push(key);
}
} else {


@ -11,9 +11,9 @@ config = { path = "../config" }
errors = { path = "../errors" }
[dependencies.reqwest]
version = "0.10"
version = "0.11"
default-features = false
features = ["blocking", "rustls-tls"]
[dev-dependencies]
mockito = "0.27"
mockito = "0.28"


@ -15,6 +15,7 @@ pest = "2"
pest_derive = "2"
regex = "1"
lazy_static = "1"
gh-emoji = "1.0"
errors = { path = "../errors" }
front_matter = { path = "../front_matter" }


@ -17,12 +17,12 @@ Lorem markdownum litora, care ponto nomina, et ut aspicit gelidas sui et
purpureo genuit. Tamen colla venientis [delphina](http://nil-sol.com/ecquis)
Tusci et temptata citaeque curam isto ubi vult vulnere reppulit.
- Seque vidit flendoque de quodam
- Dabit minimos deiecto caputque noctis pluma
- Leti coniunx est Helicen
- Illius pulvereumque Icare inpositos
- Vivunt pereo pluvio tot ramos Olenios gelidis
- Quater teretes natura inde
- :one: Seque vidit flendoque de quodam
- :two: Dabit minimos deiecto caputque noctis pluma
- :three: Leti coniunx est Helicen
- :four: Illius pulvereumque Icare inpositos
- :five: Vivunt pereo pluvio tot ramos Olenios gelidis
- :six: Quater teretes natura inde
### A subsection
@ -35,7 +35,7 @@ granum captantur potuisse Minervae, frugum.
> Clivo sub inprovisoque nostrum minus fama est, discordia patrem petebat precatur
absumitur, poena per sit. Foramina *tamen cupidine* memor supplex tollentes
dictum unam orbem, Anubis caecae. Viderat formosior tegebat satis, Aethiopasque
sit submisso coniuge tristis ubi!
sit submisso coniuge tristis ubi! :exclamation:
## Praeceps Corinthus totidem quem crus vultum cape
@ -68,7 +68,7 @@ And a shortcode:
### Another subsection
Gotta make the toc do a little bit of work
# A big title
# A big title :fire:
- hello
- world
@ -96,7 +96,7 @@ fn bench_render_content_without_highlighting(b: &mut test::Bencher) {
tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = false;
config.markdown.highlight_code = false;
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);
b.iter(|| render_content(CONTENT, &context).unwrap());
}
@ -106,7 +106,7 @@ fn bench_render_content_no_shortcode(b: &mut test::Bencher) {
let tera = Tera::default();
let content2 = CONTENT.replace(r#"{{ youtube(id="my_youtube_id") }}"#, "");
let mut config = Config::default();
config.highlight_code = false;
config.markdown.highlight_code = false;
let permalinks_ctx = HashMap::new();
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);
@ -123,3 +123,16 @@ fn bench_render_shortcodes_one_present(b: &mut test::Bencher) {
b.iter(|| render_shortcodes(CONTENT, &context));
}
#[bench]
fn bench_render_content_no_shortcode_with_emoji(b: &mut test::Bencher) {
let tera = Tera::default();
let content2 = CONTENT.replace(r#"{{ youtube(id="my_youtube_id") }}"#, "");
let mut config = Config::default();
config.markdown.highlight_code = false;
config.markdown.render_emoji = true;
let permalinks_ctx = HashMap::new();
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);
b.iter(|| render_content(&content2, &context).unwrap());
}


@ -1,3 +1,4 @@
use std::borrow::Cow;
use std::collections::HashMap;
use config::Config;
@ -7,11 +8,11 @@ use tera::{Context, Tera};
/// All the information from the zola site that is needed to render HTML from markdown
#[derive(Debug)]
pub struct RenderContext<'a> {
pub tera: &'a Tera,
pub tera: Cow<'a, Tera>,
pub config: &'a Config,
pub tera_context: Context,
pub current_page_permalink: &'a str,
pub permalinks: &'a HashMap<String, String>,
pub permalinks: Cow<'a, HashMap<String, String>>,
pub insert_anchor: InsertAnchor,
}
@ -25,13 +26,25 @@ impl<'a> RenderContext<'a> {
) -> RenderContext<'a> {
let mut tera_context = Context::new();
tera_context.insert("config", config);
RenderContext {
tera,
Self {
tera: Cow::Borrowed(tera),
tera_context,
current_page_permalink,
permalinks,
permalinks: Cow::Borrowed(permalinks),
insert_anchor,
config,
}
}
// In use in the markdown filter
pub fn from_config(config: &'a Config) -> RenderContext<'a> {
Self {
tera: Cow::Owned(Tera::default()),
tera_context: Context::new(),
current_page_permalink: "",
permalinks: Cow::Owned(HashMap::new()),
insert_anchor: InsertAnchor::None,
config,
}
}
}
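Switching `tera` and `permalinks` to `Cow` is what makes `from_config` possible: the context can own empty defaults instead of borrowing them from a full site. A hedged sketch of the intended use, assuming the config and rendering crates are in scope:

```rust
// Enough context to render Markdown outside of a full site build,
// e.g. from the `markdown` Tera filter that only has access to the site config.
let config = Config::default();
let context = RenderContext::from_config(&config);
assert_eq!(context.current_page_permalink, "");
assert!(context.permalinks.is_empty());
```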


@ -13,7 +13,6 @@ use utils::slugs::slugify_anchors;
use utils::vec::InsertMany;
use self::cmark::{Event, LinkType, Options, Parser, Tag};
use pulldown_cmark::CodeBlockKind;
mod codeblock;
mod fence;
@ -101,17 +100,12 @@ fn fix_link(
return Ok(link.to_string());
}
// TODO: remove me in a few versions when people have upgraded
if link.starts_with("./") && link.contains(".md") {
println!("It looks like the link `{}` is using the previous syntax for internal links: start with @/ instead", link);
}
// A few situations here:
// - it could be a relative link (starting with `@/`)
// - it could be a link to a co-located asset
// - it could be a normal link
let result = if link.starts_with("@/") {
match resolve_internal_link(&link, context.permalinks) {
match resolve_internal_link(&link, &context.permalinks) {
Ok(resolved) => {
if resolved.anchor.is_some() {
internal_links_with_anchors
@ -168,6 +162,10 @@ fn get_heading_refs(events: &[Event]) -> Vec<HeadingRef> {
}
pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Rendered> {
lazy_static! {
static ref EMOJI_REPLACER: gh_emoji::Replacer = gh_emoji::Replacer::new();
}
// the rendered html
let mut html = String::with_capacity(content.len());
// Set while parsing
@ -188,6 +186,10 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
opts.insert(Options::ENABLE_STRIKETHROUGH);
opts.insert(Options::ENABLE_TASKLISTS);
if context.config.markdown.smart_punctuation {
opts.insert(Options::ENABLE_SMART_PUNCTUATION);
}
{
let mut events = Parser::new_ext(content, opts)
.map(|event| {
@ -197,20 +199,38 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
if let Some(ref mut code_block) = highlighter {
let html = code_block.highlight(&text);
Event::Html(html.into())
} else if context.config.markdown.render_emoji {
let processed_text = EMOJI_REPLACER.replace_all(&text);
Event::Text(processed_text.to_string().into())
} else {
// Business as usual
Event::Text(text)
}
}
Event::Start(Tag::CodeBlock(ref kind)) => {
if !context.config.highlight_code {
let language = match kind {
cmark::CodeBlockKind::Fenced(fence_info) => {
let fence_info = fence::FenceSettings::new(fence_info);
fence_info.language
}
_ => None,
};
if !context.config.highlight_code() {
if let Some(lang) = language {
let html = format!(
r#"<pre><code class="language-{}" data-lang="{}">"#,
lang, lang
);
return Event::Html(html.into());
}
return Event::Html("<pre><code>".into());
}
let theme = &THEME_SET.themes[&context.config.highlight_theme];
let theme = &THEME_SET.themes[context.config.highlight_theme()];
match kind {
CodeBlockKind::Indented => (),
CodeBlockKind::Fenced(fence_info) => {
cmark::CodeBlockKind::Indented => (),
cmark::CodeBlockKind::Fenced(fence_info) => {
// This selects the background color the same way that
// start_coloured_html_snippet does
let color = theme
@ -227,11 +247,18 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
};
let snippet = start_highlighted_html_snippet(theme);
let mut html = snippet.0;
if let Some(lang) = language {
html.push_str(&format!(
r#"<code class="language-{}" data-lang="{}">"#,
lang, lang
));
} else {
html.push_str("<code>");
}
Event::Html(html.into())
}
Event::End(Tag::CodeBlock(_)) => {
if !context.config.highlight_code {
if !context.config.highlight_code() {
return Event::Html("</code></pre>\n".into());
}
// reset highlight and close the code block
@ -264,15 +291,29 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
return Event::Html("".into());
}
};
if is_external_link(&link)
&& context.config.markdown.has_external_link_tweaks()
{
let mut escaped = String::new();
// write_str can fail but here there are no reasons it should (afaik?)
cmark::escape::escape_href(&mut escaped, &link)
.expect("Could not write to buffer");
Event::Html(
context
.config
.markdown
.construct_external_link_tag(&escaped, &title)
.into(),
)
} else {
Event::Start(Tag::Link(link_type, fixed_link.into(), title))
}
}
Event::Html(ref markup) => {
if markup.contains("<!-- more -->") {
has_summary = true;
Event::Html(CONTINUE_READING.into())
} else {
if in_html_block && markup.contains("</pre>") {
} else if in_html_block && markup.contains("</pre>") {
in_html_block = false;
Event::Html(markup.replacen("</pre>", "", 1).into())
} else if markup.contains("pre data-shortcode") {
@ -288,7 +329,6 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
event
}
}
}
_ => event,
}
})
@ -348,7 +388,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
let anchor_link = utils::templates::render_template(
&ANCHOR_LINK_TEMPLATE,
context.tera,
&context.tera,
c,
&None,
)


@ -24,12 +24,12 @@ pub struct CodeBlock<'config> {
impl<'config> CodeBlock<'config> {
pub fn new(fence_info: &str, config: &'config Config, background: IncludeBackground) -> Self {
let fence_info = FenceSettings::new(fence_info);
let theme = &THEME_SET.themes[&config.highlight_theme];
let theme = &THEME_SET.themes[config.highlight_theme()];
let (highlighter, in_extra) = get_highlighter(fence_info.language, config);
Self {
highlighter,
extra_syntax_set: match in_extra {
true => config.extra_syntax_set.as_ref(),
true => config.markdown.extra_syntax_set.as_ref(),
false => None,
},
background,


@ -3,6 +3,7 @@ use pest::iterators::Pair;
use pest::Parser;
use pest_derive::Parser;
use regex::Regex;
use std::collections::HashMap;
use tera::{to_value, Context, Map, Value};
use crate::context::RenderContext;
@ -102,6 +103,7 @@ fn render_shortcode(
name: &str,
args: &Map<String, Value>,
context: &RenderContext,
invocation_count: u32,
body: Option<&str>,
) -> Result<String> {
let mut tera_context = Context::new();
@ -112,6 +114,7 @@ fn render_shortcode(
// Trimming right to avoid most shortcodes with bodies ending up with a HTML new line
tera_context.insert("body", b.trim_end());
}
tera_context.insert("nth", &invocation_count);
tera_context.extend(context.tera_context.clone());
let mut template_name = format!("shortcodes/{}.md", name);
@ -139,6 +142,12 @@ fn render_shortcode(
pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> {
let mut res = String::with_capacity(content.len());
let mut invocation_map: HashMap<String, u32> = HashMap::new();
let mut get_invocation_count = |name: &str| {
let invocation_number = invocation_map.entry(String::from(name)).or_insert(0);
*invocation_number += 1;
*invocation_number
};
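// Illustration (assumed template, not part of this diff): because `nth` is
// inserted into each shortcode's Tera context in `render_shortcode` above, a
// template such as `shortcodes/note.html` containing `<div id="note-{{ nth }}">`
// would render note-1, note-2, ... for successive `{{ note() }}` calls within
// one piece of content.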
let mut pairs = match ContentParser::parse(Rule::page, content) {
Ok(p) => p,
@ -184,7 +193,13 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
Rule::text => res.push_str(p.as_span().as_str()),
Rule::inline_shortcode => {
let (name, args) = parse_shortcode_call(p);
res.push_str(&render_shortcode(&name, &args, context, None)?);
res.push_str(&render_shortcode(
&name,
&args,
context,
get_invocation_count(&name),
None,
)?);
}
Rule::shortcode_with_body => {
let mut inner = p.into_inner();
@ -192,7 +207,13 @@ pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<Strin
// we don't care about the closing tag
let (name, args) = parse_shortcode_call(inner.next().unwrap());
let body = inner.next().unwrap().as_span().as_str();
res.push_str(&render_shortcode(&name, &args, context, Some(body))?);
res.push_str(&render_shortcode(
&name,
&args,
context,
get_invocation_count(&name),
Some(body),
)?);
}
Rule::ignored_inline_shortcode => {
res.push_str(


@ -37,7 +37,7 @@ fn hl_lines_simple() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -66,7 +66,7 @@ fn hl_lines_in_middle() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -95,7 +95,7 @@ fn hl_lines_all() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -122,7 +122,7 @@ fn hl_lines_start_from_one() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -150,7 +150,7 @@ fn hl_lines_start_from_zero() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -178,7 +178,7 @@ fn hl_lines_end() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -206,7 +206,7 @@ fn hl_lines_end_out_of_bounds() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -234,7 +234,7 @@ fn hl_lines_overlap() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -261,7 +261,7 @@ fn hl_lines_multiple() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -289,7 +289,7 @@ fn hl_lines_extra_spaces() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -317,7 +317,7 @@ fn hl_lines_int_and_range() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -346,7 +346,7 @@ fn hl_lines_single_line_range() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"
@ -375,7 +375,7 @@ fn hl_lines_reverse_range() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(
r#"

View file

@ -23,7 +23,7 @@ fn doesnt_highlight_code_block_with_highlighting_off() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = false;
config.markdown.highlight_code = false;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("```\n$ gutenberg server\n```", &context).unwrap();
assert_eq!(res.body, "<pre><code>$ gutenberg server\n</code></pre>\n");
@ -34,7 +34,7 @@ fn can_highlight_code_block_no_lang() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("```\n$ gutenberg server\n$ ping\n```", &context).unwrap();
assert_eq!(
@ -48,12 +48,12 @@ fn can_highlight_code_block_with_lang() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("```python\nlist.append(1)\n```", &context).unwrap();
assert_eq!(
res.body,
"<pre style=\"background-color:#2b303b;\">\n<code><span style=\"color:#c0c5ce;\">list.</span><span style=\"color:#bf616a;\">append</span><span style=\"color:#c0c5ce;\">(</span><span style=\"color:#d08770;\">1</span><span style=\"color:#c0c5ce;\">)\n</span></code></pre>"
"<pre style=\"background-color:#2b303b;\">\n<code class=\"language-python\" data-lang=\"python\"><span style=\"color:#c0c5ce;\">list.</span><span style=\"color:#bf616a;\">append</span><span style=\"color:#c0c5ce;\">(</span><span style=\"color:#d08770;\">1</span><span style=\"color:#c0c5ce;\">)\n</span></code></pre>"
);
}
@ -62,13 +62,13 @@ fn can_higlight_code_block_with_unknown_lang() {
let tera_ctx = Tera::default();
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = true;
config.markdown.highlight_code = true;
let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("```yolo\nlist.append(1)\n```", &context).unwrap();
// defaults to plain text
assert_eq!(
res.body,
"<pre style=\"background-color:#2b303b;\">\n<code><span style=\"color:#c0c5ce;\">list.append(1)\n</span></code></pre>"
"<pre style=\"background-color:#2b303b;\">\n<code class=\"language-yolo\" data-lang=\"yolo\"><span style=\"color:#c0c5ce;\">list.append(1)\n</span></code></pre>"
);
}
@ -87,7 +87,9 @@ Hello
)
.unwrap();
assert!(res.body.contains("<p>Hello</p>\n<div >"));
assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ""#));
assert!(res
.body
.contains(r#"<iframe src="https://www.youtube-nocookie.com/embed/ub36ffWAqgQ""#));
}
#[test]
@ -99,7 +101,7 @@ fn can_render_shortcode_with_markdown_char_in_args_name() {
for i in input {
let res =
render_content(&format!("{{{{ youtube(id=\"hey\", {}=1) }}}}", i), &context).unwrap();
assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/hey""#));
assert!(res.body.contains(r#"<iframe src="https://www.youtube-nocookie.com/embed/hey""#));
}
}
@ -119,7 +121,7 @@ fn can_render_shortcode_with_markdown_char_in_args_value() {
let res = render_content(&format!("{{{{ youtube(id=\"{}\") }}}}", i), &context).unwrap();
assert!(res
.body
.contains(&format!(r#"<iframe src="https://www.youtube.com/embed/{}""#, i)));
.contains(&format!(r#"<iframe src="https://www.youtube-nocookie.com/embed/{}""#, i)));
}
}
@ -232,10 +234,12 @@ Hello
)
.unwrap();
assert!(res.body.contains("<p>Hello</p>\n<div >"));
assert!(res.body.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ""#));
assert!(res
.body
.contains(r#"<iframe src="https://www.youtube.com/embed/ub36ffWAqgQ?autoplay=1""#));
.contains(r#"<iframe src="https://www.youtube-nocookie.com/embed/ub36ffWAqgQ""#));
assert!(res.body.contains(
r#"<iframe src="https://www.youtube-nocookie.com/embed/ub36ffWAqgQ?autoplay=1""#
));
assert!(res.body.contains(r#"<iframe src="https://www.streamable.com/e/c0ic""#));
assert!(res.body.contains(r#"//player.vimeo.com/video/210073083""#));
}
@ -244,7 +248,7 @@ Hello
fn doesnt_render_ignored_shortcodes() {
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.highlight_code = false;
config.markdown.highlight_code = false;
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(r#"```{{/* youtube(id="w7Ft2ymGmfc") */}}```"#, &context).unwrap();
assert_eq!(res.body, "<p><code>{{ youtube(id=&quot;w7Ft2ymGmfc&quot;) }}</code></p>\n");
@ -1004,7 +1008,6 @@ fn can_render_commented_out_shortcodes_fine() {
assert_eq!(res.body, expected);
}
// https://zola.discourse.group/t/zola-12-issue-with-continue-reading/590/7
#[test]
fn can_render_read_more_after_shortcode() {
@ -1036,3 +1039,120 @@ Again more text"#;
let res = render_content(markdown_string, &context).unwrap();
assert_eq!(res.body, expected);
}
#[test]
fn can_render_emoji_alias() {
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.markdown.render_emoji = true;
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("Hello, World! :smile:", &context).unwrap();
assert_eq!(res.body, "<p>Hello, World! 😄</p>\n");
}
#[test]
fn emoji_aliases_are_ignored_when_disabled_in_config() {
let permalinks_ctx = HashMap::new();
let config = Config::default();
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("Hello, World! :smile:", &context).unwrap();
assert_eq!(res.body, "<p>Hello, World! :smile:</p>\n");
}
#[test]
fn invocation_count_increments_in_shortcode() {
let permalinks_ctx = HashMap::new();
let mut tera = Tera::default();
tera.extend(&ZOLA_TERA).unwrap();
let shortcode_template_a = r#"<p>a: {{ nth }}</p>"#;
let shortcode_template_b = r#"<p>b: {{ nth }}</p>"#;
let markdown_string = r#"{{ a() }}
{{ b() }}
{{ a() }}
{{ b() }}
"#;
let expected = r#"<p>a: 1</p>
<p>b: 1</p>
<p>a: 2</p>
<p>b: 2</p>
"#;
tera.add_raw_template("shortcodes/a.html", shortcode_template_a).unwrap();
tera.add_raw_template("shortcodes/b.html", shortcode_template_b).unwrap();
let config = Config::default();
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(markdown_string, &context).unwrap();
assert_eq!(res.body, expected);
}
#[test]
fn basic_external_links_unchanged() {
let permalinks_ctx = HashMap::new();
let config = Config::default();
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("<https://google.com>", &context).unwrap();
assert_eq!(res.body, "<p><a href=\"https://google.com\">https://google.com</a></p>\n");
}
#[test]
fn can_set_target_blank_for_external_link() {
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.markdown.external_links_target_blank = true;
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("<https://google.com>", &context).unwrap();
assert_eq!(res.body, "<p><a rel=\"noopener\" target=\"_blank\" href=\"https://google.com\">https://google.com</a></p>\n");
}
#[test]
fn can_set_nofollow_for_external_link() {
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.markdown.external_links_no_follow = true;
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
// Testing href escaping while we're there
let res = render_content("<https://google.com/éllo>", &context).unwrap();
assert_eq!(
res.body,
"<p><a rel=\"nofollow\" href=\"https://google.com/%C3%A9llo\">https://google.com/éllo</a></p>\n"
);
}
#[test]
fn can_set_noreferrer_for_external_link() {
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.markdown.external_links_no_referrer = true;
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("<https://google.com>", &context).unwrap();
assert_eq!(
res.body,
"<p><a rel=\"noreferrer\" href=\"https://google.com\">https://google.com</a></p>\n"
);
}
#[test]
fn can_set_all_options_for_external_link() {
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.markdown.external_links_target_blank = true;
config.markdown.external_links_no_follow = true;
config.markdown.external_links_no_referrer = true;
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content("<https://google.com>", &context).unwrap();
assert_eq!(res.body, "<p><a rel=\"noopener nofollow noreferrer\" target=\"_blank\" href=\"https://google.com\">https://google.com</a></p>\n");
}
#[test]
fn can_use_smart_punctuation() {
let permalinks_ctx = HashMap::new();
let mut config = Config::default();
config.markdown.smart_punctuation = true;
let context = RenderContext::new(&ZOLA_TERA, &config, "", &permalinks_ctx, InsertAnchor::None);
let res = render_content(r#"This -- is "it"..."#, &context).unwrap();
assert_eq!(res.body, "<p>This is “it”…</p>\n");
}

View file

@ -1,6 +1,6 @@
/**
* elasticlunr - http://weixsong.github.io
* Lightweight full-text search engine in Javascript for browser search and offline search. - 0.9.5
* Lightweight full-text search engine in Javascript for browser search and offline search. - 0.9.6
*
* Copyright (C) 2017 Oliver Nightingale
* Copyright (C) 2017 Wei Song

View file

@ -8,13 +8,14 @@ include = ["src/**/*"]
[dependencies]
tera = "1"
glob = "0.3"
minify-html = "0.3.8"
walkdir = "2"
rayon = "1"
serde = "1"
serde_derive = "1"
sass-rs = "0.2"
lazy_static = "1.1"
relative-path = "1"
slotmap = "0.4"
errors = { path = "../errors" }
config = { path = "../config" }

View file

@ -71,7 +71,7 @@ fn bench_render_paginated(b: &mut test::Bencher) {
let section = library.sections_values()[0];
let paginator = Paginator::from_section(&section, &library);
b.iter(|| site.render_paginated(public, &paginator));
b.iter(|| site.render_paginated(Vec::new(), &paginator));
}
#[bench]

View file

@ -9,21 +9,22 @@ use std::fs::remove_dir_all;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex, RwLock};
use glob::glob;
use lazy_static::lazy_static;
use minify_html::{with_friendly_error, Cfg};
use rayon::prelude::*;
use tera::{Context, Tera};
use walkdir::{DirEntry, WalkDir};
use config::{get_config, Config};
use errors::{bail, Error, Result};
use front_matter::InsertAnchor;
use library::{find_taxonomies, Library, Page, Paginator, Section, Taxonomy};
use relative_path::RelativePathBuf;
use std::time::Instant;
use templates::render_redirect_template;
use utils::fs::{
copy_directory, copy_file_if_needed, create_directory, create_file, ensure_directory_exists,
};
use utils::minify;
use utils::net::get_available_port;
use utils::templates::render_template;
@ -85,7 +86,7 @@ impl Site {
let static_path = path.join("static");
let imageproc =
imageproc::Processor::new(content_path.clone(), &static_path, &config.base_url);
let output_path = path.join("public");
let output_path = path.join(config.output_dir.clone());
let site = Site {
base_path: path.to_path_buf(),
@ -166,72 +167,114 @@ impl Site {
/// out of them
pub fn load(&mut self) -> Result<()> {
let base_path = self.base_path.to_string_lossy().replace("\\", "/");
let content_glob = format!("{}/{}", base_path, "content/**/*.md");
let (section_entries, page_entries): (Vec<_>, Vec<_>) = glob(&content_glob)
.expect("Invalid glob")
.filter_map(|e| e.ok())
.filter(|e| !e.as_path().file_name().unwrap().to_str().unwrap().starts_with('.'))
.partition(|entry| {
entry.as_path().file_name().unwrap().to_str().unwrap().starts_with("_index.")
});
self.library = Arc::new(RwLock::new(Library::new(0, 0, self.config.is_multilingual())));
let mut pages_insert_anchors = HashMap::new();
self.library = Arc::new(RwLock::new(Library::new(
page_entries.len(),
section_entries.len(),
self.config.is_multilingual(),
)));
        // not the most elegant loop, but it is needed so we can use skip_current_dir,
        // which we can only decide to call after we've deserialised the section
let mut dir_walker = WalkDir::new(format!("{}/{}", base_path, "content/")).into_iter();
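        // The index filenames (`_index.md` plus `_index.{lang}.md` for every configured
        // language) that mark a directory as a section.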
let mut allowed_index_filenames: Vec<_> =
self.config.languages.iter().map(|l| format!("_index.{}.md", l.code)).collect();
allowed_index_filenames.push("_index.md".to_string());
let sections = {
let config = &self.config;
section_entries
.into_par_iter()
.map(|entry| {
let path = entry.as_path();
Section::from_file(path, config, &self.base_path)
})
.collect::<Vec<_>>()
loop {
let entry: DirEntry = match dir_walker.next() {
None => break,
Some(Err(_)) => continue,
Some(Ok(entry)) => entry,
};
let path = entry.path();
let file_name = match path.file_name() {
None => continue,
Some(name) => name.to_str().unwrap(),
};
let pages = {
let config = &self.config;
page_entries
.into_par_iter()
.filter(|entry| match &config.ignored_content_globset {
Some(gs) => !gs.is_match(entry.as_path()),
None => true,
})
.map(|entry| {
let path = entry.as_path();
Page::from_file(path, config, &self.base_path)
})
.collect::<Vec<_>>()
};
// Kinda duplicated code for add_section/add_page but necessary to do it that
// way because of the borrow checker
for section in sections {
let s = section?;
self.add_section(s, false)?;
// ignore excluded content
match &self.config.ignored_content_globset {
Some(gs) => {
if gs.is_match(path) {
continue;
}
}
self.create_default_index_sections()?;
None => (),
}
let mut pages_insert_anchors = HashMap::new();
for page in pages {
let p = page?;
// Should draft pages be ignored?
if p.meta.draft && !self.include_drafts {
// we process a section when we encounter the dir
// so we can process it before any of the pages
// therefore we should skip the actual file to avoid duplication
if file_name.starts_with("_index.") {
continue;
}
// skip hidden files and non md files
if !path.is_dir() && (!file_name.ends_with(".md") || file_name.starts_with('.')) {
continue;
}
// is it a section or not?
if path.is_dir() {
// if we are processing a section we have to collect
                // index files for all languages and process them simultaneously
// before any of the pages
let index_files = WalkDir::new(&path)
.max_depth(1)
.into_iter()
.filter_map(|e| match e {
Err(_) => None,
Ok(f) => {
let path_str = f.path().file_name().unwrap().to_str().unwrap();
if f.path().is_file()
&& allowed_index_filenames.iter().find(|&s| *s == path_str).is_some()
{
Some(f)
} else {
// https://github.com/getzola/zola/issues/1244
if path_str.starts_with("_index.") {
println!("Expected a section filename, got `{}`. Allowed values: `{:?}`", path_str, &allowed_index_filenames);
}
None
}
}
})
.collect::<Vec<DirEntry>>();
for index_file in index_files {
let section = match Section::from_file(
index_file.path(),
&self.config,
&self.base_path,
) {
Err(_) => continue,
Ok(sec) => sec,
};
                    // if the section is drafted we can skip the entire dir
if section.meta.draft && !self.include_drafts {
dir_walker.skip_current_dir();
continue;
}
self.add_section(section, false)?;
}
} else {
let page = Page::from_file(path, &self.config, &self.base_path)
.expect("error deserialising page");
// should we skip drafts?
if page.meta.draft && !self.include_drafts {
continue;
}
pages_insert_anchors.insert(
p.file.path.clone(),
self.find_parent_section_insert_anchor(&p.file.parent.clone(), &p.lang),
page.file.path.clone(),
self.find_parent_section_insert_anchor(&page.file.parent.clone(), &page.lang),
);
self.add_page(p, false)?;
self.add_page(page, false)?;
}
}
self.create_default_index_sections()?;
{
let library = self.library.read().unwrap();
@ -447,26 +490,6 @@ impl Site {
html
}
/// Minifies html content
fn minify(&self, html: String) -> Result<String> {
let cfg = &Cfg { minify_js: false };
let mut input_bytes = html.as_bytes().to_vec();
match with_friendly_error(&mut input_bytes, cfg) {
Ok(_len) => match std::str::from_utf8(&input_bytes) {
Ok(result) => Ok(result.to_string()),
Err(err) => bail!("Failed to convert bytes to string : {}", err),
},
Err(minify_error) => {
bail!(
"Failed to truncate html at character {}: {} \n {}",
minify_error.position,
minify_error.message,
minify_error.code_context
);
}
}
}
/// Copy the main `static` folder and the theme `static` folder if a theme is used
pub fn copy_static_directories(&self) -> Result<()> {
// The user files will overwrite the theme files
@ -538,7 +561,7 @@ impl Site {
let final_content = if !filename.ends_with("html") || !self.config.minify_html {
content
} else {
match self.minify(content) {
match minify::html(content) {
Ok(minified_content) => minified_content,
Err(error) => bail!(error),
}
@ -587,32 +610,41 @@ impl Site {
/// Deletes the `public` directory (only for `zola build`) and builds the site
pub fn build(&self) -> Result<()> {
let mut start = Instant::now();
// Do not clean on `zola serve` otherwise we end up copying assets all the time
if self.build_mode == BuildMode::Disk {
self.clean()?;
}
start = log_time(start, "Cleaned folder");
// Generate/move all assets before rendering any content
if let Some(ref theme) = self.config.theme {
let theme_path = self.base_path.join("themes").join(theme);
if theme_path.join("sass").exists() {
sass::compile_sass(&theme_path, &self.output_path)?;
start = log_time(start, "Compiled theme Sass");
}
}
if self.config.compile_sass {
sass::compile_sass(&self.base_path, &self.output_path)?;
start = log_time(start, "Compiled own Sass");
}
if self.config.build_search_index {
self.build_search_index()?;
start = log_time(start, "Built search index");
}
// Render aliases first to allow overwriting
self.render_aliases()?;
start = log_time(start, "Rendered aliases");
self.render_sections()?;
start = log_time(start, "Rendered sections");
self.render_orphan_pages()?;
start = log_time(start, "Rendered orphan pages");
self.render_sitemap()?;
start = log_time(start, "Rendered sitemap");
let library = self.library.read().unwrap();
if self.config.generate_feed {
@ -628,6 +660,7 @@ impl Site {
library.pages_values()
};
self.render_feed(pages, None, &self.config.default_language, |c| c)?;
start = log_time(start, "Generated feed in default language");
}
for lang in &self.config.languages {
@ -637,16 +670,22 @@ impl Site {
let pages =
library.pages_values().iter().filter(|p| p.lang == lang.code).cloned().collect();
self.render_feed(pages, Some(&PathBuf::from(lang.code.clone())), &lang.code, |c| c)?;
start = log_time(start, "Generated feed in other language");
}
self.render_404()?;
start = log_time(start, "Rendered 404");
self.render_robots()?;
start = log_time(start, "Rendered robots.txt");
self.render_taxonomies()?;
start = log_time(start, "Rendered taxonomies");
// We process images at the end as we might have picked up images to process from markdown
// or from templates
self.process_images()?;
start = log_time(start, "Processed images");
// Processed images will be in static so the last step is to copy it
self.copy_static_directories()?;
log_time(start, "Copied static dir");
Ok(())
}
@ -731,6 +770,7 @@ impl Site {
ensure_directory_exists(&self.output_path)?;
let mut context = Context::new();
context.insert("config", &self.config);
context.insert("lang", &self.config.default_language);
let output = render_template("404.html", &self.tera, context, &self.config.theme)?;
let content = self.inject_livereload(output);
self.write_content(&[], "404.html", content, false)?;
@ -1056,3 +1096,12 @@ impl Site {
.collect::<Result<()>>()
}
}
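// Helper used throughout `build` above: the per-step timings are only printed when
// the `ZOLA_PERF_LOG` environment variable is set.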
fn log_time(start: Instant, message: &str) -> Instant {
let do_print = std::env::var("ZOLA_PERF_LOG").is_ok();
let now = Instant::now();
if do_print {
println!("{} took {}ms", message, now.duration_since(start).as_millis());
}
now
}

View file

@ -5,7 +5,7 @@ use tera::Tera;
use crate::Site;
use config::Config;
use errors::{bail, Error, Result};
use templates::{global_fns, ZOLA_TERA};
use templates::{filters, global_fns, ZOLA_TERA};
use utils::templates::rewrite_theme_paths;
pub fn load_tera(path: &Path, config: &Config) -> Result<Tera> {
@ -50,6 +50,8 @@ pub fn load_tera(path: &Path, config: &Config) -> Result<Tera> {
/// Adds global fns that are to be available to shortcodes while rendering markdown
pub fn register_early_global_fns(site: &mut Site) {
site.tera.register_filter("markdown", filters::MarkdownFilter::new(site.config.clone()));
site.tera.register_function(
"get_url",
global_fns::GetUrl::new(

View file

@ -177,6 +177,9 @@ fn can_build_site_without_live_reload() {
assert!(file_exists!(public, "nested_sass/sass.css"));
assert!(file_exists!(public, "nested_sass/scss.css"));
assert!(!file_exists!(public, "secret_section/index.html"));
assert!(!file_exists!(public, "secret_section/page.html"));
assert!(!file_exists!(public, "secret_section/secret_sub_section/hello.html"));
// no live reload code
assert_eq!(
file_contains!(public, "index.html", "/livereload.js?port=1112&amp;mindelay=10"),
@ -210,7 +213,7 @@ fn can_build_site_without_live_reload() {
#[test]
fn can_build_site_with_live_reload_and_drafts() {
let (_, _tmp_dir, public) = build_site_with_setup("test_site", |mut site| {
let (site, _tmp_dir, public) = build_site_with_setup("test_site", |mut site| {
site.enable_live_reload(1000);
site.include_drafts();
(site, true)
@ -254,6 +257,15 @@ fn can_build_site_with_live_reload_and_drafts() {
// Drafts are included
assert!(file_exists!(public, "posts/draft/index.html"));
assert!(file_contains!(public, "sitemap.xml", "draft"));
// drafted sections are included
let library = site.library.read().unwrap();
assert_eq!(library.sections().len(), 14);
assert!(file_exists!(public, "secret_section/index.html"));
assert!(file_exists!(public, "secret_section/draft-page/index.html"));
assert!(file_exists!(public, "secret_section/page/index.html"));
assert!(file_exists!(public, "secret_section/secret_sub_section/hello/index.html"));
}
#[test]
@ -654,11 +666,7 @@ fn can_build_with_extra_syntaxes() {
assert!(&public.exists());
assert!(file_exists!(public, "posts/extra-syntax/index.html"));
assert!(file_contains!(
public,
"posts/extra-syntax/index.html",
r#"<span style="color:#d08770;">test</span>"#
));
assert!(file_contains!(public, "posts/extra-syntax/index.html", r#"<span style="color:"#));
}
#[test]

View file

@ -6,27 +6,28 @@ edition = "2018"
[dependencies]
tera = "1"
base64 = "0.12"
base64 = "0.13"
lazy_static = "1"
pulldown-cmark = { version = "0.8", default-features = false }
toml = "0.5"
csv = "1"
image = "0.23"
serde_json = "1.0"
sha2 = "0.9"
url = "2"
nom-bibtex = "0.3"
svg_metadata = "0.4.1"
errors = { path = "../errors" }
utils = { path = "../utils" }
library = { path = "../library" }
config = { path = "../config" }
imageproc = { path = "../imageproc" }
svg_metadata = "0.4.1"
rendering = { path = "../rendering" }
[dependencies.reqwest]
version = "0.10"
version = "0.11"
default-features = false
features = ["blocking", "rustls-tls"]
[dev-dependencies]
mockito = "0.27"
mockito = "0.28"

View file

@ -1,2 +1,3 @@
User-agent: *
Allow: /
Sitemap: {{ get_url(path="sitemap.xml") }}

View file

@ -1,3 +1,3 @@
<div {% if class %}class="{{class}}"{% endif %}>
<iframe src="https://www.youtube.com/embed/{{id}}{% if autoplay %}?autoplay=1{% endif %}" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>
<iframe src="https://www.youtube-nocookie.com/embed/{{id}}{% if autoplay %}?autoplay=1{% endif %}" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>
</div>

View file

@ -2,28 +2,33 @@ use std::collections::HashMap;
use std::hash::BuildHasher;
use base64::{decode, encode};
use pulldown_cmark as cmark;
use tera::{to_value, try_get_value, Result as TeraResult, Value};
use config::Config;
use rendering::{render_content, RenderContext};
use tera::{to_value, try_get_value, Filter as TeraFilter, Result as TeraResult, Value};
pub fn markdown<S: BuildHasher>(
value: &Value,
args: &HashMap<String, Value, S>,
) -> TeraResult<Value> {
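// The old free `markdown` function above is replaced by a filter struct that carries
// the full site `Config`, so options such as code highlighting, emoji aliases and
// external-link attributes apply to the filter output (see the tests further down).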
#[derive(Debug)]
pub struct MarkdownFilter {
config: Config,
}
impl MarkdownFilter {
pub fn new(config: Config) -> Self {
Self { config }
}
}
impl TeraFilter for MarkdownFilter {
fn filter(&self, value: &Value, args: &HashMap<String, Value>) -> TeraResult<Value> {
let context = RenderContext::from_config(&self.config);
let s = try_get_value!("markdown", "value", String, value);
let inline = match args.get("inline") {
Some(val) => try_get_value!("markdown", "inline", bool, val),
None => false,
};
let mut opts = cmark::Options::empty();
opts.insert(cmark::Options::ENABLE_TABLES);
opts.insert(cmark::Options::ENABLE_FOOTNOTES);
opts.insert(cmark::Options::ENABLE_STRIKETHROUGH);
opts.insert(cmark::Options::ENABLE_TASKLISTS);
let mut html = String::new();
let parser = cmark::Parser::new_ext(&s, opts);
cmark::html::push_html(&mut html, parser);
let mut html = match render_content(&s, &context) {
Ok(res) => res.body,
Err(e) => return Err(format!("Failed to render markdown filter: {:?}", e).into()),
};
if inline {
html = html
@ -35,6 +40,7 @@ pub fn markdown<S: BuildHasher>(
Ok(to_value(&html).unwrap())
}
}
pub fn base64_encode<S: BuildHasher>(
value: &Value,
@ -56,22 +62,24 @@ pub fn base64_decode<S: BuildHasher>(
mod tests {
use std::collections::HashMap;
use tera::to_value;
use tera::{to_value, Filter};
use super::{base64_decode, base64_encode, markdown};
use super::{base64_decode, base64_encode, MarkdownFilter};
use config::Config;
#[test]
fn markdown_filter() {
let result = markdown(&to_value(&"# Hey").unwrap(), &HashMap::new());
let result = MarkdownFilter::new(Config::default())
.filter(&to_value(&"# Hey").unwrap(), &HashMap::new());
assert!(result.is_ok());
assert_eq!(result.unwrap(), to_value(&"<h1>Hey</h1>\n").unwrap());
assert_eq!(result.unwrap(), to_value(&"<h1 id=\"hey\">Hey</h1>\n").unwrap());
}
#[test]
fn markdown_filter_inline() {
let mut args = HashMap::new();
args.insert("inline".to_string(), to_value(true).unwrap());
let result = markdown(
let result = MarkdownFilter::new(Config::default()).filter(
&to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(),
&args,
);
@ -84,7 +92,7 @@ mod tests {
fn markdown_filter_inline_tables() {
let mut args = HashMap::new();
args.insert("inline".to_string(), to_value(true).unwrap());
let result = markdown(
let result = MarkdownFilter::new(Config::default()).filter(
&to_value(
&r#"
|id|author_id| timestamp_created|title |content |
@ -100,6 +108,26 @@ mod tests {
assert!(result.unwrap().as_str().unwrap().contains("<table>"));
}
#[test]
fn markdown_filter_use_config_options() {
let mut config = Config::default();
config.markdown.highlight_code = true;
config.markdown.smart_punctuation = true;
config.markdown.render_emoji = true;
config.markdown.external_links_target_blank = true;
let md = "Hello <https://google.com> :smile: ...";
let result =
MarkdownFilter::new(config.clone()).filter(&to_value(&md).unwrap(), &HashMap::new());
assert!(result.is_ok());
assert_eq!(result.unwrap(), to_value(&"<p>Hello <a rel=\"noopener\" target=\"_blank\" href=\"https://google.com\">https://google.com</a> 😄 …</p>\n").unwrap());
let md = "```py\ni=0\n```";
let result = MarkdownFilter::new(config).filter(&to_value(&md).unwrap(), &HashMap::new());
assert!(result.is_ok());
assert!(result.unwrap().as_str().unwrap().contains("<pre style"));
}
#[test]
fn base64_encode_filter() {
// from https://tools.ietf.org/html/rfc4648#section-10

View file

@ -28,6 +28,7 @@ enum OutputFormat {
Toml,
Json,
Csv,
Bibtex,
Plain,
}
@ -51,6 +52,7 @@ impl FromStr for OutputFormat {
"toml" => Ok(OutputFormat::Toml),
"csv" => Ok(OutputFormat::Csv),
"json" => Ok(OutputFormat::Json),
"bibtex" => Ok(OutputFormat::Bibtex),
"plain" => Ok(OutputFormat::Plain),
format => Err(format!("Unknown output format {}", format).into()),
}
@ -63,6 +65,7 @@ impl OutputFormat {
OutputFormat::Json => "application/json",
OutputFormat::Csv => "text/csv",
OutputFormat::Toml => "application/toml",
OutputFormat::Bibtex => "application/x-bibtex",
OutputFormat::Plain => "text/plain",
})
}
@ -148,7 +151,7 @@ fn get_output_format_from_args(
let format_arg = optional_arg!(
String,
args.get("format"),
"`load_data`: `format` needs to be an argument with a string value, being one of the supported `load_data` file types (csv, json, toml, plain)"
"`load_data`: `format` needs to be an argument with a string value, being one of the supported `load_data` file types (csv, json, toml, bibtex, plain)"
);
if let Some(format) = format_arg {
@ -165,11 +168,11 @@ fn get_output_format_from_args(
};
// Always default to Plain if we don't know what it is
OutputFormat::from_str(from_extension).or_else(|_| Ok(OutputFormat::Plain))
OutputFormat::from_str(from_extension).or(Ok(OutputFormat::Plain))
}
/// A Tera function to load data from a file or from a URL
/// Currently the supported formats are json, toml, csv and plain text
/// Currently the supported formats are json, toml, csv, bibtex and plain text
#[derive(Debug)]
pub struct LoadData {
base_path: PathBuf,
@ -223,6 +226,7 @@ impl TeraFn for LoadData {
OutputFormat::Toml => load_toml(data),
OutputFormat::Csv => load_csv(data),
OutputFormat::Json => load_json(data),
OutputFormat::Bibtex => load_bibtex(data),
OutputFormat::Plain => to_value(data).map_err(|e| e.into()),
};
@ -252,6 +256,47 @@ fn load_toml(toml_data: String) -> Result<Value> {
}
}
/// Parse a BIBTEX string and convert it to a Tera Value
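/// The resulting object mirrors nom-bibtex's model: `preambles`, `comments`,
/// `variables` and `bibliographies`, where each bibliography entry exposes
/// `entry_type`, `citation_key` and its lowercased `tags`.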
fn load_bibtex(bibtex_data: String) -> Result<Value> {
let bibtex_model = nom_bibtex::Bibtex::parse(&bibtex_data).map_err(|e| format!("{:?}", e))?;
let mut bibtex_map = Map::new();
let preambles_array =
bibtex_model.preambles().iter().map(|v| Value::String(v.to_string())).collect();
bibtex_map.insert(String::from("preambles"), Value::Array(preambles_array));
let comments_array =
bibtex_model.comments().iter().map(|v| Value::String(v.to_string())).collect();
bibtex_map.insert(String::from("comments"), Value::Array(comments_array));
let mut variables_map = Map::new();
for (key, val) in bibtex_model.variables() {
variables_map.insert(key.to_string(), Value::String(val.to_string()));
}
bibtex_map.insert(String::from("variables"), Value::Object(variables_map));
let bibliographies_array = bibtex_model
.bibliographies()
.iter()
.map(|b| {
let mut m = Map::new();
m.insert(String::from("entry_type"), Value::String(b.entry_type().to_string()));
m.insert(String::from("citation_key"), Value::String(b.citation_key().to_string()));
let mut tags = Map::new();
for (key, val) in b.tags() {
tags.insert(key.to_lowercase().to_string(), Value::String(val.to_string()));
}
m.insert(String::from("tags"), Value::Object(tags));
Value::Object(m)
})
.collect();
bibtex_map.insert(String::from("bibliographies"), Value::Array(bibliographies_array));
let bibtex_value: Value = Value::Object(bibtex_map);
to_value(bibtex_value).map_err(|err| err.into())
}
/// Parse a CSV string and convert it to a Tera Value
///
/// An example csv file `example.csv` could be:

View file

@ -39,7 +39,7 @@ impl TeraFn for Trans {
let term = self
.config
.get_translation(lang, key)
.map_err(|e| Error::chain("Failed to retreive term translation", e))?;
.map_err(|e| Error::chain("Failed to retrieve term translation", e))?;
Ok(to_value(term).unwrap())
}
@ -331,7 +331,7 @@ impl GetTaxonomyUrl {
}
taxonomies.insert(format!("{}-{}", taxo.kind.name, taxo.kind.lang), items);
}
Self { taxonomies, default_lang: default_lang.to_string(), slugify: slugify }
Self { taxonomies, default_lang: default_lang.to_string(), slugify }
}
}
impl TeraFn for GetTaxonomyUrl {
@ -735,7 +735,7 @@ title = "A title"
let config = Config::parse(TRANS_CONFIG).unwrap();
let error = Trans::new(config).call(&args).unwrap_err();
assert_eq!("Failed to retreive term translation", format!("{}", error));
assert_eq!("Failed to retrieve term translation", format!("{}", error));
}
#[test]
@ -746,7 +746,7 @@ title = "A title"
let config = Config::parse(TRANS_CONFIG).unwrap();
let error = Trans::new(config).call(&args).unwrap_err();
assert_eq!("Failed to retreive term translation", format!("{}", error));
assert_eq!("Failed to retrieve term translation", format!("{}", error));
}
#[test]

View file

@ -36,7 +36,6 @@ lazy_static! {
("internal/alias.html", include_str!("builtins/internal/alias.html")),
])
.unwrap();
tera.register_filter("markdown", filters::markdown);
tera.register_filter("base64_encode", filters::base64_encode);
tera.register_filter("base64_decode", filters::base64_decode);
tera

View file

@ -15,6 +15,7 @@ serde_derive = "1"
slug = "0.1"
percent-encoding = "2"
filetime = "0.2.12"
minify-html = "0.4"
errors = { path = "../errors" }

View file

@ -1,12 +1,32 @@
use serde::{Deserialize, Deserializer};
use serde_derive::Deserialize;
use tera::{Map, Value};
/// Used as an attribute when we want to convert from TOML to a string date
/// If a TOML datetime isn't present, it will accept a string and push it through
/// TOML's date time parser to ensure only valid dates are accepted.
/// Inspired by this proposal: https://github.com/alexcrichton/toml-rs/issues/269
pub fn from_toml_datetime<'de, D>(deserializer: D) -> Result<Option<String>, D::Error>
where
D: Deserializer<'de>,
{
toml::value::Datetime::deserialize(deserializer).map(|s| Some(s.to_string()))
use serde::de::Error;
use std::str::FromStr;
#[derive(Deserialize)]
#[serde(untagged)]
enum MaybeDatetime {
Datetime(toml::value::Datetime),
String(String),
}
match MaybeDatetime::deserialize(deserializer)? {
MaybeDatetime::Datetime(d) => Ok(Some(d.to_string())),
MaybeDatetime::String(s) => match toml::value::Datetime::from_str(&s) {
Ok(d) => Ok(Some(d.to_string())),
Err(e) => Err(D::Error::custom(e)),
},
}
}
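// A minimal usage sketch (not part of this change): the helper above is meant to be
// plugged into front-matter structs through serde's `deserialize_with` attribute.
// The struct and field names below are illustrative only.
#[derive(Deserialize)]
struct ExampleFrontMatter {
    // Accepts either a TOML datetime or a string that parses as one; `default`
    // keeps the field optional.
    #[serde(default, deserialize_with = "from_toml_datetime")]
    date: Option<String>,
}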
/// Returns key/value for a converted date from TOML.

View file

@ -20,8 +20,8 @@ pub fn is_path_in_directory(parent: &Path, path: &Path) -> Result<bool> {
/// Create a file with the content given
pub fn create_file(path: &Path, content: &str) -> Result<()> {
let mut file =
File::create(&path).map_err(|e| Error::chain(format!("Failed to create {:?}", path), e))?;
let mut file = File::create(&path)
.map_err(|e| Error::chain(format!("Failed to create file {}", path.display()), e))?;
file.write_all(content.as_bytes())?;
Ok(())
}
@ -62,7 +62,7 @@ pub fn read_file(path: &Path) -> Result<String> {
/// Return the content of a file, with error handling added.
/// The default error message is overwritten by the message given.
/// That means it is allocation 2 strings, oh well
/// That means it is allocating 2 strings, oh well
pub fn read_file_with_error(path: &Path, message: &str) -> Result<String> {
let res = read_file(&path);
if res.is_ok() {
@ -101,7 +101,9 @@ pub fn copy_file(src: &Path, dest: &PathBuf, base_path: &PathBuf, hard_link: boo
let target_path = dest.join(relative_path);
if let Some(parent_directory) = target_path.parent() {
create_dir_all(parent_directory)?;
create_dir_all(parent_directory).map_err(|e| {
Error::chain(format!("Was not able to create folder {}", parent_directory.display()), e)
})?;
}
copy_file_if_needed(src, &target_path, hard_link)
@ -113,7 +115,9 @@ pub fn copy_file(src: &Path, dest: &PathBuf, base_path: &PathBuf, hard_link: boo
/// 3. Its filesize is identical to that of the src file.
pub fn copy_file_if_needed(src: &Path, dest: &PathBuf, hard_link: bool) -> Result<()> {
if let Some(parent_directory) = dest.parent() {
create_dir_all(parent_directory)?;
create_dir_all(parent_directory).map_err(|e| {
Error::chain(format!("Was not able to create folder {}", parent_directory.display()), e)
})?;
}
if hard_link {
@ -125,11 +129,25 @@ pub fn copy_file_if_needed(src: &Path, dest: &PathBuf, hard_link: bool) -> Resul
let target_metadata = metadata(&dest)?;
let target_mtime = FileTime::from_last_modification_time(&target_metadata);
if !(src_mtime == target_mtime && src_metadata.len() == target_metadata.len()) {
copy(src, &dest)?;
copy(src, &dest).map_err(|e| {
Error::chain(
format!(
"Was not able to copy file {} to {}",
src.display(),
dest.display()
),
e,
)
})?;
set_file_mtime(&dest, src_mtime)?;
}
} else {
copy(src, &dest)?;
copy(src, &dest).map_err(|e| {
Error::chain(
format!("Was not able to copy file {} to {}", src.display(), dest.display()),
e,
)
})?;
set_file_mtime(&dest, src_mtime)?;
}
}
@ -146,7 +164,16 @@ pub fn copy_directory(src: &PathBuf, dest: &PathBuf, hard_link: bool) -> Result<
create_directory(&target_path)?;
}
} else {
copy_file(entry.path(), dest, src, hard_link)?;
copy_file(entry.path(), dest, src, hard_link).map_err(|e| {
Error::chain(
format!(
"Was not able to copy file {} to {}",
entry.path().display(),
dest.display()
),
e,
)
})?;
}
}
Ok(())

View file

@ -1,5 +1,6 @@
pub mod de;
pub mod fs;
pub mod minify;
pub mod net;
pub mod site;
pub mod slugs;

View file

@ -0,0 +1,50 @@
use errors::{bail, Result};
use minify_html::{with_friendly_error, Cfg};
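/// Minify an HTML string with minify-html; embedded JS and CSS are left untouched.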
pub fn html(html: String) -> Result<String> {
let cfg = &Cfg { minify_js: false, minify_css: false };
let mut input_bytes = html.as_bytes().to_vec();
match with_friendly_error(&mut input_bytes, cfg) {
Ok(len) => match std::str::from_utf8(&input_bytes) {
Ok(result) => Ok(result[..len].to_string()),
Err(err) => bail!("Failed to convert bytes to string : {}", err),
},
Err(minify_error) => {
bail!(
"Failed to truncate html at character {}: {} \n {}",
minify_error.position,
minify_error.message,
minify_error.code_context
);
}
}
}
#[cfg(test)]
mod tests {
use super::*;
// https://github.com/getzola/zola/issues/1292
#[test]
fn can_minify_html() {
let input = r#"
<!doctype html>
<html>
<head>
<meta charset="utf-8">
</head>
<body>
<p>Example blog post</p>
FOO BAR
</body>
</html>
"#;
let expected = r#"<!doctype html><html><head><meta charset=utf-8><body><p>Example blog post</p> FOO BAR"#;
let res = html(input.to_owned()).unwrap();
assert_eq!(res, expected);
}
}

View file

@ -33,8 +33,8 @@ uses the filename to detect the language:
- `content/an-article.md`: this will be the default language
- `content/an-article.fr.md`: this will be in French
If the language code in the filename does not correspond to one of the languages configured,
an error will be shown.
If the language code in the filename does not correspond to one of the languages or
the default language configured, an error will be shown.
If your default language has an `_index.md` in a directory, you will need to add an `_index.{code}.md`
file with the desired front-matter options as there is no language fallback.

View file

@ -79,6 +79,9 @@ by triple pluses (`+++`).
Although none of the front matter variables are mandatory, the opening and closing `+++` are required.
Note that even though the use of TOML is encouraged, YAML front matter is also supported to ease porting
legacy content. In this case the embedded metadata must be enclosed by triple minuses (`---`).
Here is an example page with all the available variables. The values provided below are the
default values.

View file

@ -18,6 +18,9 @@ Any non-Markdown file in a section directory is added to the `assets` collection
[content overview](@/documentation/content/overview.md#asset-colocation). These files are then available in the
Markdown file using relative links.
## Drafting
Just like pages, sections can be drafted by setting the `draft` option in the front matter. By default this is not done. When a section is drafted, its descendants (pages, subsections and assets) will not be processed unless the `--drafts` flag is passed. Note that even pages without a `draft` status will not be processed if one of their parent sections is drafted.
## Front matter
The `_index.md` file within a directory defines the content and metadata for that section. To set
@ -30,6 +33,9 @@ to your templates through the `section.content` variable.
Although none of the front matter variables are mandatory, the opening and closing `+++` are required.
Note that even though the use of TOML is encouraged, YAML front matter is also supported to ease porting
legacy content. In this case the embedded metadata must be enclosed by triple minuses (`---`).
Here is an example `_index.md` with all the available variables. The values provided below are the
default values.
@ -39,6 +45,9 @@ title = ""
description = ""
# A draft section is only loaded if the `--drafts` flag is passed to `zola build`, `zola serve` or `zola check`.
draft = false
# Used to sort pages by "date", "weight" or "none". See below for more information.
sort_by = "none"

View file

@ -134,6 +134,24 @@ If you want to have some content that looks like a shortcode but not have Zola t
you will need to escape it by using `{%/*` and `*/%}` instead of `{%` and `%}`. You won't need to escape
anything else until the closing tag.
### Invocation Count
Every shortcode invocation receives a variable named `nth` in its context that tracks how many times that
particular shortcode has been invoked in the current Markdown file. Given a shortcode `true_statement.html` template:
```jinja2
<p id="number{{ nth }}">{{ value }} is equal to {{ nth }}.</p>
```
It could be used in our Markdown as follows:
```md
{{/* true_statement(value=1) */}}
{{/* true_statement(value=2) */}}
```
This is useful when implementing custom markup for features such as sidenotes or end notes.
## Built-in shortcodes
Zola comes with a few built-in shortcodes. If you want to override a default shortcode template,

View file

@ -35,7 +35,7 @@ Here is a full list of supported languages and their short names:
- C -> ["c", "h"]
- C# -> ["cs", "csx"]
- C++ -> ["C", "c++", "cc", "cp", "cpp", "cxx", "h", "h++", "hh", "hpp", "hxx", "inl", "ipp"]
- Clojure -> ["clj", "cljc", "cljs", "edn"]
- ClojureC -> ["boot", "clj", "cljc", "cljs", "cljx"]
- CMake -> ["CMakeLists.txt", "cmake"]
- CMake C Header -> ["h.in"]
- CMake C++ Header -> ["h++.in", "hh.in", "hpp.in", "hxx.in"]
@ -46,6 +46,7 @@ Here is a full list of supported languages and their short names:
- Dart -> ["dart"]
- Diff -> ["diff", "patch"]
- Dockerfile -> ["Dockerfile", "dockerfile"]
- EDN -> ["edn"]
- Elixir -> ["ex", "exs"]
- Elm -> ["elm"]
- Erlang -> ["Emakefile", "emakefile", "erl", "escript", "hrl"]
@ -66,7 +67,7 @@ Here is a full list of supported languages and their short names:
- Git Rebase Todo -> ["git-rebase-todo"]
- GLSL -> ["comp", "frag", "fs", "fsh", "fshader", "geom", "glsl", "gs", "gsh", "gshader", "tesc", "tese", "vert", "vs", "vsh", "vshader"]
- Go -> ["go"]
- GraphQL -> ["gql", "graphql"]
- GraphQL -> ["gql", "graphql", "graphqls"]
- Graphviz (DOT) -> ["DOT", "dot", "gv"]
- Groovy -> ["Jenkinsfile", "gradle", "groovy", "gvy"]
- Handlebars -> ["handlebars", "handlebars.html", "hbr", "hbrs", "hbs", "hdbs", "hjs", "mu", "mustache", "rac", "stache", "template", "tmpl"]
@ -144,6 +145,9 @@ Here is a full list of supported languages and their short names:
- YAML -> ["sublime-syntax", "yaml", "yml"]
```
Note: due to some issues with the JavaScript syntax, the TypeScript syntax will be used instead.
If you want to highlight a language not on this list, please open an issue or a pull request on the [Zola repo](https://github.com/getzola/zola).
Alternatively, the `extra_syntaxes` configuration option can be used to add additional syntax files.

View file

@ -3,7 +3,62 @@ title = "Taxonomies"
weight = 90
+++
Zola has built-in support for taxonomies.
Zola has built-in support for taxonomies. Taxonomies are a way for users to group content according to user-defined categories.
## Definitions
- Taxonomy: A category that can be used to group content
- Term: A specific group within a taxonomy
- Value: A piece of content that can be associated with a term
## Example: a movie website
Imagine that you want to make a website to display information about various movies. In that case you could use the following taxonomies:
- Director
- Genres
- Awards
- Release year
Then at build time Zola can create pages for each taxonomy listing all of the known terms as well as pages for each term in a taxonomy, listing all of the pieces of content associated with that term.
Imagine again we have the following movies:
```
- Shape of water <--- Value
- Director <--- Taxonomy
- Guillermo Del Toro <--- Term
- Genres <--- Taxonomy
- Thriller <--- Term
- Drama <--- Term
- Awards <--- Taxonomy
- Golden globe <--- Term
- Academy award <--- Term
- BAFTA <--- Term
- Release year <--- Taxonomy
- 2017 <--- Term
- The Room: <--- Value
- Director <--- Taxonomy
- Tommy Wiseau <--- Term
- Genres <--- Taxonomy
- Romance <--- Term
- Drama <--- Term
- Release Year <--- Taxonomy
- 2003 <--- Term
- Bright <--- Value
- Director <--- Taxonomy
- David Ayer <--- Term
- Genres <--- Taxonomy
- Fantasy <--- Term
- Action <--- Term
- Awards <--- Taxonomy
- California on Location Awards <--- Term
- Release Year <--- Taxonomy
- 2017 <--- Term
```
In this example the page for `Release year` would include links to pages for both 2003 and 2017, where the page for 2017 would list both Shape of Water and Bright.
## Configuration
@ -23,16 +78,30 @@ Insert into the configuration file (config.toml):
**Example 1:** (one language)
```toml
taxonomies = [ name = "categories", rss = true ]
taxonomies = [
{ name = "director", feed = true},
{ name = "genres", feed = true},
{ name = "awards", feed = true},
{ name = "release-year", feed = true},
]
```
**Example 2:** (multilingual site)
```toml
taxonomies = [
{name = "tags", lang = "fr"},
{name = "tags", lang = "eo"},
{name = "tags", lang = "en"},
{name = "director", feed = true, lang = "fr"},
{name = "director", feed = true, lang = "eo"},
{name = "director", feed = true, lang = "en"},
{name = "genres", feed = true, lang = "fr"},
{name = "genres", feed = true, lang = "eo"},
{name = "genres", feed = true, lang = "en"},
{name = "awards", feed = true, lang = "fr"},
{name = "awards", feed = true, lang = "eo"},
{name = "awards", feed = true, lang = "en"},
{name = "release-year", feed = true, lang = "fr"},
{name = "release-year", feed = true, lang = "eo"},
{name = "release-year", feed = true, lang = "en"},
]
```
@ -44,11 +113,13 @@ Once the configuration is done, you can then set taxonomies in your content and
```toml
+++
title = "Writing a static-site generator in Rust"
date = 2019-08-15
title = "Shape of water"
date = 2019-08-15 # date of the post, not the movie
[taxonomies]
tags = ["rust", "web"]
categories = ["programming"]
director=["Guillermo Del Toro"]
genres=["Thriller","Drama"]
awards=["Golden Globe", "Academy award", "BAFTA"]
release-year = ["2017"]
+++
```

View file

@ -13,11 +13,12 @@ If you are not familiar with TOML, have a look at [the TOML spec](https://github
Here are the current `config.toml` sections:
1. main (unnamed)
2. link_checker
3. slugify
4. search
5. translations
6. extra
2. markdown
3. link_checker
4. slugify
5. search
6. translations
7. extra
**Only the `base_url` variable is mandatory**. Everything else is optional. All configuration variables
used by Zola as well as their default values are listed below:
@ -36,13 +37,6 @@ default_language = "en"
# The site theme to use.
theme = ""
# When set to "true", all code blocks are highlighted.
highlight_code = false
# The theme to use for code highlighting.
# See below for list of allowed values.
highlight_theme = "base16-ocean-dark"
# When set to "true", a feed is automatically generated.
generate_feed = false
@ -86,6 +80,9 @@ languages = []
# Sass files in theme directories are always compiled.
compile_sass = false
# When set to "true", the generated HTML files are minified.
minify_html = false
# A list of glob patterns specifying asset files to ignore when the content
# directory is processed. Defaults to none, which means that all asset files are
# copied over to the `public` directory.
@ -96,6 +93,36 @@ ignored_content = []
# A list of directories used to search for additional `.sublime-syntax` files.
extra_syntaxes = []
# You can override the default output directory `public` by setting another value.
# output_dir = "docs"
# Configuration of the Markdown rendering
[markdown]
# When set to "true", all code blocks are highlighted.
highlight_code = false
# The theme to use for code highlighting.
# See below for list of allowed values.
highlight_theme = "base16-ocean-dark"
# When set to "true", emoji aliases translated to their corresponding
# Unicode emoji equivalent in the rendered Markdown files. (e.g.: :smile: => 😄)
render_emoji = false
# Whether external links are to be opened in a new tab
# If this is true, a `rel="noopener"` will always automatically be added for security reasons
external_links_target_blank = false
# Whether to set rel="nofollow" for all external links
external_links_no_follow = false
# Whether to set rel="noreferrer" for all external links
external_links_no_referrer = false
# Whether smart punctuation is enabled (changing quotes, dashes, dots into their typographic form)
# For example, `...` into `…`, `"quote"` into `“curly”` etc
smart_punctuation = false
# Configuration of the link checker.
[link_checker]
# Skip link checking for external URLs that start with these prefixes

View file

@ -77,7 +77,7 @@ $ choco install zola
Zola does not work in PowerShell ISE.
## From source
To build Zola from source, you will need to have Git, [Rust (at least 1.43) and Cargo](https://www.rust-lang.org/)
To build Zola from source, you will need to have Git, [Rust (at least 1.45) and Cargo](https://www.rust-lang.org/)
installed. You will also need to meet additional dependencies to compile [libsass](https://github.com/sass/libsass):
- OSX, Linux and other Unix-like operating systems: `make` (`gmake` on BSDs), `g++`, `libssl-dev`

View file

@ -173,7 +173,7 @@ We now need to make the `blog-page.html` template. In the `templates` directory,
{{ page.title }}
</h1>
<p class="subtitle"><strong>{{ page.date }}</strong></p>
<p>{{ page.content | safe }}</p>
{{ page.content | safe }}
{% endblock content %}
```

View file

@ -146,7 +146,7 @@ In the case of non-internal links, you can also add a cachebust of the format `?
by passing `cachebust=true` to the `get_url` function.
### 'get_file_hash`
### `get_file_hash`
Gets the hash digest for a static file. Supported hashes are SHA-256, SHA-384 (default) and SHA-512. Requires `path`. The `sha_type` key is optional and must be one of 256, 384 or 512.
@ -202,7 +202,7 @@ items: Array<TaxonomyTerm>;
See the [Taxonomies documentation](@/documentation/templates/taxonomies.md) for a full documentation of those types.
### `load_data`
Loads data from a file or URL. Supported file types include *toml*, *json* and *csv*.
Loads data from a file or URL. Supported file types include *toml*, *json*, *csv* and *bibtex*.
Any other file type will be loaded as plain text.
The `path` argument specifies the path to the data file relative to your base directory, where your `config.toml` is.
@ -213,7 +213,7 @@ As a security precaution, if this file is outside the main site directory, your
```
The optional `format` argument allows you to specify and override which data type is contained
within the file specified in the `path` argument. Valid entries are `toml`, `json`, `csv`
within the file specified in the `path` argument. Valid entries are `toml`, `json`, `csv`, `bibtex`
or `plain`. If the `format` argument isn't specified, then the path extension is used.
```jinja2
@ -251,6 +251,58 @@ template:
}
```
The `bibtex` format loads data into a structure matching the format used by the
[nom-bibtex crate](https://crates.io/crates/nom-bibtex). The following is an example of data
in bibtex format:
```
@preamble{"A bibtex preamble" # " this is."}
@Comment{
Here is a comment.
}
Another comment!
@string(name = "Vincent Prouillet")
@string(github = "https://github.com/getzola/zola")
@misc {my_citation_key,
author= name,
title = "Zola",
note = "github: " # github
}
```
The following is the json-equivalent format of the produced bibtex data structure:
```json
{
"preambles": ["A bibtex preamble this is."],
"comments": ["Here is a comment.", "Another comment!"],
"variables": {
"name": "Vincent Prouillet",
"github": "https://github.com/getzola/zola"
},
"bibliographies": [
{
"entry_type": "misc",
"citation_key": "my_citation_key",
"tags": {
"author": "Vincent Prouillet",
"title": "Zola",
"note": "github: https://github.com/getzola/zola"
}
}
]
}
```
Finally, the bibtex data can be accessed from the template as follows:
```jinja2
{% set tags = data.bibliographies[0].tags %}
This was generated using {{ tags.title }}, authored by {{ tags.author }}.
```
#### Remote content
Instead of using a file, you can load data from a remote URL. This can be done by specifying a `url` parameter

View file

@ -49,3 +49,31 @@ A paginated taxonomy gets two variables aside from the `paginator` variable:
- a `term` variable of type `TaxonomyTerm`.
See the [taxonomies page](@/documentation/templates/taxonomies.md) for a detailed version of the types.
## Example
Here is an example from a theme on how to use pagination on a page (`index.html` in this case):
```jinja2
<div class="posts">
{% for page in paginator.pages %}
<article class="post">
{{ post_macros::title(page=page) }}
<div class="post__summary">
{{ page.summary | safe }}
</div>
<div class="read-more">
<a href="{{ page.permalink }}">Read more...</a>
</div>
</article>
{% endfor %}
</div>
<nav class="pagination">
{% if paginator.previous %}
<a class="previous" href="{{ paginator.previous }}"> Previous</a>
{% endif %}
{% if paginator.next %}
<a class="next" href="{{ paginator.next }}">Next </a>
{% endif %}
</nav>
```

View file

@ -11,4 +11,6 @@ and the default is what most sites want:
```jinja2
User-agent: *
Allow: /
Sitemap: {{/* get_url(path="sitemap.xml") */}}
```

View file

@ -3,11 +3,11 @@
title = "DeepThought"
description = "A simple blog theme focused on writing powered by Bulma and Zola."
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/RatanShreshtha/DeepThought.git"
homepage = "https://github.com/RatanShreshtha/DeepThought"
minimum_version = "0.9.0"
@ -3,11 +3,11 @@
title = "Ergo"
description = "A simple blog Theme focused on writing, inspired by svbtle"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/InsidiousMind/Ergo.git"
homepage = "https://github.com/insipx/Ergo"
minimum_version = "0.4.1"
@ -42,8 +42,10 @@ Here's a timelapse:
Get [Zola](https://www.getzola.org/) and/or follow their guide on [installing a theme](https://www.getzola.org/documentation/themes/installing-and-using-themes/).
Make sure to add `theme = "ergo"` to your `config.toml`
#### Check zola version (only 0.4.1+)
Just to double-check to make sure you have the right version. It is not supported to use this theme with a version under 0.4.1.
Ergo relies on the `paginate_by` variable being set in `content/_index.md`.
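A sketch of the corresponding front matter, with an illustrative value:
```toml
# In the front matter of content/_index.md (the value 5 is illustrative)
paginate_by = 5
```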
#### Check zola version (only 0.11.0+)
Double-check that you have the right version; using this theme with a version under 0.11.0 is not supported.
### how to serve
Go into your site's directory and type `zola serve`. You should see your new site at `localhost:1111`.
@ -64,6 +66,9 @@ profile = 'profile.svg'
# Description. This is needed for SEO/site metadata purposes
description = "Simple blog theme focused on writing, inspired by svbtle"
# Color themes used by the theme (it will use the ${color_theme}.css file, generated from the SASS or SCSS file of the same name). Defaults to ["default"]. Users can choose any of them; the default theme is the first in the list.
color_themes = ["my-awesome-theme", "default"]
# website, should not be preceded with `http://`
website = "code.liquidthink.net"
@ -84,6 +89,8 @@ reddit = "${your_reddit}"
# youtube
youtube = "${your_youtube_channel_id}"
# If any social networks you want are missing from this list, open an issue and I will add them ASAP
# Whether to use country flags or language code
country_flags = true
```
## Features
@ -92,7 +99,7 @@ youtube = "${your_youtube_channel_id}"
- [ ] Edit Colors in `config.toml`
- [x] NoJS
- [ ] Analytics
- [ ] Comments?
- [x] Comments?
- [ ] Like button http://kudosplease.com/
- [ ] categories?
- [ ] related posts? (would meaningful related posts, or unmeaningful ones, be worth it w/o database?)
@ -3,11 +3,11 @@
title = "Zulma"
description = "A zola theme based off bulma.css"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/Worble/Zulma"
homepage = "https://github.com/Worble/Zulma"
minimum_version = "0.6.0"
@ -1,6 +1,4 @@
+++
template = "themes.html"
sort_by = "date"
+++
@ -3,11 +3,11 @@
title = "after-dark"
description = "A robust, elegant dark theme"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/getzola/after-dark.git"
homepage = "https://github.com/getzola/after-dark"
minimum_version = "0.11.0"
@ -3,11 +3,11 @@
title = "Anpu"
description = "A port of the Hugo Anubis theme"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/zbrox/anpu-zola-theme.git"
homepage = "https://github.com/zbrox/anpu-zola-theme"
minimum_version = "0.11.0"
@ -3,11 +3,11 @@
title = "book"
description = "A book theme inspired from GitBook/mdBook"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/getzola/book.git"
homepage = "https://github.com/getzola/book"
minimum_version = "0.5.0"
@ -68,10 +68,10 @@ slug `introduction`, then you would set `redirect_to = "introduction"`.
### Numbered chapters
By default, the `book` theme will number the chapters and pages in the left menu.
You can disable that by setting the `book_numbered_chapters` in `extra`:
You can disable that by setting the `book_number_chapters` in `extra`:
```toml
book_numbered_chapters = false
book_number_chapters = false
```
@ -3,11 +3,11 @@
title = "Clean Blog"
description = "A port of Start Bootstrap Clean Blog for Zola"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/dave-tucker/zola-clean-blog"
homepage = "https://github.com/dave-tucker/zola-clean-blog"
minimum_version = "0.4.0"
@ -3,11 +3,11 @@
title = "codinfox-zola"
description = "Codinfox theme for Zola"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/svavs/codinfox-zola"
homepage = "https://github.com/svavs/codinfox-zola"
minimum_version = "0.11.0"
@ -3,11 +3,11 @@
title = "dinkleberg"
description = "The Rust BR theme for Gutenberg"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/rust-br/dinkleberg.git"
homepage = "https://github.com/rust-br/dinkleberg"
minimum_version = "0.4.0"
@ -3,11 +3,11 @@
title = "Docsascode_theme"
description = "A modern simple Zola's theme related to docs as code methodology"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/codeandmedia/zola_docsascode_theme.git"
homepage = "https://github.com/codeandmedia/zola_docsascode_theme"
minimum_version = "0.10.0"
@ -29,6 +29,12 @@ The repo contains a theme for [Zola](https://www.getzola.org/) (the best static
codeandmedia/docsascode-theme:latest
```
If you want to use Docker on MacBook M1 processors, Raspberry Pi 4 64-bit, Amazon Graviton or another ARM64 platform, just fork the ARM64 branch or pull
```
codeandmedia/docsascode-theme-arm64:latest
```
## Perks
* light / dark switcher
@ -3,11 +3,11 @@
title = "even"
description = "A robust, elegant dark theme"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/getzola/even.git"
homepage = "https://github.com/getzola/even"
minimum_version = "0.11.0"
@ -3,11 +3,11 @@
title = "feather"
description = "A modern blog theme"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/piedoom/feather"
homepage = "https://github.com/piedoom/feather"
minimum_version = "0.5.1"
@ -3,11 +3,11 @@
title = "Float"
description = "An elegant blog theme"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://gitlab.com/float-theme/float.git"
homepage = "https://float-theme.netlify.app/"
minimum_version = "0.11.0"
@ -3,11 +3,11 @@
title = "hallo"
description = "A single-page theme to introduce yourself."
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/flyingP0tat0/zola-hallo.git"
homepage = "https://github.com/janbaudisch/zola-hallo"
minimum_version = "0.4.0"
@ -122,8 +122,8 @@ foreground = "#FFF" # text and portrait border
hover = "#333" # link hover
```
[build-img]: https://travis-ci.com/janbaudisch/zola-hallo.svg?branch=master
[build-url]: https://travis-ci.com/janbaudisch/zola-hallo
[build-img]: https://builds.sr.ht/~janbaudisch/zola-hallo.svg
[build-url]: https://builds.sr.ht/~janbaudisch/zola-hallo
[demo-img]: https://img.shields.io/badge/demo-live-green.svg
[demo-url]: https://zola-hallo.janbaudisch.dev
[zola]: https://www.getzola.org
@ -3,11 +3,11 @@
title = "hyde"
description = "A classic blog theme"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/getzola/hyde.git"
homepage = "https://github.com/getzola/hyde"
minimum_version = "0.11.0"
@ -3,11 +3,11 @@
title = "juice"
description = "An intuitive, elegant, and lightweight Zola™ theme for product sites."
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/huhu/juice"
homepage = "https://github.com/huhu/juice"
minimum_version = "0.11.0"
@ -3,11 +3,11 @@
title = "lightspeed"
description = "Zola theme with a perfect Lighthouse score"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/carpetscheme/lightspeed"
homepage = "https://github.com/carpetscheme/lightspeed"
minimum_version = "0.10.0"
@ -26,8 +26,9 @@ An insanely fast and performance-based Zola theme, ported from [Light Speed Jeky
Some fun facts about the theme:
* Perfect score on Google's Lighthouse audit
* Only ~600 bytes of CSS
* Only ~700 bytes of CSS
* No JavaScript
* Now with SEO!
Demo: [quirky-perlman-34d0da.netlify.com](https://quirky-perlman-34d0da.netlify.com)
@ -38,10 +39,10 @@ Demo: [quirky-perlman-34d0da.netlify.com](https://quirky-perlman-34d0da.netlify.
- [Installation](#installation)
- [Options](#options)
- [Title](#title)
- [Sass](#Sass)
- [Footer menu](#footer-menu)
- [Author](#author)
- [Netlify](#netlify)
- [SEO](#seo)
- [Footer text](#footer-text)
- [Sass](#Sass)
- [Original](#original)
- [License](#license)
@ -77,18 +78,6 @@ description = "for different folks"
```
### Sass
Styles are compiled from sass and imported inline to the header :zap:
You can override the styles by enabling Sass compilation in the config:
```toml
compile_sass = true
```
...and placing a replacement `style.scss` file in your sass folder.
### Footer-menu
Set a field in `extra` with a key of `footer_links`:
@ -97,7 +86,7 @@ Set a field in `extra` with a key of `footer_links`:
footer_links = [
{url = "$BASE_URL/about", name = "About"},
{url = "$BASE_URL/rss.xml", name = "RSS"},
{url = "$BASE_URL/atom.xml", name = "RSS"},
{url = "https://google.com", name = "Google"},
]
```
@ -111,32 +100,45 @@ Create pages such as `$BASE_URL/about` by placing them in a subfolder of the con
path = "about"
```
### Author
### SEO
To add author name to the head meta-data, set an `author` field in `extra`:
Most SEO tags are populated from the page metadata, but you can set the `author` and, for the `og:image` tag, provide the path to an image:
```toml
[extra]
author = "Grant Green"
ogimage = "Greenery.png"
```
### Netlify
### Footer-text
Deployed on netlify? Add a link in the footer by setting `netlify` in `extra` as `true`.
By default the footer provides links to Zola and Netlify, and a tagline of "Maintained with :heart: for the web".
To disable any of those parts, and/or add a custom tagline of your own, the following options are available:
```toml
[extra]
netlify = true
zola = true
netlify = false
maintained_with_love = false
footer_tagline = "What if everything is an illusion and nothing exists? In that case, I definitely overpaid for my carpet."
```
### Sass
Styles are compiled from sass and imported inline to the header :zap:
You can override the styles by enabling Sass compilation in the config:
```toml
compile_sass = true
```
...and placing a replacement `style.scss` file in your sass folder.
## Original
This template is based on the Jekyll template [Light Speed Jekyll](https://github.com/bradleytaunt/lightspeed) by **Bradley Taunt**:
- <https://github.com/bradleytaunt>
- <https://twitter.com/bradtaunt>
This template is based on the Jekyll template [Light Speed Jekyll](https://github.com/bradleytaunt/lightspeed) by Bradley Taunt.
## License
@ -3,11 +3,11 @@
title = "Oceanic Zen"
description = "Minimalistic blog theme"
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/barlog-m/oceanic-zen.git"
homepage = "https://github.com/barlog-m/oceanic-zen"
minimum_version = "0.9.0"
@ -3,11 +3,11 @@
title = "sam"
description = "A Simple and Minimalist theme with a focus on typography and content."
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/janbaudisch/zola-sam.git"
homepage = "https://github.com/janbaudisch/zola-sam"
minimum_version = "0.4.0"
@ -154,8 +154,8 @@ To place some text at the end of pages, set the following:
text = "Some footer text."
```
[build-img]: https://travis-ci.com/janbaudisch/zola-sam.svg?branch=master
[build-url]: https://travis-ci.com/janbaudisch/zola-sam
[build-img]: https://builds.sr.ht/~janbaudisch/zola-sam.svg
[build-url]: https://builds.sr.ht/~janbaudisch/zola-sam
[demo-img]: https://img.shields.io/badge/demo-live-green.svg
[demo-url]: https://zola-sam.janbaudisch.dev
[zola]: https://getzola.org
@ -3,11 +3,11 @@
title = "simple-dev-blog"
description = "A simple dev blog theme with no javascript, prerendered linked pages and SEO tags."
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/bennetthardwick/simple-dev-blog-zola-starter"
homepage = "https://github.com/bennetthardwick/simple-dev-blog-zola-starter"
minimum_version = "0.4.0"
@ -19,7 +19,6 @@ name = "Bennett Hardwick"
homepage = "https://bennetthardwick.com/"
+++
![preview image](https://i.imgur.com/IWoJtkF.png)
# simple-dev-blog-zola-starter
@ -28,7 +27,7 @@ A simple dev-blog theme for Zola. It uses no JavaScript, prerenders links betwee
You can view it live [here](https://simple-dev-blog-zola-starter.netlify.app/).
### How to get started
## How to get started
To create a new Zola site, first download the CLI and install it on your system.
You can find installation instructions [on the Zola website](https://www.getzola.org/documentation/getting-started/installation/).
@ -50,7 +49,11 @@ You can find installation instructions [on the Zola website](https://www.getzola
3. Now in your `config.toml` file, choose the theme by setting `theme = "simple-dev-blog"`.
4. That's it! Now build your site by running the following command, and navigate to `127.0.0.1:1111`:
4. This theme uses the `tags` taxonomy; in your `config.toml` file, set `taxonomies = [ { name = "tags" } ]`
5. Copy across the default content from the theme by running `cp themes/simple-dev-blog/content/* ./content -r`
6. That's it! Now build your site by running the following command, and navigate to `127.0.0.1:1111`:
```sh
zola serve
@ -58,5 +61,30 @@ You can find installation instructions [on the Zola website](https://www.getzola
You should now have a speedy simple dev blog up and running, have fun!
## Customisation
Look at the `config.toml` and `theme.toml` in this repo for an idea; here's a list of all the options:
### Global
The following options should go under `[extra]` in `config.toml` (see the sketch after this list):
- `accent_light` - a lighter shade of your site's accent color
- `accent` - your site's accent color
- `blog_path` - the path to your blog (defaults to `blog`)
- `default_og_image` - the path to the default og:image for your page
- `footer_about` - the content for your footer in markdown
- `icon` - the path to the icon for your site in the content folder
- E.g. to add the file `icon.png`, you should put it in `content/icon.png`
- `nav` - the navigation links for your site; see `theme.toml` for the expected structure
- `not_found_message` - the content for your 404 page in markdown
- `profile_large` - the path to a larger vertical version of your profile picture in the content folder
- `profile_small` - the path to a small version of your profile picture in the content folder
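A hedged sketch of how these global options might look in `config.toml` (the keys come from the list above, all values are illustrative):
```toml
[extra]
accent = "#226688"
accent_light = "#77aacc"
blog_path = "blog"
default_og_image = "default-og.png"
footer_about = "Written with the **simple-dev-blog** theme."
icon = "icon.png"
not_found_message = "Sorry, this page does not exist."
profile_large = "profile-large.png"
profile_small = "profile-small.png"
# nav - see theme.toml for the expected structure
```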
### Page
The following options should be under the `[extra]` section of each page
- `thumbnail` - the path to your og:image for that page
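And a corresponding page-level sketch (the path is illustrative):
```toml
# In a page's front matter
[extra]
thumbnail = "my-post-thumbnail.png"
```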
@ -3,11 +3,11 @@
title = "Slim"
description = "Slim is a minimal, clean and beautiful theme for Zola."
template = "theme.html"
date = 2020-12-08T16:58:54+01:00
date = 2020-12-14T20:45:47+01:00
[extra]
created = 2020-12-08T16:58:54+01:00
updated = 2020-12-08T16:58:54+01:00
created = 2020-12-14T20:45:47+01:00
updated = 2020-12-14T20:45:47+01:00
repository = "https://github.com/jameshclrk/zola-slim"
homepage = "https://github.com/jameshclrk/zola-slim"
minimum_version = "0.8.0"
Some files were not shown because too many files have changed in this diff.