commit b2354c6cd8

.gitignore | 1 (vendored)
@@ -3,6 +3,7 @@ target
test_site/public
test_site_i18n/public
docs/public
docs/out

small-blog
medium-blog

.gitmodules | 6 (vendored)
@@ -70,3 +70,9 @@
[submodule "sublime/syntaxes/extra/sublime-clojure"]
	path = sublime/syntaxes/extra/sublime-clojure
	url = https://github.com/tonsky/sublime-clojure.git
[submodule "sublime/syntaxes/extra/sublime-zig-language"]
	path = sublime/syntaxes/extra/sublime-zig-language
	url = https://github.com/ziglang/sublime-zig-language.git
[submodule "sublime/syntaxes/extra/protobuf-syntax-highlighting"]
	path = sublime/syntaxes/extra/protobuf-syntax-highlighting
	url = https://github.com/VcamX/protobuf-syntax-highlighting.git

CHANGELOG.md | 47
@@ -1,5 +1,52 @@
# Changelog

## 0.14.0 (2021-07-19)

### Breaking

- Newlines are now required after the closing `+++` of front-matter
- `resize_image` now returns an object `{url, static_path}` instead of just the URL, so you can follow up with other functions on the new file if needed
- `get_file_hash` now has the `base64` option set to `true` by default (previously `false`), since it is mainly used for integrity hashes, which are base64
- i18n rework: languages now have their own sections in `config.toml` to set up all of their options (a config sketch follows this list)
  1. taxonomies don't have a `lang` in the config anymore; declare them in their respective language section
  2. the `config` variable in templates has changed and is now a stripped-down, language-aware version of the previous `config` object
  3. search settings are now language-specific
  4. translations are now nested in the languages table
- Paths unification:
  1. `get_url` does not load automatically from the `static` folder anymore
  2. new path-resolving logic for all on-disk files: replace `@/` with `content/`, trim the leading `/` and search in `$BASE_DIR + $path`, `$BASE_DIR + static + $path` and `$BASE_DIR + content + $path`
  3. `get_file_hash` now returns the base64-encoded hash by default
  4. all functions working on files can now only load files in the Zola directory
  5. the `resize_image` return value has changed
  6. `page.assets` now start with a `/` to match `section.assets` and other paths
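
To make the reworked i18n layout concrete, here is a minimal sketch that parses the new format with Zola's internal `config` crate. The TOML keys come from the `LanguageOptions` struct and the `CONFIG_TRANSLATION` test fixture later in this commit; the `base_url` value is just a placeholder.

```rust
fn main() {
    // Sketch of the 0.14 per-language layout; keys mirror LanguageOptions.
    let config = config::Config::parse(
        r#"
base_url = "https://example.com"  # placeholder, any base_url works
default_language = "fr"

[translations]  # strings for the default language
title = "Un titre"

[languages.en]  # every other language gets its own table
generate_feed = true
build_search_index = true

[languages.en.translations]
title = "A title"
"#,
    )
    .unwrap();

    // The default language is auto-inserted into `languages` by parse().
    assert_eq!(config.get_translation("fr", "title").unwrap(), "Un titre");
    assert_eq!(config.get_translation("en", "title").unwrap(), "A title");
}
```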

### Other

- Internal links are now resolved in the `markdown` filter in the templates (#1296 #1316)
- Add a `required` argument to `load_data` so it can be allowed to fail
- `get_file_hash` now supports returning the base64-encoded hash
- The `markdown` filter now renders shortcodes
- Image processing now supports WebP
- Fix `zola serve` failing for some static files
- Fix `zola serve` not picking up directory renaming
- Add `path` to the taxonomy terms to be on par with pages and sections
- Add the `base16-aterlierdune-light` syntax highlighting theme
- Improve link checking: less concurrency and try not to overload the servers
- Allow using POST for `load_data`, along with a body to POST, and allow it to fail
- Add Zig and Protobuf syntax highlighting
- Footnote links are now stripped from summaries, as they were not linking to anything
- `get_url` and `get_taxonomy_url` are now marked as safe; no need to call `| safe` on their output
- Add an `allow_missing` optional argument to `get_image_metadata` to not error if the file is not found
- Add `permalink` to `Taxonomy` in templates
- Syntax highlighting improvements (see the documentation for details on each):
  1. add CSS class-based syntax highlighting
  2. allow hiding specific lines
  3. allow showing line numbers

## 0.13.0 (2021-01-09)

- Enable HTML minification

@@ -35,7 +35,7 @@ Tools > Developer > New Syntax from ... and put it in the `sublime/syntaxes` dir
You can also add a submodule to the repository of the wanted syntax:

```bash
$ cd sublime/syntaxes
$ cd sublime/syntaxes/extra
$ git submodule add https://github.com/elm-community/SublimeElmLanguageSupport
```

Cargo.lock | 911 (generated)
File diff suppressed because it is too large.

Cargo.toml | 13
@@ -1,6 +1,6 @@
[package]
name = "zola"
version = "0.13.0"
version = "0.14.0"
authors = ["Vincent Prouillet <hello@vincentprouillet.com>"]
edition = "2018"
license = "MIT"

@@ -37,11 +37,17 @@ open = "1.2"
globset = "0.4"
relative-path = "1"
serde_json = "1.0"
# For mimetype detection in serve mode
mime_guess = "2.0"

site = { path = "components/site" }
errors = { path = "components/errors" }
front_matter = { path = "components/front_matter" }
utils = { path = "components/utils" }
search = { path = "components/search" }

[dev-dependencies]
same-file = "1"

[workspace]
members = [

@@ -61,3 +67,8 @@ members = [
[profile.release]
lto = true
codegen-units = 1

[profile.dev]
# Disabling debug info speeds up builds a bunch,
# and we don't rely on it for debugging that much.
debug = 0

@@ -21,7 +21,7 @@ stages:
rustup_toolchain: stable
linux-pinned:
imageName: 'ubuntu-20.04'
rustup_toolchain: 1.45.2
rustup_toolchain: 1.49.0
pool:
vmImage: $(imageName)
steps:

@@ -12,7 +12,9 @@ serde_derive = "1"
chrono = "0.4"
globset = "0.4"
lazy_static = "1"
syntect = "4.1"
# TODO: go back to version 4/5 once https://github.com/trishume/syntect/pull/337 is merged
syntect = { git = "https://github.com/Keats/syntect.git", branch = "scopestack" }
unic-langid = "0.9"

errors = { path = "../errors" }
utils = { path = "../utils" }

@@ -1,16 +1,56 @@
use std::collections::HashMap;

use errors::{bail, Result};
use serde_derive::{Deserialize, Serialize};
use unic_langid::LanguageIdentifier;

#[derive(Clone, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
use crate::config::search;
use crate::config::taxonomies;

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default)]
pub struct Language {
    /// The language code
    pub code: String,
pub struct LanguageOptions {
    /// Title of the site. Defaults to None
    pub title: Option<String>,
    /// Description of the site. Defaults to None
    pub description: Option<String>,
    /// Whether to generate a feed for that language, defaults to `false`
    pub feed: bool,
    pub generate_feed: bool,
    /// The filename to use for feeds. Used to find the template, too.
    /// Defaults to "atom.xml", with "rss.xml" also having a template provided out of the box.
    pub feed_filename: String,
    pub taxonomies: Vec<taxonomies::Taxonomy>,
    /// Whether to generate search index for that language, defaults to `false`
    pub search: bool,
    pub build_search_index: bool,
    /// The search config, telling what to include in the search index for that language
    pub search: search::Search,
    /// A toml crate `Table` with String key representing term and value
    /// another `String` representing its translation.
    ///
    /// Use `get_translation()` method for translating key into different languages.
    pub translations: HashMap<String, String>,
}

pub type TranslateTerm = HashMap<String, String>;
impl Default for LanguageOptions {
    fn default() -> Self {
        LanguageOptions {
            title: None,
            description: None,
            generate_feed: false,
            feed_filename: String::new(),
            build_search_index: false,
            taxonomies: Vec::new(),
            search: search::Search::default(),
            translations: HashMap::new(),
        }
    }
}

/// We want to ensure the language codes are valid ones
pub fn validate_code(code: &str) -> Result<()> {
    if LanguageIdentifier::from_bytes(code.as_bytes()).is_err() {
        bail!("Language `{}` is not a valid Unicode Language Identifier (see http://unicode.org/reports/tr35/#Unicode_language_identifier)", code)
    }

    Ok(())
}
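
For reference, the `unic_langid` crate is what does the actual validation here. A tiny standalone sketch of the check `validate_code` performs (inputs are illustrative):

```rust
use unic_langid::LanguageIdentifier;

// Mirrors the check in validate_code above: plain codes and full
// Unicode Language Identifiers parse, arbitrary strings do not.
fn main() {
    assert!(LanguageIdentifier::from_bytes(b"fr").is_ok());
    assert!(LanguageIdentifier::from_bytes(b"en-US").is_ok());
    assert!(LanguageIdentifier::from_bytes(b"not a language!").is_err());
}
```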

@@ -1,8 +1,27 @@
use std::path::Path;

use serde_derive::{Deserialize, Serialize};
use syntect::parsing::SyntaxSet;
use syntect::parsing::{SyntaxSet, SyntaxSetBuilder};

use errors::Result;

pub const DEFAULT_HIGHLIGHT_THEME: &str = "base16-ocean-dark";

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default)]
pub struct ThemeCss {
    /// Which theme are we generating the CSS from
    pub theme: String,
    /// In which file are we going to output the CSS
    pub filename: String,
}

impl Default for ThemeCss {
    fn default() -> ThemeCss {
        ThemeCss { theme: String::new(), filename: String::new() }
    }
}

#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(default)]
pub struct Markdown {

@@ -11,6 +30,8 @@ pub struct Markdown {
    /// Which themes to use for code highlighting. See Readme for supported themes
    /// Defaults to "base16-ocean-dark"
    pub highlight_theme: String,
    /// Generate CSS files for Themes out of syntect
    pub highlight_themes_css: Vec<ThemeCss>,
    /// Whether to render emoji aliases (e.g.: :smile: => 😄) in the markdown files
    pub render_emoji: bool,
    /// Whether external links are to be opened in a new tab
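
The new `highlight_themes_css` field deserializes from the `[markdown]` table. A hedged sketch of a config using it, parsed with Zola's internal `config` crate; the key names follow the `Markdown` and `ThemeCss` structs above, and the output filename is purely illustrative:

```rust
fn main() {
    // CSS-class highlighting plus a generated stylesheet for one theme.
    let config = config::Config::parse(
        r#"
base_url = "https://example.com"  # placeholder

[markdown]
highlight_code = true
highlight_theme = "css"  # special value: emit classes instead of inline styles

[[markdown.highlight_themes_css]]
theme = "base16-ocean-dark"
filename = "syntax-theme.css"  # illustrative output path
"#,
    )
    .unwrap();

    assert_eq!(config.markdown.highlight_themes_css[0].theme, "base16-ocean-dark");
}
```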

@@ -31,6 +52,21 @@ pub struct Markdown {
}

impl Markdown {
    /// Attempt to load any extra syntax found in the extra syntaxes of the config
    pub fn load_extra_syntaxes(&mut self, base_path: &Path) -> Result<()> {
        if self.extra_syntaxes.is_empty() {
            return Ok(());
        }

        let mut ss = SyntaxSetBuilder::new();
        for dir in &self.extra_syntaxes {
            ss.add_from_folder(base_path.join(dir), true)?;
        }
        self.extra_syntax_set = Some(ss.build());

        Ok(())
    }

    pub fn has_external_link_tweaks(&self) -> bool {
        self.external_links_target_blank
            || self.external_links_no_follow

@@ -40,7 +76,7 @@ impl Markdown {
    pub fn construct_external_link_tag(&self, url: &str, title: &str) -> String {
        let mut rel_opts = Vec::new();
        let mut target = "".to_owned();
        let title = if title == "" { "".to_owned() } else { format!("title=\"{}\" ", title) };
        let title = if title.is_empty() { "".to_owned() } else { format!("title=\"{}\" ", title) };

        if self.external_links_target_blank {
            // Security risk otherwise

@@ -68,12 +104,13 @@ impl Default for Markdown {
        Markdown {
            highlight_code: false,
            highlight_theme: DEFAULT_HIGHLIGHT_THEME.to_owned(),
            highlight_themes_css: Vec::new(),
            render_emoji: false,
            external_links_target_blank: false,
            external_links_no_follow: false,
            external_links_no_referrer: false,
            smart_punctuation: false,
            extra_syntaxes: vec![],
            extra_syntaxes: Vec::new(),
            extra_syntax_set: None,
        }
    }

@@ -10,25 +10,24 @@ use std::path::{Path, PathBuf};

use globset::{Glob, GlobSet, GlobSetBuilder};
use serde_derive::{Deserialize, Serialize};
use syntect::parsing::SyntaxSetBuilder;
use toml::Value as Toml;

use crate::highlighting::THEME_SET;
use crate::theme::Theme;
use errors::{bail, Error, Result};
use utils::fs::read_file_with_error;
use utils::fs::read_file;

// We want a default base url for tests
static DEFAULT_BASE_URL: &str = "http://a-website.com";

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum Mode {
    Build,
    Serve,
    Check,
}

#[derive(Clone, Debug, Serialize, Deserialize)]
#[derive(Clone, Debug, Deserialize)]
#[serde(default)]
pub struct Config {
    /// Base URL of the site, the only required config argument

@@ -44,22 +43,9 @@ pub struct Config {
    /// The language used in the site. Defaults to "en"
    pub default_language: String,
    /// The list of supported languages outside of the default one
    pub languages: Vec<languages::Language>,

    /// Languages list and translated strings
    ///
    /// The `String` key of `HashMap` is a language name, the value should be toml crate `Table`
    /// with String key representing term and value another `String` representing its translation.
    ///
    /// The attribute is intentionally not public, use `get_translation()` method for translating
    /// key into different language.
    translations: HashMap<String, languages::TranslateTerm>,

    /// Whether to highlight all code blocks found in markdown files. Defaults to false
    highlight_code: bool,
    /// Which themes to use for code highlighting. See Readme for supported themes
    /// Defaults to "base16-ocean-dark"
    highlight_theme: String,
    pub languages: HashMap<String, languages::LanguageOptions>,
    /// The translations strings for the default language
    translations: HashMap<String, String>,

    /// Whether to generate a feed. Defaults to false.
    pub generate_feed: bool,

@@ -91,26 +77,33 @@ pub struct Config {
    #[serde(skip_serializing)]
    pub mode: Mode,

    /// A list of directories to search for additional `.sublime-syntax` files in.
    pub extra_syntaxes: Vec<String>,

    pub output_dir: String,

    pub link_checker: link_checker::LinkChecker,

    /// The setup for which slugification strategies to use for paths, taxonomies and anchors
    pub slugify: slugify::Slugify,

    /// The search config, telling what to include in the search index
    pub search: search::Search,

    /// The config for the Markdown rendering: syntax highlighting and everything
    pub markdown: markup::Markdown,

    /// All user params set in [extra] in the config
    pub extra: HashMap<String, Toml>,
}

#[derive(Serialize)]
pub struct SerializedConfig<'a> {
    base_url: &'a str,
    mode: Mode,
    title: &'a Option<String>,
    description: &'a Option<String>,
    languages: HashMap<&'a String, &'a languages::LanguageOptions>,
    generate_feed: bool,
    feed_filename: &'a str,
    taxonomies: &'a [taxonomies::Taxonomy],
    build_search_index: bool,
    extra: &'a HashMap<String, Toml>,
}

impl Config {
    /// Parses a string containing TOML to our Config struct
    /// Any extra parameter will end up in the extra field

@@ -124,14 +117,22 @@ impl Config {
            bail!("A base URL is required in config.toml with key `base_url`");
        }

        if !THEME_SET.themes.contains_key(&config.highlight_theme) {
            bail!("Highlight theme {} not available", config.highlight_theme)
        if config.markdown.highlight_theme != "css" {
            if !THEME_SET.themes.contains_key(&config.markdown.highlight_theme) {
                bail!(
                    "Highlight theme {} defined in config does not exist.",
                    config.markdown.highlight_theme
                );
            }
        }

        if config.languages.iter().any(|l| l.code == config.default_language) {
            bail!("Default language `{}` should not appear both in `config.default_language` and `config.languages`", config.default_language)
        languages::validate_code(&config.default_language)?;
        for code in config.languages.keys() {
            languages::validate_code(&code)?;
        }

        config.add_default_language();

        if !config.ignored_content.is_empty() {
            // Convert the file glob strings into a compiled glob set matcher. We want to do this once,
            // at program initialization, rather than for every page, for example. We arrange for the

@@ -150,87 +151,23 @@ impl Config {
            Some(glob_set_builder.build().expect("Bad ignored_content in config file."));
        }

        for taxonomy in config.taxonomies.iter_mut() {
            if taxonomy.lang.is_empty() {
                taxonomy.lang = config.default_language.clone();
            }
        }

        if config.highlight_code {
            println!("`highlight_code` has been moved to a [markdown] section. Top level `highlight_code` and `highlight_theme` will stop working in 0.14.");
        }
        if !config.extra_syntaxes.is_empty() {
            println!("`extra_syntaxes` has been moved to a [markdown] section. Top level `extra_syntaxes` will stop working in 0.14.");
        }

        Ok(config)
    }

    pub fn default_for_test() -> Self {
        let mut config = Config::default();
        config.add_default_language();
        config
    }

    /// Parses a config file from the given path
    pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Config> {
        let path = path.as_ref();
        let file_name = path.file_name().unwrap();
        let content = read_file_with_error(
            path,
            &format!("No `{:?}` file found. Are you in the right directory?", file_name),
        )?;
        let content =
            read_file(path).map_err(|e| errors::Error::chain("Failed to load config", e))?;
        Config::parse(&content)
    }

    /// Temporary, while we have the settings in 2 places
    /// TODO: remove me in 0.14
    pub fn highlight_code(&self) -> bool {
        if !self.highlight_code && !self.markdown.highlight_code {
            return false;
        }

        if self.highlight_code {
            true
        } else {
            self.markdown.highlight_code
        }
    }

    /// Temporary, while we have the settings in 2 places
    /// TODO: remove me in 0.14
    pub fn highlight_theme(&self) -> &str {
        if self.highlight_theme != markup::DEFAULT_HIGHLIGHT_THEME {
            &self.highlight_theme
        } else {
            &self.markdown.highlight_theme
        }
    }

    /// TODO: remove me in 0.14
    pub fn extra_syntaxes(&self) -> Vec<String> {
        if !self.markdown.extra_syntaxes.is_empty() {
            return self.markdown.extra_syntaxes.clone();
        }

        if !self.extra_syntaxes.is_empty() {
            return self.extra_syntaxes.clone();
        }

        Vec::new()
    }

    /// Attempt to load any extra syntax found in the extra syntaxes of the config
    /// TODO: move to markup.rs in 0.14
    pub fn load_extra_syntaxes(&mut self, base_path: &Path) -> Result<()> {
        let extra_syntaxes = self.extra_syntaxes();
        if extra_syntaxes.is_empty() {
            return Ok(());
        }

        let mut ss = SyntaxSetBuilder::new();
        for dir in &extra_syntaxes {
            ss.add_from_folder(base_path.join(dir), true)?;
        }
        self.markdown.extra_syntax_set = Some(ss.build());

        Ok(())
    }

    /// Makes a url, taking into account that the base url might have a trailing slash
    pub fn make_permalink(&self, path: &str) -> String {
        let trailing_bit =

@@ -255,6 +192,28 @@ impl Config {
        }
    }

    /// Adds the default language to the list of languages if not present
    pub fn add_default_language(&mut self) {
        // We automatically insert a language option for the default language *if* it isn't present
        // TODO: what to do if there is like an empty dict for the lang? merge it or use the language
        // TODO: as source of truth?
        if !self.languages.contains_key(&self.default_language) {
            self.languages.insert(
                self.default_language.clone(),
                languages::LanguageOptions {
                    title: self.title.clone(),
                    description: self.description.clone(),
                    generate_feed: self.generate_feed,
                    feed_filename: self.feed_filename.clone(),
                    build_search_index: self.build_search_index,
                    taxonomies: self.taxonomies.clone(),
                    search: self.search.clone(),
                    translations: self.translations.clone(),
                },
            );
        }
    }

    /// Merges the extra data from the theme with the config extra data
    fn add_theme_extra(&mut self, theme: &Theme) -> Result<()> {
        for (key, val) in &theme.extra {

@@ -270,27 +229,26 @@ impl Config {

    /// Parse the theme.toml file and merges the extra data from the theme
    /// with the config extra data
    pub fn merge_with_theme(&mut self, path: &PathBuf) -> Result<()> {
        let theme = Theme::from_file(path)?;
    pub fn merge_with_theme(&mut self, path: &PathBuf, theme_name: &str) -> Result<()> {
        let theme = Theme::from_file(path, theme_name)?;
        self.add_theme_extra(&theme)
    }

    /// Returns all the languages settings for languages other than the default one
    pub fn other_languages(&self) -> HashMap<&str, &languages::LanguageOptions> {
        let mut others = HashMap::new();
        for (k, v) in &self.languages {
            if k == &self.default_language {
                continue;
            }
            others.insert(k.as_str(), v);
        }
        others
    }

    /// Is this site using i18n?
    pub fn is_multilingual(&self) -> bool {
        !self.languages.is_empty()
    }

    /// Returns the codes of all additional languages
    pub fn languages_codes(&self) -> Vec<&str> {
        self.languages.iter().map(|l| l.code.as_ref()).collect()
    }

    pub fn is_in_build_mode(&self) -> bool {
        self.mode == Mode::Build
    }

    pub fn is_in_serve_mode(&self) -> bool {
        self.mode == Mode::Serve
        !self.other_languages().is_empty()
    }

    pub fn is_in_check_mode(&self) -> bool {

@@ -303,26 +261,42 @@ impl Config {

    pub fn enable_check_mode(&mut self) {
        self.mode = Mode::Check;
        // Disable syntax highlighting since the results won't be used
        // and this operation can be expensive.
        self.highlight_code = false;
        // Disable syntax highlighting since the results won't be used and it is slow
        self.markdown.highlight_code = false;
    }

    pub fn get_translation<S: AsRef<str>>(&self, lang: S, key: S) -> Result<String> {
        let terms = self.translations.get(lang.as_ref()).ok_or_else(|| {
            Error::msg(format!("Translation for language '{}' is missing", lang.as_ref()))
        })?;
    pub fn get_translation(&self, lang: &str, key: &str) -> Result<String> {
        if let Some(options) = self.languages.get(lang) {
            options
                .translations
                .get(key)
                .ok_or_else(|| {
                    Error::msg(format!(
                        "Translation key '{}' for language '{}' is missing",
                        key, lang
                    ))
                })
                .map(|term| term.to_string())
        } else {
            bail!("Language '{}' not found.", lang)
        }
    }

        terms
            .get(key.as_ref())
            .ok_or_else(|| {
                Error::msg(format!(
                    "Translation key '{}' for language '{}' is missing",
                    key.as_ref(),
                    lang.as_ref()
                ))
            })
            .map(|term| term.to_string())
    pub fn serialize(&self, lang: &str) -> SerializedConfig {
        let options = &self.languages[lang];

        SerializedConfig {
            base_url: &self.base_url,
            mode: self.mode,
            title: &options.title,
            description: &options.description,
            languages: self.languages.iter().filter(|(k, _)| k.as_str() != lang).collect(),
            generate_feed: options.generate_feed,
            feed_filename: &options.feed_filename,
            taxonomies: &options.taxonomies,
            build_search_index: options.build_search_index,
            extra: &self.extra,
        }
    }
}

@@ -361,10 +335,8 @@ impl Default for Config {
            title: None,
            description: None,
            theme: None,
            highlight_code: false,
            highlight_theme: "base16-ocean-dark".to_string(),
            default_language: "en".to_string(),
            languages: Vec::new(),
            languages: HashMap::new(),
            generate_feed: false,
            feed_limit: None,
            feed_filename: "atom.xml".to_string(),

@@ -377,7 +349,6 @@ impl Default for Config {
            ignored_content: Vec::new(),
            ignored_content_globset: None,
            translations: HashMap::new(),
            extra_syntaxes: Vec::new(),
            output_dir: "public".to_string(),
            link_checker: link_checker::LinkChecker::default(),
            slugify: slugify::Slugify::default(),

@@ -532,16 +503,17 @@ base_url = "https://remplace-par-ton-url.fr"
default_language = "fr"

[translations]
[translations.fr]
title = "Un titre"

[translations.en]
[languages.en]
[languages.en.translations]
title = "A title"
"#;
"#;

#[test]
fn can_use_present_translation() {
    let config = Config::parse(CONFIG_TRANSLATION).unwrap();
    assert!(config.languages.contains_key("fr"));
    assert_eq!(config.get_translation("fr", "title").unwrap(), "Un titre");
    assert_eq!(config.get_translation("en", "title").unwrap(), "A title");
}

@@ -551,7 +523,7 @@ title = "A title"
    let config = Config::parse(CONFIG_TRANSLATION).unwrap();
    let error = config.get_translation("absent", "key").unwrap_err();

    assert_eq!("Translation for language 'absent' is missing", format!("{}", error));
    assert_eq!("Language 'absent' not found.", format!("{}", error));
}

#[test]

@@ -668,21 +640,6 @@ anchors = "off"
    assert_eq!(config.slugify.anchors, SlugifyStrategy::Off);
}

#[test]
fn error_on_language_set_twice() {
    let config_str = r#"
base_url = "https://remplace-par-ton-url.fr"
default_language = "fr"
languages = [
    { code = "fr" },
    { code = "en" },
]
"#;
    let config = Config::parse(config_str);
    let err = config.unwrap_err();
    assert_eq!("Default language `fr` should not appear both in `config.default_language` and `config.languages`", format!("{}", err));
}

#[test]
fn cannot_overwrite_theme_mapping_with_invalid_type() {
    let config_str = r#"

@@ -13,6 +13,8 @@ pub struct Search {
    /// Includes the description in the search index. When the site becomes too large, you can switch
    /// to that instead. `false` by default
    pub include_description: bool,
    /// Include the path of the page in the search index. `false` by default.
    pub include_path: bool,
}

impl Default for Search {

@@ -21,6 +23,7 @@ impl Default for Search {
            include_title: true,
            include_content: true,
            include_description: false,
            include_path: false,
            truncate_content_length: None,
        }
    }

@@ -11,9 +11,6 @@ pub struct Taxonomy {
    pub paginate_path: Option<String>,
    /// Whether to generate a feed only for each taxonomy term, defaults to false
    pub feed: bool,
    /// The language for that taxonomy, only used in multilingual sites.
    /// Defaults to the config `default_language` if not set
    pub lang: String,
}

impl Taxonomy {

@@ -1,10 +1,10 @@
use lazy_static::lazy_static;
use syntect::dumps::from_binary;
use syntect::easy::HighlightLines;
use syntect::highlighting::ThemeSet;
use syntect::parsing::SyntaxSet;
use syntect::highlighting::{Theme, ThemeSet};
use syntect::parsing::{SyntaxReference, SyntaxSet};

use crate::config::Config;
use syntect::html::{css_for_theme_with_class_style, ClassStyle};

lazy_static! {
    pub static ref SYNTAX_SET: SyntaxSet = {

@@ -16,29 +16,80 @@ lazy_static! {
        from_binary(include_bytes!("../../../sublime/themes/all.themedump"));
}

/// Returns the highlighter and whether it was found in the extra or not
pub fn get_highlighter(language: Option<&str>, config: &Config) -> (HighlightLines<'static>, bool) {
    let theme = &THEME_SET.themes[config.highlight_theme()];
    let mut in_extra = false;
pub const CLASS_STYLE: ClassStyle = ClassStyle::SpacedPrefixed { prefix: "z-" };

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum HighlightSource {
    /// One of the built-in Zola syntaxes
    BuiltIn,
    /// Found in the extra syntaxes
    Extra,
    /// No language specified
    Plain,
    /// We didn't find the language in built-in and extra syntaxes
    NotFound,
}

pub struct SyntaxAndTheme<'config> {
    pub syntax: &'config SyntaxReference,
    pub syntax_set: &'config SyntaxSet,
    /// None if highlighting via CSS
    pub theme: Option<&'config Theme>,
    pub source: HighlightSource,
}

pub fn resolve_syntax_and_theme<'config>(
    language: Option<&'_ str>,
    config: &'config Config,
) -> SyntaxAndTheme<'config> {
    let theme = if config.markdown.highlight_theme != "css" {
        Some(&THEME_SET.themes[&config.markdown.highlight_theme])
    } else {
        None
    };

    if let Some(ref lang) = language {
        let syntax = if let Some(ref extra) = config.markdown.extra_syntax_set {
            let s = extra.find_syntax_by_token(lang);
            if s.is_some() {
                in_extra = true;
        if let Some(ref extra_syntaxes) = config.markdown.extra_syntax_set {
            if let Some(syntax) = extra_syntaxes.find_syntax_by_token(lang) {
                return SyntaxAndTheme {
                    syntax,
                    syntax_set: extra_syntaxes,
                    theme,
                    source: HighlightSource::Extra,
                };
            }
        }
        // The JS syntax hangs a lot... the TS syntax is probably better anyway.
        // https://github.com/getzola/zola/issues/1241
        // https://github.com/getzola/zola/issues/1211
        // https://github.com/getzola/zola/issues/1174
        let hacked_lang = if *lang == "js" || *lang == "javascript" { "ts" } else { lang };
        if let Some(syntax) = SYNTAX_SET.find_syntax_by_token(hacked_lang) {
            SyntaxAndTheme {
                syntax,
                syntax_set: &SYNTAX_SET as &SyntaxSet,
                theme,
                source: HighlightSource::BuiltIn,
            }
        } else {
            SyntaxAndTheme {
                syntax: SYNTAX_SET.find_syntax_plain_text(),
                syntax_set: &SYNTAX_SET as &SyntaxSet,
                theme,
                source: HighlightSource::NotFound,
            }
            s
        } else {
            // The JS syntax hangs a lot... the TS syntax is probably better anyway.
            // https://github.com/getzola/zola/issues/1241
            // https://github.com/getzola/zola/issues/1211
            // https://github.com/getzola/zola/issues/1174
            let hacked_lang = if *lang == "js" || *lang == "javascript" { "ts" } else { lang };
            SYNTAX_SET.find_syntax_by_token(hacked_lang)
        }
        .unwrap_or_else(|| SYNTAX_SET.find_syntax_plain_text());
        (HighlightLines::new(syntax, theme), in_extra)
    } else {
        (HighlightLines::new(SYNTAX_SET.find_syntax_plain_text(), theme), false)
        SyntaxAndTheme {
            syntax: SYNTAX_SET.find_syntax_plain_text(),
            syntax_set: &SYNTAX_SET as &SyntaxSet,
            theme,
            source: HighlightSource::Plain,
        }
    }
}

pub fn export_theme_css(theme_name: &str) -> String {
    let theme = &THEME_SET.themes[theme_name];
    css_for_theme_with_class_style(theme, CLASS_STYLE)
}
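
As a rough standalone illustration of the CSS-export path above, using syntect's bundled default themes rather than Zola's own theme dump (the theme key and the 4.x-era `css_for_theme_with_class_style` signature returning `String` are assumptions based on the syntect line this commit pins):

```rust
use syntect::highlighting::ThemeSet;
use syntect::html::{css_for_theme_with_class_style, ClassStyle};

fn main() {
    // Same call as export_theme_css above, but on syntect's default themes.
    let ts = ThemeSet::load_defaults();
    let theme = &ts.themes["base16-ocean.dark"];
    let css = css_for_theme_with_class_style(theme, ClassStyle::SpacedPrefixed { prefix: "z-" });
    // Every scope becomes a prefixed class rule, e.g. `.z-comment { ... }`.
    println!("{}", css);
}
```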

@@ -1,21 +1,17 @@
mod config;
pub mod highlighting;
mod theme;

pub use crate::config::{
    languages::Language, link_checker::LinkChecker, slugify::Slugify, taxonomies::Taxonomy, Config,
    languages::LanguageOptions, link_checker::LinkChecker, search::Search, slugify::Slugify,
    taxonomies::Taxonomy, Config,
};
use errors::Result;

use std::path::Path;

/// Get and parse the config.
/// If it doesn't succeed, exit
pub fn get_config(filename: &Path) -> Config {
    match Config::from_file(filename) {
        Ok(c) => c,
        Err(e) => {
            println!("Failed to load {}", filename.display());
            println!("Error: {}", e);
            ::std::process::exit(1);
        }
    }
pub fn get_config(filename: &Path) -> Result<Config> {
    Config::from_file(filename)
}

@@ -5,7 +5,7 @@ use serde_derive::{Deserialize, Serialize};
use toml::Value as Toml;

use errors::{bail, Result};
use utils::fs::read_file_with_error;
use utils::fs::read_file;

/// Holds the data from a `theme.toml` file.
/// There are other fields than `extra` in it but Zola

@@ -39,13 +39,9 @@ impl Theme {
    }

    /// Parses a theme file from the given path
    pub fn from_file(path: &PathBuf) -> Result<Theme> {
        let content = read_file_with_error(
            path,
            "No `theme.toml` file found. \
             Is the `theme` defined in your `config.toml` present in the `themes` directory \
             and does it have a `theme.toml` inside?",
        )?;
    pub fn from_file(path: &PathBuf, theme_name: &str) -> Result<Theme> {
        let content = read_file(path)
            .map_err(|e| errors::Error::chain(format!("Failed to load theme {}", theme_name), e))?;
        Theme::parse(&content)
    }
}

@@ -8,4 +8,5 @@ edition = "2018"
tera = "1"
toml = "0.5"
image = "0.23"
syntect = "4.4"
# TODO: go back to version 4/5 once https://github.com/trishume/syntect/pull/337 is merged
syntect = { git = "https://github.com/Keats/syntect.git", branch = "scopestack" }

@@ -18,8 +18,4 @@ errors = { path = "../errors" }
utils = { path = "../utils" }

[dev-dependencies]
# Remove from git patch when 1.0.1 is released
# https://github.com/frondeus/test-case/issues/62
# test-case = "1.0"
test-case = { git = "https://github.com/frondeus/test-case" }

test-case = "1"

@@ -3,9 +3,7 @@ use serde_derive::{Deserialize, Serialize};

use errors::{bail, Error, Result};
use regex::Regex;
use serde_yaml;
use std::path::Path;
use toml;

mod page;
mod section;

@@ -14,10 +12,14 @@ pub use page::PageFrontMatter;
pub use section::SectionFrontMatter;

lazy_static! {
    static ref TOML_RE: Regex =
        Regex::new(r"^[[:space:]]*\+\+\+(\r?\n(?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
    static ref YAML_RE: Regex =
        Regex::new(r"^[[:space:]]*---(\r?\n(?s).*?(?-s))---\r?\n?((?s).*(?-s))$").unwrap();
    static ref TOML_RE: Regex = Regex::new(
        r"^[[:space:]]*\+\+\+(\r?\n(?s).*?(?-s))\+\+\+[[:space:]]*(?:$|(?:\r?\n((?s).*(?-s))$))"
    )
    .unwrap();
    static ref YAML_RE: Regex = Regex::new(
        r"^[[:space:]]*---(\r?\n(?s).*?(?-s))---[[:space:]]*(?:$|(?:\r?\n((?s).*(?-s))$))"
    )
    .unwrap();
}

pub enum RawFrontMatter<'a> {
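
The regex change above is what lets a file end immediately after the closing front-matter delimiter. A small self-contained check of the new TOML_RE behavior (the pattern is copied verbatim from the hunk above; the sample documents are illustrative):

```rust
use regex::Regex;

fn main() {
    // Pattern copied from the new TOML_RE above.
    let toml_re = Regex::new(
        r"^[[:space:]]*\+\+\+(\r?\n(?s).*?(?-s))\+\+\+[[:space:]]*(?:$|(?:\r?\n((?s).*(?-s))$))",
    )
    .unwrap();

    // Front-matter followed by content: both capture groups are filled.
    let caps = toml_re.captures("+++\ntitle = \"T\"\n+++\nHello").unwrap();
    assert_eq!(caps.get(2).map(|m| m.as_str()), Some("Hello"));

    // Front-matter only, with no trailing newline: now matches too, but the
    // content group is absent, which is why split_content below switches to
    // `caps.get(2).map_or("", |m| m.as_str())`.
    let caps = toml_re.captures("+++\ntitle = \"T\"\n+++").unwrap();
    assert!(caps.get(2).is_none());
}
```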

@@ -46,6 +48,10 @@ impl RawFrontMatter<'_> {
pub enum SortBy {
    /// Most recent to oldest
    Date,
    /// Most recent to oldest
    UpdateDate,
    /// Sort by title
    Title,
    /// Lower weight comes first
    Weight,
    /// No sorting

@@ -80,7 +86,8 @@ fn split_content<'c>(file_path: &Path, content: &'c str) -> Result<(RawFrontMatter<'c>, &'c str)>
    // caps[1] => front matter
    // caps[2] => content
    let front_matter = caps.get(1).unwrap().as_str();
    let content = caps.get(2).unwrap().as_str();
    let content = caps.get(2).map_or("", |m| m.as_str());

    if is_toml {
        Ok((RawFrontMatter::Toml(front_matter), content))
    } else {

@@ -172,13 +179,27 @@ Hello
title = "Title"
description = "hey there"
date = 2002-10-12
+++"#; "toml")]
+++
"#; "toml")]
#[test_case(r#"
---
title: Title
description: hey there
date: 2002-10-12
---"#; "yaml")]
---
"#; "yaml")]
#[test_case(r#"
+++
title = "Title"
description = "hey there"
date = 2002-10-12
+++"#; "toml no newline")]
#[test_case(r#"
---
title: Title
description: hey there
date: 2002-10-12
---"#; "yaml no newline")]
fn can_split_content_with_only_frontmatter_valid(content: &str) {
    let (front_matter, content) = split_page_content(Path::new(""), content).unwrap();
    assert_eq!(content, "");

@@ -231,6 +252,12 @@ description = "hey there"
date = 2002-10-12
---"#; "toml unmatched")]
#[test_case(r#"
+++
title = "Title"
description = "hey there"
date = 2002-10-12
++++"#; "toml too many pluses")]
#[test_case(r#"
---
title: Title
description: hey there

@@ -241,6 +268,12 @@ title: Title
description: hey there
date: 2002-10-12
+++"#; "yaml unmatched")]
#[test_case(r#"
---
title: Title
description: hey there
date: 2002-10-12
----"#; "yaml too many dashes")]
fn errors_if_cannot_locate_frontmatter(content: &str) {
    let res = split_page_content(Path::new(""), content);
    assert!(res.is_err());

@@ -20,6 +20,12 @@ pub struct PageFrontMatter {
    /// Updated date
    #[serde(default, deserialize_with = "from_toml_datetime")]
    pub updated: Option<String>,
    /// Chrono converted update datetime
    #[serde(default, skip_deserializing)]
    pub updated_datetime: Option<NaiveDateTime>,
    /// The converted update datetime into a (year, month, day) tuple
    #[serde(default, skip_deserializing)]
    pub updated_datetime_tuple: Option<(i32, u32, u32)>,
    /// Date if we want to order pages (ie blog post)
    #[serde(default, deserialize_with = "from_toml_datetime")]
    pub date: Option<String>,

@@ -81,7 +87,7 @@ impl PageFrontMatter {
        }

        if let Some(ref path) = f.path {
            if path == "" {
            if path.is_empty() {
                bail!("`path` can't be empty if present")
            }
        }

@@ -107,6 +113,10 @@ impl PageFrontMatter {
    pub fn date_to_datetime(&mut self) {
        self.datetime = self.date.as_ref().map(|s| s.as_ref()).and_then(parse_datetime);
        self.datetime_tuple = self.datetime.map(|dt| (dt.year(), dt.month(), dt.day()));

        self.updated_datetime = self.updated.as_ref().map(|s| s.as_ref()).and_then(parse_datetime);
        self.updated_datetime_tuple =
            self.updated_datetime.map(|dt| (dt.year(), dt.month(), dt.day()));
    }

    pub fn weight(&self) -> usize {

@@ -120,6 +130,8 @@ impl Default for PageFrontMatter {
            title: None,
            description: None,
            updated: None,
            updated_datetime: None,
            updated_datetime_tuple: None,
            date: None,
            datetime: None,
            datetime_tuple: None,

@@ -10,6 +10,15 @@ regex = "1.0"
tera = "1"
image = "0.23"
rayon = "1"
webp = "0.1.1"
serde = { version = "1", features = ["derive"] }
svg_metadata = "0.4.1"

errors = { path = "../errors" }
utils = { path = "../utils" }
config = { path = "../config" }

[dev-dependencies]
# TODO: prune
serde_json = "1"
site = { path = "../site" }

@@ -1,29 +1,62 @@
use std::collections::hash_map::DefaultHasher;
use std::collections::hash_map::Entry as HEntry;
use std::collections::HashMap;
use std::error::Error as StdError;
use std::ffi::OsStr;
use std::fs::{self, File};
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use std::{collections::hash_map::DefaultHasher, io::Write};

use image::imageops::FilterType;
use image::{GenericImageView, ImageOutputFormat};
use image::error::ImageResult;
use image::io::Reader as ImgReader;
use image::{imageops::FilterType, EncodableLayout};
use image::{ImageFormat, ImageOutputFormat};
use lazy_static::lazy_static;
use rayon::prelude::*;
use regex::Regex;
use serde::{Deserialize, Serialize};
use svg_metadata::Metadata as SvgMetadata;

use config::Config;
use errors::{Error, Result};
use utils::fs as ufs;

static RESIZED_SUBDIR: &str = "processed_images";
const DEFAULT_Q_JPG: u8 = 75;

lazy_static! {
    pub static ref RESIZED_FILENAME: Regex =
        Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.](jpg|png)"#).unwrap();
        Regex::new(r#"([0-9a-f]{16})([0-9a-f]{2})[.](jpg|png|webp)"#).unwrap();
}

/// Describes the precise kind of a resize operation
/// Size and format read cheaply with `image`'s `Reader`.
#[derive(Debug)]
struct ImageMeta {
    size: (u32, u32),
    format: Option<ImageFormat>,
}

impl ImageMeta {
    fn read(path: &Path) -> ImageResult<Self> {
        let reader = ImgReader::open(path).and_then(ImgReader::with_guessed_format)?;
        let format = reader.format();
        let size = reader.into_dimensions()?;

        Ok(Self { size, format })
    }

    fn is_lossy(&self) -> bool {
        use ImageFormat::*;

        // We assume lossy by default / if unknown format
        let format = self.format.unwrap_or(Jpeg);
        !matches!(format, Png | Pnm | Tiff | Tga | Bmp | Ico | Hdr | Farbfeld)
    }
}

/// De-serialized & sanitized arguments of `resize_image`
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ResizeOp {
pub enum ResizeArgs {
    /// A simple scale operation that doesn't take aspect ratio into account
    Scale(u32, u32),
    /// Scales the image to a specified width with height computed such

@@ -43,22 +76,20 @@ pub enum ResizeOp {
    Fill(u32, u32),
}

impl ResizeOp {
    pub fn from_args(op: &str, width: Option<u32>, height: Option<u32>) -> Result<ResizeOp> {
        use ResizeOp::*;
impl ResizeArgs {
    pub fn from_args(op: &str, width: Option<u32>, height: Option<u32>) -> Result<Self> {
        use ResizeArgs::*;

        // Validate args:
        match op {
            "fit_width" => {
                if width.is_none() {
                    return Err("op=\"fit_width\" requires a `width` argument".to_string().into());
                    return Err("op=\"fit_width\" requires a `width` argument".into());
                }
            }
            "fit_height" => {
                if height.is_none() {
                    return Err("op=\"fit_height\" requires a `height` argument"
                        .to_string()
                        .into());
                    return Err("op=\"fit_height\" requires a `height` argument".into());
                }
            }
            "scale" | "fit" | "fill" => {

@@ -80,57 +111,89 @@ impl ResizeOp {
            _ => unreachable!(),
        })
    }

    pub fn width(self) -> Option<u32> {
        use ResizeOp::*;

        match self {
            Scale(w, _) => Some(w),
            FitWidth(w) => Some(w),
            FitHeight(_) => None,
            Fit(w, _) => Some(w),
            Fill(w, _) => Some(w),
        }
    }

    pub fn height(self) -> Option<u32> {
        use ResizeOp::*;

        match self {
            Scale(_, h) => Some(h),
            FitWidth(_) => None,
            FitHeight(h) => Some(h),
            Fit(_, h) => Some(h),
            Fill(_, h) => Some(h),
        }
    }
}

impl From<ResizeOp> for u8 {
    fn from(op: ResizeOp) -> u8 {
        use ResizeOp::*;

        match op {
            Scale(_, _) => 1,
            FitWidth(_) => 2,
            FitHeight(_) => 3,
            Fit(_, _) => 4,
            Fill(_, _) => 5,
        }
    }
/// Contains image crop/resize instructions for use by `Processor`
///
/// The `Processor` applies `crop` first, if any, and then `resize`, if any.
#[derive(Clone, PartialEq, Eq, Hash, Default, Debug)]
struct ResizeOp {
    crop: Option<(u32, u32, u32, u32)>, // x, y, w, h
    resize: Option<(u32, u32)>,         // w, h
}

#[allow(clippy::derive_hash_xor_eq)]
impl Hash for ResizeOp {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        hasher.write_u8(u8::from(*self));
        if let Some(w) = self.width() {
            hasher.write_u32(w);
        }
        if let Some(h) = self.height() {
            hasher.write_u32(h);
        }
impl ResizeOp {
    fn new(args: ResizeArgs, (orig_w, orig_h): (u32, u32)) -> Self {
        use ResizeArgs::*;

        let res = ResizeOp::default();

        match args {
            Scale(w, h) => res.resize((w, h)),
            FitWidth(w) => {
                let h = (orig_h as u64 * w as u64) / orig_w as u64;
                res.resize((w, h as u32))
            }
            FitHeight(h) => {
                let w = (orig_w as u64 * h as u64) / orig_h as u64;
                res.resize((w as u32, h))
            }
            Fit(w, h) => {
                if orig_w <= w && orig_h <= h {
                    return res; // ie. no-op
                }

                let orig_w_h = orig_w as u64 * h as u64;
                let orig_h_w = orig_h as u64 * w as u64;

                if orig_w_h > orig_h_w {
                    Self::new(FitWidth(w), (orig_w, orig_h))
                } else {
                    Self::new(FitHeight(h), (orig_w, orig_h))
                }
            }
            Fill(w, h) => {
                const RATIO_EPSILLION: f32 = 0.1;

                let factor_w = orig_w as f32 / w as f32;
                let factor_h = orig_h as f32 / h as f32;

                if (factor_w - factor_h).abs() <= RATIO_EPSILLION {
                    // If the horizontal and vertical factor is very similar,
                    // that means the aspect is similar enough that there's not much point
                    // in cropping, so just perform a simple scale in this case.
                    res.resize((w, h))
                } else {
                    // We perform the fill such that a crop is performed first
                    // and then resize_exact can be used, which should be cheaper than
                    // resizing and then cropping (smaller number of pixels to resize).
                    let (crop_w, crop_h) = if factor_w < factor_h {
                        (orig_w, (factor_w * h as f32).round() as u32)
                    } else {
                        ((factor_h * w as f32).round() as u32, orig_h)
                    };

                    let (offset_w, offset_h) = if factor_w < factor_h {
                        (0, (orig_h - crop_h) / 2)
                    } else {
                        ((orig_w - crop_w) / 2, 0)
                    };

                    res.crop((offset_w, offset_h, crop_w, crop_h)).resize((w, h))
                }
            }
        }
    }

    fn crop(mut self, crop: (u32, u32, u32, u32)) -> Self {
        self.crop = Some(crop);
        self
    }

    fn resize(mut self, size: (u32, u32)) -> Self {
        self.resize = Some(size);
        self
    }
}

/// Thumbnail image format
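
To make the Fill arithmetic above concrete, a small worked example (numbers chosen for illustration, math copied from ResizeOp::new): filling a 400x300 source into 200x200 crops a centered 300x300 square first, then scales it down.

```rust
fn main() {
    let (orig_w, orig_h) = (400u32, 300u32);
    let (w, h) = (200u32, 200u32);

    let factor_w = orig_w as f32 / w as f32; // 2.0
    let factor_h = orig_h as f32 / h as f32; // 1.5

    // Factors differ by more than RATIO_EPSILLION (0.1), so the crop path runs.
    assert!((factor_w - factor_h).abs() > 0.1);

    // factor_w >= factor_h: crop horizontally to a centered 300x300 square.
    let (crop_w, crop_h) = ((factor_h * w as f32).round() as u32, orig_h);
    let (offset_w, offset_h) = ((orig_w - crop_w) / 2, 0);
    assert_eq!((offset_w, offset_h, crop_w, crop_h), (50, 0, 300, 300));

    // resize_exact then shrinks the 300x300 crop to the requested 200x200.
}
```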

@@ -140,22 +203,28 @@ pub enum Format {
    Jpeg(u8),
    /// PNG
    Png,
    /// WebP. The `u8` argument is the WebP quality (in percent); `None` means lossless.
    WebP(Option<u8>),
}

impl Format {
    pub fn from_args(source: &str, format: &str, quality: u8) -> Result<Format> {
    fn from_args(meta: &ImageMeta, format: &str, quality: Option<u8>) -> Result<Format> {
        use Format::*;

        assert!(quality > 0 && quality <= 100, "Jpeg quality must be within the range [1; 100]");

        if let Some(quality) = quality {
            assert!(quality > 0 && quality <= 100, "Quality must be within the range [1; 100]");
        }
        let jpg_quality = quality.unwrap_or(DEFAULT_Q_JPG);
        match format {
            "auto" => match Self::is_lossy(source) {
                Some(true) => Ok(Jpeg(quality)),
                Some(false) => Ok(Png),
                None => Err(format!("Unsupported image file: {}", source).into()),
            },
            "jpeg" | "jpg" => Ok(Jpeg(quality)),
            "auto" => {
                if meta.is_lossy() {
                    Ok(Jpeg(jpg_quality))
                } else {
                    Ok(Png)
                }
            }
            "jpeg" | "jpg" => Ok(Jpeg(jpg_quality)),
            "png" => Ok(Png),
            "webp" => Ok(WebP(quality)),
            _ => Err(format!("Invalid image format: {}", format).into()),
        }
    }

@@ -170,6 +239,8 @@ impl Format {
            "png" => Some(false),
            "gif" => Some(false),
            "bmp" => Some(false),
            // It is assumed that webp is lossy, but it can be both
            "webp" => Some(true),
            _ => None,
        })
        .unwrap_or(None)

@@ -182,6 +253,7 @@ impl Format {
        match *self {
            Png => "png",
            Jpeg(_) => "jpg",
            WebP(_) => "webp",
        }
    }
}

@@ -194,16 +266,23 @@ impl Hash for Format {
        let q = match *self {
            Png => 0,
            Jpeg(q) => q,
            WebP(None) => 0,
            WebP(Some(q)) => q,
        };

        hasher.write_u8(q);
        hasher.write(self.extension().as_bytes());
    }
}

/// Holds all data needed to perform a resize operation
#[derive(Debug, PartialEq, Eq)]
pub struct ImageOp {
    source: String,
    /// This is the source input path string as passed in the template, we need this to compute the hash.
    /// Hashing the resolved `input_path` would include the absolute path to the image
    /// with all filesystem components.
    input_src: String,
    input_path: PathBuf,
    op: ResizeOp,
    format: Format,
    /// Hash of the above parameters

@@ -216,82 +295,32 @@ pub struct ImageOp {
}

impl ImageOp {
    pub fn new(source: String, op: ResizeOp, format: Format) -> ImageOp {
    const RESIZE_FILTER: FilterType = FilterType::Lanczos3;

    fn new(input_src: String, input_path: PathBuf, op: ResizeOp, format: Format) -> ImageOp {
        let mut hasher = DefaultHasher::new();
        hasher.write(source.as_ref());
        hasher.write(input_src.as_ref());
        op.hash(&mut hasher);
        format.hash(&mut hasher);
        let hash = hasher.finish();

        ImageOp { source, op, format, hash, collision_id: 0 }
        ImageOp { input_src, input_path, op, format, hash, collision_id: 0 }
    }

    pub fn from_args(
        source: String,
        op: &str,
        width: Option<u32>,
        height: Option<u32>,
        format: &str,
        quality: u8,
    ) -> Result<ImageOp> {
        let op = ResizeOp::from_args(op, width, height)?;
        let format = Format::from_args(&source, format, quality)?;
        Ok(Self::new(source, op, format))
    }

    fn perform(&self, content_path: &Path, target_path: &Path) -> Result<()> {
        use ResizeOp::*;

        let src_path = content_path.join(&self.source);
        if !ufs::file_stale(&src_path, target_path) {
    fn perform(&self, target_path: &Path) -> Result<()> {
        if !ufs::file_stale(&self.input_path, target_path) {
            return Ok(());
        }

        let mut img = image::open(&src_path)?;
        let (img_w, img_h) = img.dimensions();
        let mut img = image::open(&self.input_path)?;

        const RESIZE_FILTER: FilterType = FilterType::Lanczos3;
        const RATIO_EPSILLION: f32 = 0.1;

        let img = match self.op {
            Scale(w, h) => img.resize_exact(w, h, RESIZE_FILTER),
            FitWidth(w) => img.resize(w, u32::max_value(), RESIZE_FILTER),
            FitHeight(h) => img.resize(u32::max_value(), h, RESIZE_FILTER),
            Fit(w, h) => {
                if img_w > w || img_h > h {
                    img.resize(w, h, RESIZE_FILTER)
                } else {
                    img
                }
            }
            Fill(w, h) => {
                let factor_w = img_w as f32 / w as f32;
                let factor_h = img_h as f32 / h as f32;

                if (factor_w - factor_h).abs() <= RATIO_EPSILLION {
                    // If the horizontal and vertical factor is very similar,
                    // that means the aspect is similar enough that there's not much point
                    // in cropping, so just perform a simple scale in this case.
                    img.resize_exact(w, h, RESIZE_FILTER)
                } else {
                    // We perform the fill such that a crop is performed first
                    // and then resize_exact can be used, which should be cheaper than
                    // resizing and then cropping (smaller number of pixels to resize).
                    let (crop_w, crop_h) = if factor_w < factor_h {
                        (img_w, (factor_w * h as f32).round() as u32)
                    } else {
                        ((factor_h * w as f32).round() as u32, img_h)
                    };

                    let (offset_w, offset_h) = if factor_w < factor_h {
                        (0, (img_h - crop_h) / 2)
                    } else {
                        ((img_w - crop_w) / 2, 0)
                    };

                    img.crop(offset_w, offset_h, crop_w, crop_h).resize_exact(w, h, RESIZE_FILTER)
                }
            }
        }
        let img = match self.op.crop {
            Some((x, y, w, h)) => img.crop(x, y, w, h),
            None => img,
        };
        let img = match self.op.resize {
            Some((w, h)) => img.resize_exact(w, h, Self::RESIZE_FILTER),
            None => img,
        };

        let mut f = File::create(target_path)?;

@@ -303,20 +332,55 @@ impl ImageOp {
            Format::Jpeg(q) => {
                img.write_to(&mut f, ImageOutputFormat::Jpeg(q))?;
            }
            Format::WebP(q) => {
                let encoder = webp::Encoder::from_image(&img);
                let memory = match q {
                    Some(q) => encoder.encode(q as f32),
                    None => encoder.encode_lossless(),
                };
                f.write_all(&memory.as_bytes())?;
            }
        }

        Ok(())
    }
}

/// A strcture into which image operations can be enqueued and then performed.
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct EnqueueResponse {
    /// The final URL for that asset
    pub url: String,
    /// The path to the static asset generated
    pub static_path: String,
    /// New image width
    pub width: u32,
    /// New image height
    pub height: u32,
    /// Original image width
    pub orig_width: u32,
    /// Original image height
    pub orig_height: u32,
}

impl EnqueueResponse {
    fn new(url: String, static_path: PathBuf, meta: &ImageMeta, op: &ResizeOp) -> Self {
        let static_path = static_path.to_string_lossy().into_owned();
        let (width, height) = op.resize.unwrap_or(meta.size);
        let (orig_width, orig_height) = meta.size;

        Self { url, static_path, width, height, orig_width, orig_height }
    }
}

/// A struct into which image operations can be enqueued and then performed.
/// All output is written in a subdirectory in `static_path`,
/// taking care of file stale status based on timestamps and possible hash collisions.
#[derive(Debug)]
pub struct Processor {
    content_path: PathBuf,
    resized_path: PathBuf,
    resized_url: String,
    /// The base path of the Zola site
    base_path: PathBuf,
    base_url: String,
    output_dir: PathBuf,
    /// A map of a ImageOps by their stored hash.
    /// Note that this cannot be a HashSet, because hashset handles collisions and we don't want that,
    /// we need to be aware of and handle collisions ourselves.
@ -326,36 +390,47 @@ pub struct Processor {
|
|||
}
|
||||
|
||||
impl Processor {
|
||||
pub fn new(content_path: PathBuf, static_path: &Path, base_url: &str) -> Processor {
|
||||
pub fn new(base_path: PathBuf, config: &Config) -> Processor {
|
||||
Processor {
|
||||
content_path,
|
||||
resized_path: static_path.join(RESIZED_SUBDIR),
|
||||
resized_url: Self::resized_url(base_url),
|
||||
output_dir: base_path.join("static").join(RESIZED_SUBDIR),
|
||||
base_url: config.make_permalink(RESIZED_SUBDIR),
|
||||
base_path,
|
||||
img_ops: HashMap::new(),
|
||||
img_ops_collisions: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn resized_url(base_url: &str) -> String {
|
||||
if base_url.ends_with('/') {
|
||||
format!("{}{}", base_url, RESIZED_SUBDIR)
|
||||
} else {
|
||||
format!("{}/{}", base_url, RESIZED_SUBDIR)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_base_url(&mut self, base_url: &str) {
|
||||
self.resized_url = Self::resized_url(base_url);
|
||||
}
|
||||
|
||||
pub fn source_exists(&self, source: &str) -> bool {
|
||||
self.content_path.join(source).exists()
|
||||
pub fn set_base_url(&mut self, config: &Config) {
|
||||
self.base_url = config.make_permalink(RESIZED_SUBDIR);
|
||||
}
|
||||
|
||||
pub fn num_img_ops(&self) -> usize {
|
||||
self.img_ops.len() + self.img_ops_collisions.len()
|
||||
}
|
||||
|
||||
pub fn enqueue(
|
||||
&mut self,
|
||||
input_src: String,
|
||||
input_path: PathBuf,
|
||||
op: &str,
|
||||
width: Option<u32>,
|
||||
height: Option<u32>,
|
||||
format: &str,
|
||||
quality: Option<u8>,
|
||||
) -> Result<EnqueueResponse> {
|
||||
let meta = ImageMeta::read(&input_path).map_err(|e| {
|
||||
Error::chain(format!("Failed to read image: {}", input_path.display()), e)
|
||||
})?;
|
||||
|
||||
let args = ResizeArgs::from_args(op, width, height)?;
|
||||
let op = ResizeOp::new(args, meta.size);
|
||||
let format = Format::from_args(&meta, format, quality)?;
|
||||
let img_op = ImageOp::new(input_src, input_path, op.clone(), format);
|
||||
let (static_path, url) = self.insert(img_op);
|
||||
|
||||
Ok(EnqueueResponse::new(url, static_path, &meta, &op))
|
||||
}
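A condensed sketch of the new call sequence (it mirrors the integration tests later in this diff; the site path and image name are illustrative):

    let config = Config::parse(CONFIG).unwrap(); // CONFIG as in the tests below
    let mut proc = Processor::new(PathBuf::from("my_site"), &config);
    let resp = proc
        .enqueue("img.jpg".into(), PathBuf::from("my_site/img.jpg"),
                 "scale", Some(150), Some(150), "auto", None)
        .unwrap();
    assert!(resp.url.ends_with(".jpg"));
    // enqueue only records the op; files are written in one batch later:
    proc.do_process().unwrap();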

    fn insert_with_collisions(&mut self, mut img_op: ImageOp) -> u32 {
        match self.img_ops.entry(img_op.hash) {
            HEntry::Occupied(entry) => {

@ -408,25 +483,26 @@ impl Processor {

        format!("{:016x}{:02x}.{}", hash, collision_id, format.extension())
    }

    fn op_url(&self, hash: u64, collision_id: u32, format: Format) -> String {
        format!("{}/{}", &self.resized_url, Self::op_filename(hash, collision_id, format))
    }

    pub fn insert(&mut self, img_op: ImageOp) -> String {
    /// Adds the given operation to the queue but does not process it immediately.
    /// Returns (path in static folder, final URL).
    fn insert(&mut self, img_op: ImageOp) -> (PathBuf, String) {
        let hash = img_op.hash;
        let format = img_op.format;
        let collision_id = self.insert_with_collisions(img_op);
        self.op_url(hash, collision_id, format)
        let filename = Self::op_filename(hash, collision_id, format);
        let url = format!("{}{}", self.base_url, filename);
        (Path::new("static").join(RESIZED_SUBDIR).join(filename), url)
    }
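For a sense of the filename scheme (values invented for the example, and assuming RESIZED_SUBDIR is "processed_images" as the tests below suggest):

    // With hash = 0x00ab_cdef_0123_4567, collision_id = 0 and a JPEG output,
    // op_filename yields "00abcdef0123456700.jpg", so insert returns roughly
    //   (PathBuf::from("static/processed_images/00abcdef0123456700.jpg"),
    //    "https://example.com/processed_images/00abcdef0123456700.jpg")
    // since make_permalink produces a base URL with a trailing slash.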

    /// Remove stale processed images in the output directory
    pub fn prune(&self) -> Result<()> {
        // Do not create folders if they don't exist
        if !self.resized_path.exists() {
        if !self.output_dir.exists() {
            return Ok(());
        }

        ufs::ensure_directory_exists(&self.resized_path)?;
        let entries = fs::read_dir(&self.resized_path)?;
        ufs::ensure_directory_exists(&self.output_dir)?;
        let entries = fs::read_dir(&self.output_dir)?;
        for entry in entries {
            let entry_path = entry?.path();
            if entry_path.is_file() {

@ -445,19 +521,99 @@ impl Processor {

        Ok(())
    }

    /// Run the enqueued image operations
    pub fn do_process(&mut self) -> Result<()> {
        if !self.img_ops.is_empty() {
            ufs::ensure_directory_exists(&self.resized_path)?;
            ufs::ensure_directory_exists(&self.output_dir)?;
        }

        self.img_ops
            .par_iter()
            .map(|(hash, op)| {
                let target =
                    self.resized_path.join(Self::op_filename(*hash, op.collision_id, op.format));
                op.perform(&self.content_path, &target)
                    .map_err(|e| Error::chain(format!("Failed to process image: {}", op.source), e))
                    self.output_dir.join(Self::op_filename(*hash, op.collision_id, op.format));
                op.perform(&target).map_err(|e| {
                    Error::chain(format!("Failed to process image: {}", op.input_path.display()), e)
                })
            })
            .collect::<Result<()>>()
    }
}

#[derive(Debug, Serialize, Eq, PartialEq)]
pub struct ImageMetaResponse {
    pub width: u32,
    pub height: u32,
    pub format: Option<&'static str>,
}

impl ImageMetaResponse {
    pub fn new_svg(width: u32, height: u32) -> Self {
        Self { width, height, format: Some("svg") }
    }
}

impl From<ImageMeta> for ImageMetaResponse {
    fn from(im: ImageMeta) -> Self {
        Self {
            width: im.size.0,
            height: im.size.1,
            format: im.format.and_then(|f| f.extensions_str().get(0)).copied(),
        }
    }
}

impl From<webp::WebPImage> for ImageMetaResponse {
    fn from(img: webp::WebPImage) -> Self {
        Self { width: img.width(), height: img.height(), format: Some("webp") }
    }
}

/// Read image dimensions (cheaply), used in `get_image_metadata()`, supports SVG
pub fn read_image_metadata<P: AsRef<Path>>(path: P) -> Result<ImageMetaResponse> {
    let path = path.as_ref();
    let ext = path.extension().and_then(OsStr::to_str).unwrap_or("").to_lowercase();

    let error = |e: Box<dyn StdError + Send + Sync>| {
        Error::chain(format!("Failed to read image: {}", path.display()), e)
    };

    match ext.as_str() {
        "svg" => {
            let img = SvgMetadata::parse_file(&path).map_err(|e| error(e.into()))?;
            match (img.height(), img.width(), img.view_box()) {
                (Some(h), Some(w), _) => Ok((h, w)),
                (_, _, Some(view_box)) => Ok((view_box.height, view_box.width)),
                _ => Err("Invalid dimensions: SVG width/height and viewbox not set.".into()),
            }
            .map(|(h, w)| ImageMetaResponse::new_svg(h as u32, w as u32))
        }
        "webp" => {
            // Unfortunately we have to load the entire image here, unlike with the others :|
            let data = fs::read(path).map_err(|e| error(e.into()))?;
            let decoder = webp::Decoder::new(&data[..]);
            decoder.decode().map(ImageMetaResponse::from).ok_or_else(|| {
                Error::msg(format!("Failed to decode WebP image: {}", path.display()))
            })
        }
        _ => ImageMeta::read(path).map(ImageMetaResponse::from).map_err(|e| error(e.into())),
    }
}
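Illustrative usage (the path is hypothetical; for SVG/JPEG/PNG only the header is read, while WebP requires decoding the whole file as the comment above notes):

    let meta = read_image_metadata("content/posts/cover.webp").unwrap();
    println!("{}x{}, format {:?}", meta.width, meta.height, meta.format);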

/// Assert that `path` matches `prefix` + RESIZED_FILENAME regex + "." + `extension`;
/// this is useful in tests so that we don't need to hardcode the hash, which is annoying.
pub fn assert_processed_path_matches(path: &str, prefix: &str, extension: &str) {
    let filename = path
        .strip_prefix(prefix)
        .unwrap_or_else(|| panic!("Path `{}` doesn't start with `{}`", path, prefix));

    let suffix = format!(".{}", extension);
    assert!(filename.ends_with(&suffix), "Path `{}` doesn't end with `{}`", path, suffix);

    assert!(
        RESIZED_FILENAME.is_match_at(filename, 0),
        "In path `{}`, file stem `{}` doesn't match the RESIZED_FILENAME regex",
        path,
        filename
    );
}

165 components/imageproc/tests/resize_image.rs Normal file

@ -0,0 +1,165 @@
use std::env;
use std::path::{PathBuf, MAIN_SEPARATOR as SLASH};

use lazy_static::lazy_static;

use config::Config;
use imageproc::{assert_processed_path_matches, ImageMetaResponse, Processor};
use utils::fs as ufs;

static CONFIG: &str = r#"
title = "imageproc integration tests"
base_url = "https://example.com"
compile_sass = false
build_search_index = false

[markdown]
highlight_code = false
"#;

lazy_static! {
    static ref TEST_IMGS: PathBuf =
        [env!("CARGO_MANIFEST_DIR"), "tests", "test_imgs"].iter().collect();
    static ref TMPDIR: PathBuf = {
        let tmpdir = option_env!("CARGO_TARGET_TMPDIR").map(PathBuf::from).unwrap_or_else(|| {
            env::current_exe().unwrap().parent().unwrap().parent().unwrap().join("tmpdir")
        });
        ufs::ensure_directory_exists(&tmpdir).unwrap();
        tmpdir
    };
    static ref PROCESSED_PREFIX: String = format!("static{0}processed_images{0}", SLASH);
}

fn image_op_test(
    source_img: &str,
    op: &str,
    width: Option<u32>,
    height: Option<u32>,
    format: &str,
    expect_ext: &str,
    expect_width: u32,
    expect_height: u32,
    orig_width: u32,
    orig_height: u32,
) {
    let source_path = TEST_IMGS.join(source_img);

    let config = Config::parse(&CONFIG).unwrap();
    let mut proc = Processor::new(TMPDIR.clone(), &config);

    let resp =
        proc.enqueue(source_img.into(), source_path, op, width, height, format, None).unwrap();
    assert_processed_path_matches(&resp.url, "https://example.com/processed_images/", expect_ext);
    assert_processed_path_matches(&resp.static_path, PROCESSED_PREFIX.as_str(), expect_ext);
    assert_eq!(resp.width, expect_width);
    assert_eq!(resp.height, expect_height);
    assert_eq!(resp.orig_width, orig_width);
    assert_eq!(resp.orig_height, orig_height);

    proc.do_process().unwrap();

    let processed_path = PathBuf::from(&resp.static_path);
    let processed_size = imageproc::read_image_metadata(&TMPDIR.join(processed_path))
        .map(|meta| (meta.width, meta.height))
        .unwrap();
    assert_eq!(processed_size, (expect_width, expect_height));
}

fn image_meta_test(source_img: &str) -> ImageMetaResponse {
    let source_path = TEST_IMGS.join(source_img);
    imageproc::read_image_metadata(&source_path).unwrap()
}

#[test]
fn resize_image_scale() {
    image_op_test("jpg.jpg", "scale", Some(150), Some(150), "auto", "jpg", 150, 150, 300, 380);
}

#[test]
fn resize_image_fit_width() {
    image_op_test("jpg.jpg", "fit_width", Some(150), None, "auto", "jpg", 150, 190, 300, 380);
}

#[test]
fn resize_image_fit_height() {
    image_op_test("webp.webp", "fit_height", None, Some(190), "auto", "jpg", 150, 190, 300, 380);
}

#[test]
fn resize_image_fit1() {
    image_op_test("jpg.jpg", "fit", Some(150), Some(200), "auto", "jpg", 150, 190, 300, 380);
}

#[test]
fn resize_image_fit2() {
    image_op_test("jpg.jpg", "fit", Some(160), Some(180), "auto", "jpg", 142, 180, 300, 380);
}

#[test]
fn resize_image_fit3() {
    image_op_test("jpg.jpg", "fit", Some(400), Some(400), "auto", "jpg", 300, 380, 300, 380);
}

#[test]
fn resize_image_fill1() {
    image_op_test("jpg.jpg", "fill", Some(100), Some(200), "auto", "jpg", 100, 200, 300, 380);
}

#[test]
fn resize_image_fill2() {
    image_op_test("jpg.jpg", "fill", Some(200), Some(100), "auto", "jpg", 200, 100, 300, 380);
}

#[test]
fn resize_image_png_png() {
    image_op_test("png.png", "scale", Some(150), Some(150), "auto", "png", 150, 150, 300, 380);
}

#[test]
fn resize_image_png_jpg() {
    image_op_test("png.png", "scale", Some(150), Some(150), "jpg", "jpg", 150, 150, 300, 380);
}

#[test]
fn resize_image_png_webp() {
    image_op_test("png.png", "scale", Some(150), Some(150), "webp", "webp", 150, 150, 300, 380);
}

#[test]
fn resize_image_webp_jpg() {
    image_op_test("webp.webp", "scale", Some(150), Some(150), "auto", "jpg", 150, 150, 300, 380);
}

#[test]
fn read_image_metadata_jpg() {
    assert_eq!(
        image_meta_test("jpg.jpg"),
        ImageMetaResponse { width: 300, height: 380, format: Some("jpg") }
    );
}

#[test]
fn read_image_metadata_png() {
    assert_eq!(
        image_meta_test("png.png"),
        ImageMetaResponse { width: 300, height: 380, format: Some("png") }
    );
}

#[test]
fn read_image_metadata_svg() {
    assert_eq!(
        image_meta_test("svg.svg"),
        ImageMetaResponse { width: 300, height: 300, format: Some("svg") }
    );
}

#[test]
fn read_image_metadata_webp() {
    assert_eq!(
        image_meta_test("webp.webp"),
        ImageMetaResponse { width: 300, height: 380, format: Some("webp") }
    );
}

// TODO: Test that hash remains the same if physical path is changed
BIN components/imageproc/tests/test_imgs/jpg.jpg Normal file (binary, 47 KiB)

BIN components/imageproc/tests/test_imgs/png.png Normal file (binary, 120 KiB)

56 components/imageproc/tests/test_imgs/svg.svg Normal file (4.1 KiB)

@ -0,0 +1,56 @@

<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="100%" height="100%" viewBox="0 0 300 300">

<title>SVG Logo</title>
<desc>Designed for the SVG Logo Contest in 2006 by Harvey Rayner, and adopted by W3C in 2009. It is available under the Creative Commons license for those who have an SVG product or who are using SVG on their site.</desc>

<metadata id="license">
  <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:cc="http://web.resource.org/cc/">
    <cc:Work rdf:about="">
      <dc:title>SVG Logo</dc:title>
      <dc:date>14-08-2009</dc:date>
      <dc:creator>
        <cc:Agent><dc:title>W3C</dc:title></cc:Agent>
        <cc:Agent><dc:title>Harvey Rayner, designer</dc:title></cc:Agent>
      </dc:creator>
      <dc:description>See document description</dc:description>
      <cc:license rdf:resource="http://creativecommons.org/licenses/by-nc-sa/2.5/"/>
      <dc:format>image/svg+xml</dc:format>
      <dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/>
    </cc:Work>
    <cc:License rdf:about="http://creativecommons.org/licenses/by-nc-sa/2.5/">
      <cc:permits rdf:resource="http://web.resource.org/cc/Reproduction"/>
      <cc:permits rdf:resource="http://web.resource.org/cc/Distribution"/>
      <cc:requires rdf:resource="http://web.resource.org/cc/Notice"/>
      <cc:requires rdf:resource="http://web.resource.org/cc/Attribution"/>
      <cc:prohibits rdf:resource="http://web.resource.org/cc/CommercialUse"/>
      <cc:permits rdf:resource="http://web.resource.org/cc/DerivativeWorks"/>
      <cc:requires rdf:resource="http://web.resource.org/cc/ShareAlike"/>
    </cc:License>
  </rdf:RDF>
</metadata>

<defs>
  <g id="SVG" fill="#ffffff" transform="scale(2) translate(20,79)">
    <path id="S" d="M 5.482,31.319 C2.163,28.001 0.109,23.419 0.109,18.358 C0.109,8.232 8.322,0.024 18.443,0.024 C28.569,0.024 36.782,8.232 36.782,18.358 L26.042,18.358 C26.042,14.164 22.638,10.765 18.443,10.765 C14.249,10.765 10.850,14.164 10.850,18.358 C10.850,20.453 11.701,22.351 13.070,23.721 L13.075,23.721 C14.450,25.101 15.595,25.500 18.443,25.952 L18.443,25.952 C23.509,26.479 28.091,28.006 31.409,31.324 L31.409,31.324 C34.728,34.643 36.782,39.225 36.782,44.286 C36.782,54.412 28.569,62.625 18.443,62.625 C8.322,62.625 0.109,54.412 0.109,44.286 L10.850,44.286 C10.850,48.480 14.249,51.884 18.443,51.884 C22.638,51.884 26.042,48.480 26.042,44.286 C26.042,42.191 25.191,40.298 23.821,38.923 L23.816,38.923 C22.441,37.548 20.468,37.074 18.443,36.697 L18.443,36.692 C13.533,35.939 8.800,34.638 5.482,31.319 L5.482,31.319 L5.482,31.319 Z"/>

    <path id="V" d="M 73.452,0.024 L60.482,62.625 L49.742,62.625 L36.782,0.024 L47.522,0.024 L55.122,36.687 L62.712,0.024 L73.452,0.024 Z"/>

    <path id="G" d="M 91.792,25.952 L110.126,25.952 L110.126,44.286 L110.131,44.286 C110.131,54.413 101.918,62.626 91.792,62.626 C81.665,62.626 73.458,54.413 73.458,44.286 L73.458,44.286 L73.458,18.359 L73.453,18.359 C73.453,8.233 81.665,0.025 91.792,0.025 C101.913,0.025 110.126,8.233 110.126,18.359 L99.385,18.359 C99.385,14.169 95.981,10.765 91.792,10.765 C87.597,10.765 84.198,14.169 84.198,18.359 L84.198,44.286 L84.198,44.286 C84.198,48.481 87.597,51.880 91.792,51.880 C95.981,51.880 99.380,48.481 99.385,44.291 L99.385,44.286 L99.385,36.698 L91.792,36.698 L91.792,25.952 L91.792,25.952 Z"/>
  </g>
</defs>

<path id="base" fill="#000" d="M8.5,150 H291.5 V250 C291.5,273.5 273.5,291.5 250,291.5 H50 C26.5,291.5 8.5,273.5 8.5,250 Z"/>
<g stroke-width="38.0086" stroke="#000">
  <g id="svgstar" transform="translate(150, 150)">
    <path id="svgbar" fill="#ffb13b" d="M-84.1487,-15.8513 a22.4171,22.4171 0 1 0 0,31.7026 h168.2974 a22.4171,22.4171 0 1 0 0,-31.7026 Z"/>
    <use xlink:href="#svgbar" transform="rotate(45)"/>
    <use xlink:href="#svgbar" transform="rotate(90)"/>
    <use xlink:href="#svgbar" transform="rotate(135)"/>
  </g>
</g>
<use xlink:href="#svgstar"/>
<use xlink:href="#base" opacity="0.85"/>
<use xlink:href="#SVG"/>

</svg>

BIN components/imageproc/tests/test_imgs/webp.webp Normal file (binary, 9.6 KiB)

@ -5,7 +5,7 @@ authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

edition = "2018"

[dependencies]
slotmap = "0.4"
slotmap = "1"
rayon = "1"
chrono = { version = "0.4", features = ["serde"] }
tera = "1"

@ -13,6 +13,7 @@ serde = "1"

serde_derive = "1"
regex = "1"
lazy_static = "1"
lexical-sort = "0.3"

front_matter = { path = "../front_matter" }
config = { path = "../config" }
@ -136,7 +136,7 @@ impl FileInfo {

        // The language code is not present in the config: typo or the user forgot to add it to the
        // config
        if !config.languages_codes().contains(&parts[1].as_ref()) {
        if !config.other_languages().contains_key(&parts[1].as_ref()) {
            bail!("File {:?} has a language code of {} which isn't present in the config.toml `languages`", self.path, parts[1]);
        }

@ -152,7 +152,7 @@ impl FileInfo {

mod tests {
    use std::path::{Path, PathBuf};

    use config::{Config, Language};
    use config::{Config, LanguageOptions};

    use super::{find_content_components, FileInfo};

@ -173,7 +173,7 @@ mod tests {

    }

    #[test]
    fn doesnt_fail_with_multiple_content_directories() {
    fn doesnt_fail_with_multiple_content_directories_in_path() {
        let file = FileInfo::new_page(
            &Path::new("/home/vincent/code/content/site/content/posts/tutorials/python/index.md"),
            &PathBuf::from("/home/vincent/code/content/site"),

@ -184,7 +184,7 @@ mod tests {

    #[test]
    fn can_find_valid_language_in_page() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_page(
            &Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
            &PathBuf::new(),

@ -197,7 +197,7 @@ mod tests {

    #[test]
    fn can_find_valid_language_with_default_locale() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_page(
            &Path::new("/home/vincent/code/site/content/posts/tutorials/python.en.md"),
            &PathBuf::new(),

@ -210,7 +210,7 @@ mod tests {

    #[test]
    fn can_find_valid_language_in_page_with_assets() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_page(
            &Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"),
            &PathBuf::new(),

@ -236,7 +236,7 @@ mod tests {

    #[test]
    fn errors_on_unknown_language_in_page_with_i18n_on() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("it"), feed: false, search: false });
        config.languages.insert("it".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_page(
            &Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
            &PathBuf::new(),

@ -248,7 +248,7 @@ mod tests {

    #[test]
    fn can_find_valid_language_in_section() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_section(
            &Path::new("/home/vincent/code/site/content/posts/tutorials/_index.fr.md"),
            &PathBuf::new(),

@ -275,7 +275,7 @@ mod tests {

    #[test]
    fn correct_canonical_after_find_language() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let mut file = FileInfo::new_page(
            &Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"),
            &PathBuf::new(),

@ -3,11 +3,15 @@ mod page;

mod section;
mod ser;

use std::fs::read_dir;
use std::path::{Path, PathBuf};

pub use self::file_info::FileInfo;
pub use self::page::Page;
pub use self::section::Section;
pub use self::ser::{SerializingPage, SerializingSection};

use config::Config;
use rendering::Heading;

pub fn has_anchor(headings: &[Heading], anchor: &str) -> bool {

@ -23,9 +27,67 @@ pub fn has_anchor(headings: &[Heading], anchor: &str) -> bool {

    false
}

/// Looks into the current folder for the path and sees if there's anything that is not a .md
/// file. Those will be copied next to the rendered .html file
pub fn find_related_assets(path: &Path, config: &Config) -> Vec<PathBuf> {
    let mut assets = vec![];

    for entry in read_dir(path).unwrap().filter_map(std::result::Result::ok) {
        let entry_path = entry.path();
        if entry_path.is_file() {
            match entry_path.extension() {
                Some(e) => match e.to_str() {
                    Some("md") => continue,
                    _ => assets.push(entry_path.to_path_buf()),
                },
                None => continue,
            }
        }
    }

    if let Some(ref globset) = config.ignored_content_globset {
        // `find_related_assets` only scans the immediate directory (it is not recursive) so our
        // filtering only needs to work against the file_name component, not the full suffix. If
        // `find_related_assets` was changed to also return files in subdirectories, we could
        // use `PathBuf.strip_prefix` to remove the parent directory and then glob-filter
        // against the remaining path. Note that the current behaviour effectively means that
        // the `ignored_content` setting in the config file is limited to single-file glob
        // patterns (no "**" patterns).
        assets = assets
            .into_iter()
            .filter(|path| match path.file_name() {
                None => false,
                Some(file) => !globset.is_match(file),
            })
            .collect();
    }

    assets
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::fs::File;

    use config::Config;
    use tempfile::tempdir;

    #[test]
    fn can_find_related_assets() {
        let tmp_dir = tempdir().expect("create temp dir");
        File::create(tmp_dir.path().join("index.md")).unwrap();
        File::create(tmp_dir.path().join("example.js")).unwrap();
        File::create(tmp_dir.path().join("graph.jpg")).unwrap();
        File::create(tmp_dir.path().join("fail.png")).unwrap();

        let assets = find_related_assets(tmp_dir.path(), &Config::default());
        assert_eq!(assets.len(), 3);
        assert_eq!(assets.iter().filter(|p| p.extension().unwrap() != "md").count(), 3);
        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "example.js").count(), 1);
        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "graph.jpg").count(), 1);
        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "fail.png").count(), 1);
    }

    #[test]
    fn can_find_anchor_at_root() {

@ -12,21 +12,23 @@ use config::Config;

use errors::{Error, Result};
use front_matter::{split_page_content, InsertAnchor, PageFrontMatter};
use rendering::{render_content, Heading, RenderContext};
use utils::fs::{find_related_assets, read_file};
use utils::site::get_reading_analytics;
use utils::slugs::slugify_paths;
use utils::templates::render_template;

use crate::content::file_info::FileInfo;
use crate::content::has_anchor;
use crate::content::ser::SerializingPage;
use utils::slugs::slugify_paths;
use crate::content::{find_related_assets, has_anchor};
use utils::fs::read_file;

lazy_static! {
    // Based on https://regex101.com/r/H2n38Z/1/tests
    // A regex parsing RFC3339 date followed by {_,-}, some characters and ended by .md
    static ref RFC3339_DATE: Regex = Regex::new(
        r"^(?P<datetime>(\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])(T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.[0-9]+)?(Z|(\+|-)([01][0-9]|2[0-3]):([0-5][0-9])))?)(_|-)(?P<slug>.+$)"
        r"^(?P<datetime>(\d{4})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])(T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.[0-9]+)?(Z|(\+|-)([01][0-9]|2[0-3]):([0-5][0-9])))?)\s?(_|-)(?P<slug>.+$)"
    ).unwrap();

    static ref FOOTNOTES_RE: Regex = Regex::new(r"<sup\s*.*?>\s*.*?</sup>").unwrap();
}
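A sketch of what the added `\s?` in RFC3339_DATE buys (file stems invented for the example; the regex runs against the file name without its extension):

    // The old pattern required "_" or "-" to follow the date immediately, so
    // a stem like "2018-10-08 - hello" (note the space) never matched.
    let caps = RFC3339_DATE.captures("2018-10-08_hello").unwrap();
    assert_eq!((&caps["datetime"], &caps["slug"]), ("2018-10-08", "hello"));
    let caps = RFC3339_DATE.captures("2018-10-08 - hello").unwrap();
    // The space after the separator lands in the slug; slugification trims it
    // later unless `slugify.paths` is Off (see the page.rs tests below).
    assert_eq!((&caps["datetime"], &caps["slug"]), ("2018-10-08", " hello"));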

#[derive(Clone, Debug, Default, PartialEq)]

@ -41,7 +43,7 @@ pub struct Page {

    pub raw_content: String,
    /// All the non-md files we found next to the .md file
    pub assets: Vec<PathBuf>,
    /// All the non-md files we found next to the .md file as string for use in templates
    /// All the non-md files we found next to the .md file
    pub serialized_assets: Vec<String>,
    /// The rendered HTML of the page
    pub content: String,

@ -58,10 +60,18 @@ pub struct Page {

    /// When <!-- more --> is found in the text, will take the content up to that part
    /// as summary
    pub summary: Option<String>,
    /// The earlier updated page, for pages sorted by updated date
    pub earlier_updated: Option<DefaultKey>,
    /// The later updated page, for pages sorted by updated date
    pub later_updated: Option<DefaultKey>,
    /// The earlier page, for pages sorted by date
    pub earlier: Option<DefaultKey>,
    /// The later page, for pages sorted by date
    pub later: Option<DefaultKey>,
    /// The previous page, for pages sorted by title
    pub title_prev: Option<DefaultKey>,
    /// The next page, for pages sorted by title
    pub title_next: Option<DefaultKey>,
    /// The lighter page, for pages sorted by weight
    pub lighter: Option<DefaultKey>,
    /// The heavier page, for pages sorted by weight

@ -78,12 +88,11 @@ pub struct Page {

    pub lang: String,
    /// Contains all the translated versions of that page
    pub translations: Vec<DefaultKey>,
    /// Contains the internal links that have an anchor: we can only check the anchor
    /// after all pages have been built and their ToC compiled. The page itself should exist otherwise
    /// it would have errored before getting there
    /// (path to markdown, anchor value)
    pub internal_links_with_anchors: Vec<(String, String)>,
    /// Contains the external links that need to be checked
    /// The list of all internal links (as path to markdown file), with optional anchor fragments.
    /// We can only check the anchor after all pages have been built and their ToC compiled.
    /// The page itself should exist otherwise it would have errored before getting there.
    pub internal_links: Vec<(String, Option<String>)>,
    /// The list of all links to external webpages. They can be validated by the `link_checker`.
    pub external_links: Vec<String>,
}

@ -207,27 +216,7 @@ impl Page {

        if page.file.name == "index" {
            let parent_dir = path.parent().unwrap();
            let assets = find_related_assets(parent_dir);

            if let Some(ref globset) = config.ignored_content_globset {
                // `find_related_assets` only scans the immediate directory (it is not recursive) so our
                // filtering only needs to work against the file_name component, not the full suffix. If
                // `find_related_assets` was changed to also return files in subdirectories, we could
                // use `PathBuf.strip_prefix` to remove the parent directory and then glob-filter
                // against the remaining path. Note that the current behaviour effectively means that
                // the `ignored_content` setting in the config file is limited to single-file glob
                // patterns (no "**" patterns).
                page.assets = assets
                    .into_iter()
                    .filter(|path| match path.file_name() {
                        None => false,
                        Some(file) => !globset.is_match(file),
                    })
                    .collect();
            } else {
                page.assets = assets;
            }

            page.assets = find_related_assets(parent_dir, config);
            page.serialized_assets = page.serialize_assets(&base_path);
        } else {
            page.assets = vec![];

@ -245,8 +234,14 @@ impl Page {

        config: &Config,
        anchor_insert: InsertAnchor,
    ) -> Result<()> {
        let mut context =
            RenderContext::new(tera, config, &self.permalink, permalinks, anchor_insert);
        let mut context = RenderContext::new(
            tera,
            config,
            &self.lang,
            &self.permalink,
            permalinks,
            anchor_insert,
        );

        context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None));

@ -254,11 +249,15 @@ impl Page {

            Error::chain(format!("Failed to render content of {}", self.file.path.display()), e)
        })?;

        self.summary = res.summary_len.map(|l| res.body[0..l].to_owned());
        self.summary = if let Some(s) = res.summary_len.map(|l| &res.body[0..l]) {
            Some(FOOTNOTES_RE.replace(s, "").into_owned())
        } else {
            None
        };
        self.content = res.body;
        self.toc = res.toc;
        self.external_links = res.external_links;
        self.internal_links_with_anchors = res.internal_links_with_anchors;
        self.internal_links = res.internal_links;

        Ok(())
    }

@ -271,7 +270,7 @@ impl Page {

        };

        let mut context = TeraContext::new();
        context.insert("config", config);
        context.insert("config", &config.serialize(&self.lang));
        context.insert("current_url", &self.permalink);
        context.insert("current_path", &self.path);
        context.insert("page", &self.to_serialized(library));

@ -300,7 +299,7 @@ impl Page {

                .to_path_buf();
                path
            })
            .map(|path| path.to_string_lossy().to_string())
            .map(|path| format!("/{}", path.display()))
            .collect()
    }

@ -329,12 +328,13 @@ mod tests {

    use tera::Tera;

    use super::Page;
    use config::{Config, Language};
    use config::{Config, LanguageOptions};
    use front_matter::InsertAnchor;
    use utils::slugs::SlugifyStrategy;

    #[test]
    fn test_can_parse_a_valid_page() {
    fn can_parse_a_valid_page() {
        let config = Config::default_for_test();
        let content = r#"
+++
title = "Hello"

@ -342,16 +342,11 @@ description = "hey there"

slug = "hello-world"
+++
Hello world"#;
        let res = Page::parse(Path::new("post.md"), content, &Config::default(), &PathBuf::new());
        let res = Page::parse(Path::new("post.md"), content, &config, &PathBuf::new());
        assert!(res.is_ok());
        let mut page = res.unwrap();
        page.render_markdown(
            &HashMap::default(),
            &Tera::default(),
            &Config::default(),
            InsertAnchor::None,
        )
        .unwrap();
        page.render_markdown(&HashMap::default(), &Tera::default(), &config, InsertAnchor::None)
            .unwrap();

        assert_eq!(page.meta.title.unwrap(), "Hello".to_string());
        assert_eq!(page.meta.slug.unwrap(), "hello-world".to_string());

@ -486,7 +481,7 @@ Hello world"#;

        let mut config = Config::default();
        config.slugify.paths = SlugifyStrategy::On;
        let res =
            Page::parse(Path::new(" file with space.md"), "+++\n+++", &config, &PathBuf::new());
            Page::parse(Path::new(" file with space.md"), "+++\n+++\n", &config, &PathBuf::new());
        assert!(res.is_ok());
        let page = res.unwrap();
        assert_eq!(page.slug, "file-with-space");

@ -497,7 +492,7 @@ Hello world"#;

    fn can_make_path_from_utf8_filename() {
        let mut config = Config::default();
        config.slugify.paths = SlugifyStrategy::Safe;
        let res = Page::parse(Path::new("日本.md"), "+++\n++++", &config, &PathBuf::new());
        let res = Page::parse(Path::new("日本.md"), "+++\n+++\n", &config, &PathBuf::new());
        assert!(res.is_ok());
        let page = res.unwrap();
        assert_eq!(page.slug, "日本");

@ -506,7 +501,7 @@ Hello world"#;

    #[test]
    fn can_specify_summary() {
        let config = Config::default();
        let config = Config::default_for_test();
        let content = r#"
+++
+++

@ -521,6 +516,33 @@ Hello world

        assert_eq!(page.summary, Some("<p>Hello world</p>\n".to_string()));
    }

    #[test]
    fn strips_footnotes_in_summary() {
        let config = Config::default_for_test();
        let content = r#"
+++
+++
This page has footnotes, here's one. [^1]

<!-- more -->

And here's another. [^2]

[^1]: This is the first footnote.

[^2]: This is the second footnote."#
            .to_string();
        let res = Page::parse(Path::new("hello.md"), &content, &config, &PathBuf::new());
        assert!(res.is_ok());
        let mut page = res.unwrap();
        page.render_markdown(&HashMap::default(), &Tera::default(), &config, InsertAnchor::None)
            .unwrap();
        assert_eq!(
            page.summary,
            Some("<p>This page has footnotes, here\'s one. </p>\n".to_string())
        );
    }

    #[test]
    fn page_with_assets_gets_right_info() {
        let tmp_dir = tempdir().expect("create temp dir");

@ -545,6 +567,7 @@ Hello world

        assert_eq!(page.file.parent, path.join("content").join("posts"));
        assert_eq!(page.slug, "with-assets");
        assert_eq!(page.assets.len(), 3);
        assert!(page.serialized_assets[0].starts_with('/'));
        assert_eq!(page.permalink, "http://a-website.com/posts/with-assets/");
    }

@ -679,6 +702,63 @@ Hello world

        assert_eq!(page.slug, "hello");
    }

    // https://github.com/getzola/zola/pull/1323#issuecomment-779401063
    #[test]
    fn can_get_date_from_short_date_in_filename_respects_slugification_strategy() {
        let mut config = Config::default();
        config.slugify.paths = SlugifyStrategy::Off;
        let content = r#"
+++
+++
Hello world
<!-- more -->"#
            .to_string();
        let res =
            Page::parse(Path::new("2018-10-08_ こんにちは.md"), &content, &config, &PathBuf::new());
        assert!(res.is_ok());
        let page = res.unwrap();

        assert_eq!(page.meta.date, Some("2018-10-08".to_string()));
        assert_eq!(page.slug, " こんにちは");
    }

    #[test]
    fn can_get_date_from_filename_with_spaces() {
        let config = Config::default();
        let content = r#"
+++
+++
Hello world
<!-- more -->"#
            .to_string();
        let res =
            Page::parse(Path::new("2018-10-08 - hello.md"), &content, &config, &PathBuf::new());
        assert!(res.is_ok());
        let page = res.unwrap();

        assert_eq!(page.meta.date, Some("2018-10-08".to_string()));
        assert_eq!(page.slug, "hello");
    }

    #[test]
    fn can_get_date_from_filename_with_spaces_respects_slugification() {
        let mut config = Config::default();
        config.slugify.paths = SlugifyStrategy::Off;
        let content = r#"
+++
+++
Hello world
<!-- more -->"#
            .to_string();
        let res =
            Page::parse(Path::new("2018-10-08 - hello.md"), &content, &config, &PathBuf::new());
        assert!(res.is_ok());
        let page = res.unwrap();

        assert_eq!(page.meta.date, Some("2018-10-08".to_string()));
        assert_eq!(page.slug, " hello");
    }

    #[test]
    fn can_get_date_from_full_rfc3339_date_in_filename() {
        let config = Config::default();

@ -701,6 +781,30 @@ Hello world

        assert_eq!(page.slug, "hello");
    }

    // https://github.com/getzola/zola/pull/1323#issuecomment-779401063
    #[test]
    fn can_get_date_from_full_rfc3339_date_in_filename_respects_slugification_strategy() {
        let mut config = Config::default();
        config.slugify.paths = SlugifyStrategy::Off;
        let content = r#"
+++
+++
Hello world
<!-- more -->"#
            .to_string();
        let res = Page::parse(
            Path::new("2018-10-02T15:00:00Z- こんにちは.md"),
            &content,
            &config,
            &PathBuf::new(),
        );
        assert!(res.is_ok());
        let page = res.unwrap();

        assert_eq!(page.meta.date, Some("2018-10-02T15:00:00Z".to_string()));
        assert_eq!(page.slug, " こんにちは");
    }

    #[test]
    fn frontmatter_date_override_filename_date() {
        let config = Config::default();

@ -722,7 +826,7 @@ Hello world

    #[test]
    fn can_specify_language_in_filename() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let content = r#"
+++
+++

@ -739,7 +843,7 @@ Bonjour le monde"#

    #[test]
    fn can_specify_language_in_filename_with_date() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let content = r#"
+++
+++

@ -758,7 +862,7 @@ Bonjour le monde"#

    #[test]
    fn i18n_frontmatter_path_overrides_default_permalink() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let content = r#"
+++
path = "bonjour"

@ -8,13 +8,13 @@ use config::Config;

use errors::{Error, Result};
use front_matter::{split_section_content, SectionFrontMatter};
use rendering::{render_content, Heading, RenderContext};
use utils::fs::{find_related_assets, read_file};
use utils::fs::read_file;
use utils::site::get_reading_analytics;
use utils::templates::render_template;

use crate::content::file_info::FileInfo;
use crate::content::has_anchor;
use crate::content::ser::SerializingSection;
use crate::content::{find_related_assets, has_anchor};
use crate::library::Library;

// Default is used to create a default index section if there is no _index.md in the root content directory

@ -36,7 +36,7 @@ pub struct Section {

    pub content: String,
    /// All the non-md files we found next to the .md file
    pub assets: Vec<PathBuf>,
    /// All the non-md files we found next to the .md file as string for use in templates
    /// All the non-md files we found next to the .md file as string
    pub serialized_assets: Vec<String>,
    /// All direct pages of that section
    pub pages: Vec<DefaultKey>,

@ -56,12 +56,11 @@ pub struct Section {

    /// The language of that section. Equal to the default lang if the user doesn't set up `languages` in config.
    /// Corresponds to the lang in the _index.{lang}.md file scheme
    pub lang: String,
    /// Contains the internal links that have an anchor: we can only check the anchor
    /// after all pages have been built and their ToC compiled. The page itself should exist otherwise
    /// it would have errored before getting there
    /// (path to markdown, anchor value)
    pub internal_links_with_anchors: Vec<(String, String)>,
    /// Contains the external links that need to be checked
    /// The list of all internal links (as path to markdown file), with optional anchor fragments.
    /// We can only check the anchor after all pages have been built and their ToC compiled.
    /// The page itself should exist otherwise it would have errored before getting there.
    pub internal_links: Vec<(String, Option<String>)>,
    /// The list of all links to external webpages. They can be validated by the `link_checker`.
    pub external_links: Vec<String>,
}

@ -123,27 +122,7 @@ impl Section {

        let mut section = Section::parse(path, &content, config, base_path)?;

        let parent_dir = path.parent().unwrap();
        let assets = find_related_assets(parent_dir);

        if let Some(ref globset) = config.ignored_content_globset {
            // `find_related_assets` only scans the immediate directory (it is not recursive) so our
            // filtering only needs to work against the file_name component, not the full suffix. If
            // `find_related_assets` was changed to also return files in subdirectories, we could
            // use `PathBuf.strip_prefix` to remove the parent directory and then glob-filter
            // against the remaining path. Note that the current behaviour effectively means that
            // the `ignored_content` setting in the config file is limited to single-file glob
            // patterns (no "**" patterns).
            section.assets = assets
                .into_iter()
                .filter(|path| match path.file_name() {
                    None => false,
                    Some(file) => !globset.is_match(file),
                })
                .collect();
        } else {
            section.assets = assets;
        }

        section.assets = find_related_assets(parent_dir, config);
        section.serialized_assets = section.serialize_assets();

        Ok(section)

@ -172,6 +151,7 @@ impl Section {

        let mut context = RenderContext::new(
            tera,
            config,
            &self.lang,
            &self.permalink,
            permalinks,
            self.meta.insert_anchor_links,

@ -185,7 +165,7 @@ impl Section {

        self.content = res.body;
        self.toc = res.toc;
        self.external_links = res.external_links;
        self.internal_links_with_anchors = res.internal_links_with_anchors;
        self.internal_links = res.internal_links;

        Ok(())
    }

@ -195,7 +175,7 @@ impl Section {

        let tpl_name = self.get_template_name();

        let mut context = TeraContext::new();
        context.insert("config", config);
        context.insert("config", &config.serialize(&self.lang));
        context.insert("current_url", &self.permalink);
        context.insert("current_path", &self.path);
        context.insert("section", &self.to_serialized(library));

@ -217,7 +197,7 @@ impl Section {

            .iter()
            .filter_map(|asset| asset.file_name())
            .filter_map(|filename| filename.to_str())
            .map(|filename| self.path.clone() + filename)
            .map(|filename| format!("{}{}", self.path, filename))
            .collect()
    }

@ -254,7 +234,7 @@ mod tests {

    use tempfile::tempdir;

    use super::Section;
    use config::{Config, Language};
    use config::{Config, LanguageOptions};

    #[test]
    fn section_with_assets_gets_right_info() {

@ -278,6 +258,7 @@ mod tests {

        assert!(res.is_ok());
        let section = res.unwrap();
        assert_eq!(section.assets.len(), 3);
        assert!(section.serialized_assets[0].starts_with('/'));
        assert_eq!(section.permalink, "http://a-website.com/posts/with-assets/");
    }

@ -312,7 +293,7 @@ mod tests {

    #[test]
    fn can_specify_language_in_filename() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let content = r#"
+++
+++

@ -334,7 +315,7 @@ Bonjour le monde"#

    #[test]
    fn can_make_links_to_translated_sections_without_double_trailing_slash() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let content = r#"
+++
+++

@ -351,7 +332,7 @@ Bonjour le monde"#

    #[test]
    fn can_make_links_to_translated_subsections_with_trailing_slash() {
        let mut config = Config::default();
        config.languages.push(Language { code: String::from("fr"), feed: false, search: false });
        config.languages.insert("fr".to_owned(), LanguageOptions::default());
        let content = r#"
+++
+++

@ -69,7 +69,7 @@ pub struct SerializingPage<'a> {

    content: &'a str,
    permalink: &'a str,
    slug: &'a str,
    ancestors: Vec<String>,
    ancestors: Vec<&'a str>,
    title: &'a Option<String>,
    description: &'a Option<String>,
    updated: &'a Option<String>,

@ -90,8 +90,12 @@ pub struct SerializingPage<'a> {

    lang: &'a str,
    lighter: Option<Box<SerializingPage<'a>>>,
    heavier: Option<Box<SerializingPage<'a>>>,
    earlier_updated: Option<Box<SerializingPage<'a>>>,
    later_updated: Option<Box<SerializingPage<'a>>>,
    earlier: Option<Box<SerializingPage<'a>>>,
    later: Option<Box<SerializingPage<'a>>>,
    title_prev: Option<Box<SerializingPage<'a>>>,
    title_next: Option<Box<SerializingPage<'a>>>,
    translations: Vec<TranslatedContent<'a>>,
}

@ -113,16 +117,28 @@ impl<'a> SerializingPage<'a> {

        let heavier = page
            .heavier
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let earlier_updated = page
            .earlier_updated
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let later_updated = page
            .later_updated
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let earlier = page
            .earlier
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let later = page
            .later
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let title_prev = page
            .title_prev
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let title_next = page
            .title_next
            .map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
        let ancestors = page
            .ancestors
            .iter()
            .map(|k| library.get_section_by_key(*k).file.relative.clone())
            .map(|k| library.get_section_by_key(*k).file.relative.as_str())
            .collect();

        let translations = TranslatedContent::find_all_pages(page, library);

@ -153,8 +169,12 @@ impl<'a> SerializingPage<'a> {

            lang: &page.lang,
            lighter,
            heavier,
            earlier_updated,
            later_updated,
            earlier,
            later,
            title_prev,
            title_next,
            translations,
        }
    }

@ -177,7 +197,7 @@ impl<'a> SerializingPage<'a> {

        let ancestors = if let Some(ref lib) = library {
            page.ancestors
                .iter()
                .map(|k| lib.get_section_by_key(*k).file.relative.clone())
                .map(|k| lib.get_section_by_key(*k).file.relative.as_str())
                .collect()
        } else {
            vec![]

@ -215,8 +235,12 @@ impl<'a> SerializingPage<'a> {

            lang: &page.lang,
            lighter: None,
            heavier: None,
            earlier_updated: None,
            later_updated: None,
            earlier: None,
            later: None,
            title_prev: None,
            title_next: None,
            translations,
        }
    }

@ -227,7 +251,7 @@ pub struct SerializingSection<'a> {

    relative_path: &'a str,
    content: &'a str,
    permalink: &'a str,
    ancestors: Vec<String>,
    ancestors: Vec<&'a str>,
    title: &'a Option<String>,
    description: &'a Option<String>,
    extra: &'a Map<String, Value>,

@ -259,7 +283,7 @@ impl<'a> SerializingSection<'a> {

        let ancestors = section
            .ancestors
            .iter()
            .map(|k| library.get_section_by_key(*k).file.relative.clone())
            .map(|k| library.get_section_by_key(*k).file.relative.as_str())
            .collect();
        let translations = TranslatedContent::find_all_sections(section, library);

@ -293,7 +317,7 @@ impl<'a> SerializingSection<'a> {

            ancestors = section
                .ancestors
                .iter()
                .map(|k| lib.get_section_by_key(*k).file.relative.clone())
                .map(|k| lib.get_section_by_key(*k).file.relative.as_str())
                .collect();
            translations = TranslatedContent::find_all_sections(section, lib);
            subsections =

@ -3,11 +3,12 @@ use std::path::{Path, PathBuf};

use slotmap::{DefaultKey, DenseSlotMap};

use front_matter::SortBy;

use crate::content::{Page, Section};
use crate::sorting::{find_siblings, sort_pages_by_date, sort_pages_by_weight};
use crate::sorting::{
    find_siblings, sort_pages_by_date, sort_pages_by_title, sort_pages_by_weight,
};
use config::Config;
use front_matter::{PageFrontMatter, SortBy};

// Like vec! but for HashSet
macro_rules! set {

@ -82,7 +83,7 @@ impl Library {

        let file_path = section.file.path.clone();
        let rel_path = section.path.clone();

        let mut entries = vec![rel_path.clone()];
        let mut entries = vec![rel_path];
        entries.extend(section.meta.aliases.iter().map(|a| a.clone()).collect::<Vec<String>>());
        self.insert_reverse_aliases(entries, &section.file.relative);

@ -96,7 +97,7 @@ impl Library {

        let file_path = page.file.path.clone();
        let rel_path = page.path.clone();

        let mut entries = vec![rel_path.clone()];
        let mut entries = vec![rel_path];
        entries.extend(page.meta.aliases.iter().map(|a| a.clone()).collect::<Vec<String>>());
        self.insert_reverse_aliases(entries, &page.file.relative);

@ -152,6 +153,16 @@ impl Library {

                .push(section.file.path.clone());
            }

            // populate translations if necessary
            if self.is_multilingual {
                self.translations
                    .entry(section.file.canonical.clone())
                    .and_modify(|trans| {
                        trans.insert(key);
                    })
                    .or_insert(set![key]);
            };

            // Index has no ancestors, no need to go through it
            if section.is_index() {
                ancestors.insert(section.file.path.clone(), vec![]);

@ -175,16 +186,6 @@ impl Library {

                }
            }
            ancestors.insert(section.file.path.clone(), parents);

            // populate translations if necessary
            if self.is_multilingual {
                self.translations
                    .entry(section.file.canonical.clone())
                    .and_modify(|trans| {
                        trans.insert(key);
                    })
                    .or_insert(set![key]);
            };
        }

        for (key, page) in &mut self.pages {

@ -263,37 +264,47 @@ impl Library {

    /// Sort all sections' pages according to the sorting method given
    /// Pages that cannot be sorted are put in section.ignored_pages instead
    pub fn sort_sections_pages(&mut self) {
        fn get_data<'a, T>(
            section: &'a Section,
            pages: &'a DenseSlotMap<DefaultKey, Page>,
            field: impl Fn(&'a PageFrontMatter) -> Option<T>,
        ) -> Vec<(&'a DefaultKey, Option<T>, &'a str)> {
            section
                .pages
                .iter()
                .map(|k| {
                    if let Some(page) = pages.get(*k) {
                        (k, field(&page.meta), page.permalink.as_ref())
                    } else {
                        unreachable!("Sorting got an unknown page")
                    }
                })
                .collect()
        }
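        // For orientation (illustrative values, not part of the diff): each
        // element get_data returns is a (key, sort field, permalink) tuple,
        // and pages whose field is None cannot be sorted, ending up in
        // ignored_pages. For SortBy::Title the data might look like
        //   [(&k1, Some("Apple"),  "https://site/a/"),
        //    (&k2, None,           "https://site/b/"),   // no title
        //    (&k3, Some("banana"), "https://site/c/")]
        // and sort_pages_by_title would order k1 before k3 (assuming the
        // natural, case-insensitive comparison from the new lexical-sort
        // dependency) while reporting k2 in the cannot-be-sorted list.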
|
||||
|
||||
let mut updates = HashMap::new();
|
||||
for (key, section) in &self.sections {
|
||||
let (sorted_pages, cannot_be_sorted_pages) = match section.meta.sort_by {
|
||||
SortBy::None => continue,
|
||||
SortBy::Date => {
|
||||
let data = section
|
||||
.pages
|
||||
.iter()
|
||||
.map(|k| {
|
||||
if let Some(page) = self.pages.get(*k) {
|
||||
(k, page.meta.datetime, page.permalink.as_ref())
|
||||
} else {
|
||||
unreachable!("Sorting got an unknown page")
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
let data = get_data(section, &self.pages, |meta| meta.datetime);
|
||||
|
||||
sort_pages_by_date(data)
|
||||
}
|
||||
SortBy::UpdateDate => {
|
||||
let data = get_data(section, &self.pages, |meta| {
|
||||
std::cmp::max(meta.datetime, meta.updated_datetime)
|
||||
});
|
||||
|
||||
sort_pages_by_date(data)
|
||||
}
|
||||
SortBy::Title => {
|
||||
let data = get_data(section, &self.pages, |meta| meta.title.as_deref());
|
||||
|
||||
sort_pages_by_title(data)
|
||||
}
|
||||
SortBy::Weight => {
|
||||
let data = section
|
||||
.pages
|
||||
.iter()
|
||||
.map(|k| {
|
||||
if let Some(page) = self.pages.get(*k) {
|
||||
(k, page.meta.weight, page.permalink.as_ref())
|
||||
} else {
|
||||
unreachable!("Sorting got an unknown page")
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
let data = get_data(section, &self.pages, |meta| meta.weight);
|
||||
|
||||
sort_pages_by_weight(data)
|
||||
}
|
||||
|
@ -302,24 +313,42 @@ impl Library {
|
|||
}
|
||||
|
||||
for (key, (sorted, cannot_be_sorted, sort_by)) in updates {
|
||||
// Find sibling between sorted pages first
|
||||
let with_siblings = find_siblings(&sorted);
|
||||
let section_is_transparent = if let Some(section) = self.sections.get(key) {
|
||||
section.meta.transparent
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
for (k2, val1, val2) in with_siblings {
|
||||
if let Some(page) = self.pages.get_mut(k2) {
|
||||
match sort_by {
|
||||
SortBy::Date => {
|
||||
page.earlier = val2;
|
||||
page.later = val1;
|
||||
if !section_is_transparent {
|
||||
// Find sibling between sorted pages first
|
||||
let with_siblings = find_siblings(&sorted);
|
||||
|
||||
for (k2, val1, val2) in with_siblings {
|
||||
if let Some(page) = self.pages.get_mut(k2) {
|
||||
match sort_by {
|
||||
SortBy::Date => {
|
||||
page.earlier = val2;
|
||||
page.later = val1;
|
||||
}
|
||||
SortBy::UpdateDate => {
|
||||
page.earlier_updated = val2;
|
||||
page.later_updated = val1;
|
||||
}
|
||||
SortBy::Title => {
|
||||
page.title_prev = val1;
|
||||
page.title_next = val2;
|
||||
}
|
||||
SortBy::Weight => {
|
||||
page.lighter = val1;
|
||||
page.heavier = val2;
|
||||
}
|
||||
SortBy::None => {
|
||||
unreachable!("Impossible to find siblings in SortBy::None")
|
||||
}
|
||||
}
|
||||
SortBy::Weight => {
|
||||
page.lighter = val1;
|
||||
page.heavier = val2;
|
||||
}
|
||||
SortBy::None => unreachable!("Impossible to find siblings in SortBy::None"),
|
||||
} else {
|
||||
unreachable!("Sorting got an unknown page")
|
||||
}
|
||||
} else {
|
||||
unreachable!("Sorting got an unknown page")
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -342,22 +371,7 @@ impl Library {
|
|||
.collect()
|
||||
}
|
||||
|
||||
/// Find the parent section & all grandparents section that have transparent=true
|
||||
/// Only used in rebuild.
|
||||
pub fn find_parent_sections<P: AsRef<Path>>(&self, path: P) -> Vec<&Section> {
|
||||
let mut parents = vec![];
|
||||
let page = self.get_page(path.as_ref()).unwrap();
|
||||
for ancestor in page.ancestors.iter().rev() {
|
||||
let section = self.get_section_by_key(*ancestor);
|
||||
if parents.is_empty() || section.meta.transparent {
|
||||
parents.push(section);
|
||||
}
|
||||
}
|
||||
|
||||
parents
|
||||
}
|
||||
|
||||
/// Only used in tests
|
||||
/// Used in integration tests
|
||||
pub fn get_section_key<P: AsRef<Path>>(&self, path: P) -> Option<&DefaultKey> {
|
||||
self.paths_to_sections.get(path.as_ref())
|
||||
}
|
||||
|
@ -366,6 +380,7 @@ impl Library {
|
|||
self.sections.get(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default())
|
||||
}
|
||||
|
||||
/// Used in integration tests
|
||||
pub fn get_section_mut<P: AsRef<Path>>(&mut self, path: P) -> Option<&mut Section> {
|
||||
self.sections
|
||||
.get_mut(self.paths_to_sections.get(path.as_ref()).cloned().unwrap_or_default())
|
||||
|
@ -375,10 +390,6 @@ impl Library {
|
|||
self.sections.get(key).unwrap()
|
||||
}
|
||||
|
||||
pub fn get_section_mut_by_key(&mut self, key: DefaultKey) -> &mut Section {
|
||||
self.sections.get_mut(key).unwrap()
|
||||
}
|
||||
|
||||
pub fn get_section_path_by_key(&self, key: DefaultKey) -> &str {
|
||||
&self.get_section_by_key(key).file.relative
|
||||
}
|
||||
|
@ -391,10 +402,6 @@ impl Library {
|
|||
self.pages.get(key).unwrap()
|
||||
}
|
||||
|
||||
pub fn get_page_mut_by_key(&mut self, key: DefaultKey) -> &mut Page {
|
||||
self.pages.get_mut(key).unwrap()
|
||||
}
|
||||
|
||||
pub fn remove_section<P: AsRef<Path>>(&mut self, path: P) -> Option<Section> {
|
||||
if let Some(k) = self.paths_to_sections.remove(path.as_ref()) {
|
||||
self.sections.remove(k)
|
||||
|
@ -411,16 +418,10 @@ impl Library {
|
|||
}
|
||||
}
|
||||
|
||||
/// Used in rebuild, to check if we know it already
|
||||
pub fn contains_section<P: AsRef<Path>>(&self, path: P) -> bool {
|
||||
self.paths_to_sections.contains_key(path.as_ref())
|
||||
}
|
||||
|
||||
/// Used in rebuild, to check if we know it already
|
||||
pub fn contains_page<P: AsRef<Path>>(&self, path: P) -> bool {
|
||||
self.paths_to_pages.contains_key(path.as_ref())
|
||||
}
|
||||
|
||||
/// This will check every section/page paths + the aliases and ensure none of them
|
||||
/// are colliding.
|
||||
/// Returns (path colliding, [list of files causing that collision])
|
||||
|
|
|
@@ -223,17 +223,18 @@ impl<'a> Paginator<'a> {
         library: &Library,
     ) -> Result<String> {
         let mut context = Context::new();
-        context.insert("config", &config);
         match self.root {
             PaginationRoot::Section(s) => {
                 context
                     .insert("section", &SerializingSection::from_section_basic(s, Some(library)));
                 context.insert("lang", &s.lang);
+                context.insert("config", &config.serialize(&s.lang));
             }
             PaginationRoot::Taxonomy(t, item) => {
                 context.insert("taxonomy", &t.kind);
                 context.insert("term", &item.serialize(library));
-                context.insert("lang", &t.kind.lang);
+                context.insert("lang", &t.lang);
+                context.insert("config", &config.serialize(&t.lang));
             }
         };
         context.insert("current_url", &pager.permalink);

@@ -413,12 +414,15 @@ mod tests {
         let taxonomy_item = TaxonomyItem {
             name: "Something".to_string(),
             slug: "something".to_string(),
+            path: "/tags/something".to_string(),
             permalink: "https://vincent.is/tags/something/".to_string(),
             pages: library.pages().keys().collect(),
         };
         let taxonomy = Taxonomy {
             kind: taxonomy_def,
+            lang: "en".to_owned(),
             slug: "tags".to_string(),
+            permalink: "/tags/".to_string(),
             items: vec![taxonomy_item.clone()],
         };
         let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item, &library);

@@ -446,12 +450,15 @@ mod tests {
         let taxonomy_item = TaxonomyItem {
             name: "Something".to_string(),
             slug: "something".to_string(),
+            path: "/some-tags/something/".to_string(),
             permalink: "https://vincent.is/some-tags/something/".to_string(),
             pages: library.pages().keys().collect(),
         };
         let taxonomy = Taxonomy {
             kind: taxonomy_def,
+            lang: "en".to_owned(),
             slug: "some-tags".to_string(),
+            permalink: "/some-tags/".to_string(),
             items: vec![taxonomy_item.clone()],
         };
         let paginator = Paginator::from_taxonomy(&taxonomy, &taxonomy_item, &library);

@@ -1,6 +1,7 @@
 use std::cmp::Ordering;

 use chrono::NaiveDateTime;
+use lexical_sort::natural_lexical_cmp;
 use rayon::prelude::*;
 use slotmap::DefaultKey;

@@ -39,6 +40,28 @@ pub fn sort_pages_by_date(
     (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
 }

+/// Takes a list of (page key, title, permalink) and sort them by title if possible.
+/// Uses a natural lexical comparison as defined by the lexical_sort crate.
+/// Pages without title will be put in the unsortable bucket.
+/// The permalink is used to break ties.
+pub fn sort_pages_by_title(
+    pages: Vec<(&DefaultKey, Option<&str>, &str)>,
+) -> (Vec<DefaultKey>, Vec<DefaultKey>) {
+    let (mut can_be_sorted, cannot_be_sorted): (Vec<_>, Vec<_>) =
+        pages.into_par_iter().partition(|page| page.1.is_some());
+
+    can_be_sorted.par_sort_unstable_by(|a, b| {
+        let ord = natural_lexical_cmp(a.1.unwrap(), b.1.unwrap());
+        if ord == Ordering::Equal {
+            a.2.cmp(&b.2)
+        } else {
+            ord
+        }
+    });
+
+    (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
+}
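For illustration, a minimal sketch of what the new sorter consumes and returns; the keys and permalinks here are invented for the example and are not part of the commit:

```rust
use slotmap::DenseSlotMap;

let mut dense = DenseSlotMap::new();
let (k_a, k_b) = (dense.insert("a"), dense.insert("b"));
// Input tuples are (page key, optional title, permalink used to break ties).
let input = vec![(&k_b, Some("track_13"), "/b/"), (&k_a, Some("track_1"), "/a/")];
let (sorted, unsortable) = sort_pages_by_title(input);
assert_eq!(sorted, vec![k_a, k_b]); // natural order puts track_1 before track_13
assert!(unsortable.is_empty());
```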
/// Takes a list of (page key, weight, permalink) and sort them by weight if possible
/// Pages without weight will be put in the unsortable bucket
/// The permalink is used to break ties

@@ -60,7 +83,8 @@ pub fn sort_pages_by_weight(
     (can_be_sorted.iter().map(|p| *p.0).collect(), cannot_be_sorted.iter().map(|p| *p.0).collect())
 }

-/// Find the lighter/heavier and earlier/later pages for all pages having a date/weight
+/// Find the lighter/heavier, earlier/later, and title_prev/title_next
+/// pages for all pages having a date/weight/title
 pub fn find_siblings(
     sorted: &[DefaultKey],
 ) -> Vec<(DefaultKey, Option<DefaultKey>, Option<DefaultKey>)> {

@@ -71,12 +95,12 @@ pub fn find_siblings(
         let mut with_siblings = (*key, None, None);

         if i > 0 {
-            // lighter / later
+            // lighter / later / title_prev
             with_siblings.1 = Some(sorted[i - 1]);
         }

         if i < length - 1 {
-            // heavier/earlier
+            // heavier / earlier / title_next
             with_siblings.2 = Some(sorted[i + 1]);
         }
         res.push(with_siblings);
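As a hedged usage sketch (keys invented for the example), the sibling wiring over an already-sorted list looks like this:

```rust
use slotmap::DenseSlotMap;

let mut dense = DenseSlotMap::new();
let (k1, k2, k3) = (dense.insert(1), dense.insert(2), dense.insert(3));
let siblings = find_siblings(&[k1, k2, k3]);
// Each entry is (key, neighbour before it in sort order, neighbour after it).
assert_eq!(siblings[0], (k1, None, Some(k2)));
assert_eq!(siblings[1], (k2, Some(k1), Some(k3)));
assert_eq!(siblings[2], (k3, Some(k2), None));
```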
@@ -90,7 +114,7 @@ mod tests {
     use slotmap::DenseSlotMap;
     use std::path::PathBuf;

-    use super::{find_siblings, sort_pages_by_date, sort_pages_by_weight};
+    use super::{find_siblings, sort_pages_by_date, sort_pages_by_title, sort_pages_by_weight};
     use crate::content::Page;
     use front_matter::PageFrontMatter;

@@ -101,6 +125,12 @@ mod tests {
         Page::new("content/hello.md", front_matter, &PathBuf::new())
     }

+    fn create_page_with_title(title: &str) -> Page {
+        let mut front_matter = PageFrontMatter::default();
+        front_matter.title = Some(title.to_string());
+        Page::new("content/hello.md", front_matter, &PathBuf::new())
+    }
+
     fn create_page_with_weight(weight: usize) -> Page {
         let mut front_matter = PageFrontMatter::default();
         front_matter.weight = Some(weight);

@@ -129,6 +159,51 @@ mod tests {
         assert_eq!(pages[2], key2);
     }

+    #[test]
+    fn can_sort_by_titles() {
+        let titles = vec![
+            "bagel",
+            "track_3",
+            "microkernel",
+            "métro",
+            "BART",
+            "Underground",
+            "track_13",
+            "μ-kernel",
+            "meter",
+            "track_1",
+        ];
+        let pages: Vec<Page> = titles.iter().map(|title| create_page_with_title(title)).collect();
+        let mut dense = DenseSlotMap::new();
+        let keys: Vec<_> = pages.iter().map(|p| dense.insert(p)).collect();
+        let input: Vec<_> = pages
+            .iter()
+            .enumerate()
+            .map(|(i, page)| (&keys[i], page.meta.title.as_deref(), page.permalink.as_ref()))
+            .collect();
+        let (sorted, _) = sort_pages_by_title(input);
+        // Should be sorted by title
+        let sorted_titles: Vec<_> = sorted
+            .iter()
+            .map(|key| dense.get(*key).unwrap().meta.title.as_ref().unwrap())
+            .collect();
+        assert_eq!(
+            sorted_titles,
+            vec![
+                "bagel",
+                "BART",
+                "μ-kernel",
+                "meter",
+                "métro",
+                "microkernel",
+                "track_1",
+                "track_3",
+                "track_13",
+                "Underground",
+            ]
+        );
+    }
+
     #[test]
     fn can_sort_by_weight() {
         let mut dense = DenseSlotMap::new();

@@ -18,6 +18,7 @@ use utils::slugs::slugify_paths;
 pub struct SerializedTaxonomyItem<'a> {
     name: &'a str,
     slug: &'a str,
+    path: &'a str,
     permalink: &'a str,
     pages: Vec<SerializingPage<'a>>,
 }

@@ -34,6 +35,7 @@ impl<'a> SerializedTaxonomyItem<'a> {
         SerializedTaxonomyItem {
             name: &item.name,
             slug: &item.slug,
+            path: &item.path,
             permalink: &item.permalink,
             pages,
         }

@@ -45,6 +47,7 @@ impl<'a> SerializedTaxonomyItem<'a> {
 pub struct TaxonomyItem {
     pub name: String,
     pub slug: String,
+    pub path: String,
     pub permalink: String,
     pub pages: Vec<DefaultKey>,
 }

@@ -52,7 +55,7 @@ pub struct TaxonomyItem {
 impl TaxonomyItem {
     pub fn new(
         name: &str,
-        taxonomy: &TaxonomyConfig,
+        lang: &str,
         taxo_slug: &str,
         config: &Config,
         keys: Vec<DefaultKey>,

@@ -73,16 +76,17 @@ impl TaxonomyItem {
         .collect();
         let (mut pages, ignored_pages) = sort_pages_by_date(data);
         let item_slug = slugify_paths(name, config.slugify.taxonomies);
-        let permalink = if taxonomy.lang != config.default_language {
-            config.make_permalink(&format!("/{}/{}/{}", taxonomy.lang, taxo_slug, item_slug))
+        let path = if lang != config.default_language {
+            format!("/{}/{}/{}/", lang, taxo_slug, item_slug)
         } else {
-            config.make_permalink(&format!("/{}/{}", taxo_slug, item_slug))
+            format!("/{}/{}/", taxo_slug, item_slug)
         };
+        let permalink = config.make_permalink(&path);

         // We still append pages without dates at the end
         pages.extend(ignored_pages);

-        TaxonomyItem { name: name.to_string(), permalink, slug: item_slug, pages }
+        TaxonomyItem { name: name.to_string(), permalink, path, slug: item_slug, pages }
     }

     pub fn serialize<'a>(&'a self, library: &'a Library) -> SerializedTaxonomyItem<'a> {

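A small sketch of the path/permalink split introduced here; the values are invented, `"en"` stands in for `config.default_language`, and `make_permalink` for the base-URL join:

```rust
let (lang, taxo_slug, item_slug) = ("fr", "tags", "rust");
let path = if lang != "en" {
    format!("/{}/{}/{}/", lang, taxo_slug, item_slug)
} else {
    format!("/{}/{}/", taxo_slug, item_slug)
};
assert_eq!(path, "/fr/tags/rust/");
// config.make_permalink(&path) would then prepend the site's base_url,
// e.g. "https://example.com/fr/tags/rust/".
```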
@@ -103,6 +107,8 @@ impl PartialEq for TaxonomyItem {
 #[derive(Debug, Clone, PartialEq, Serialize)]
 pub struct SerializedTaxonomy<'a> {
     kind: &'a TaxonomyConfig,
+    lang: &'a str,
+    permalink: &'a str,
     items: Vec<SerializedTaxonomyItem<'a>>,
 }

@@ -110,7 +116,12 @@ impl<'a> SerializedTaxonomy<'a> {
     pub fn from_taxonomy(taxonomy: &'a Taxonomy, library: &'a Library) -> Self {
         let items: Vec<SerializedTaxonomyItem> =
             taxonomy.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
-        SerializedTaxonomy { kind: &taxonomy.kind, items }
+        SerializedTaxonomy {
+            kind: &taxonomy.kind,
+            lang: &taxonomy.lang,
+            permalink: &taxonomy.permalink,
+            items,
+        }
     }
 }

@@ -118,7 +129,9 @@ impl<'a> SerializedTaxonomy<'a> {
 #[derive(Debug, Clone, PartialEq)]
 pub struct Taxonomy {
     pub kind: TaxonomyConfig,
+    pub lang: String,
     pub slug: String,
+    pub permalink: String,
     // this vec is sorted by the count of item
     pub items: Vec<TaxonomyItem>,
 }

@@ -126,6 +139,7 @@ pub struct Taxonomy {
 impl Taxonomy {
     fn new(
         kind: TaxonomyConfig,
+        lang: &str,
         config: &Config,
         items: HashMap<String, Vec<DefaultKey>>,
         library: &Library,

@@ -133,7 +147,7 @@ impl Taxonomy {
         let mut sorted_items = vec![];
         let slug = slugify_paths(&kind.name, config.slugify.taxonomies);
         for (name, pages) in items {
-            sorted_items.push(TaxonomyItem::new(&name, &kind, &slug, config, pages, library));
+            sorted_items.push(TaxonomyItem::new(&name, lang, &slug, config, pages, library));
         }
         //sorted_items.sort_by(|a, b| a.name.cmp(&b.name));
         sorted_items.sort_by(|a, b| match a.slug.cmp(&b.slug) {

@@ -143,7 +157,7 @@ impl Taxonomy {
         });
         sorted_items.dedup_by(|a, b| {
             // custom Eq impl checks for equal permalinks
-            // here we make sure all pages from a get coppied to b
+            // here we make sure all pages from a get copied to b
             // before dedup gets rid of it
             if a == b {
                 b.merge(a.to_owned());

@@ -152,7 +166,14 @@ impl Taxonomy {
                 false
             }
         });
-        Taxonomy { kind, slug, items: sorted_items }
+        let path = if lang != config.default_language {
+            format!("/{}/{}/", lang, slug)
+        } else {
+            format!("/{}/", slug)
+        };
+        let permalink = config.make_permalink(&path);
+
+        Taxonomy { kind, slug, lang: lang.to_owned(), permalink, items: sorted_items }
     }

     pub fn len(&self) -> usize {

@@ -171,8 +192,8 @@ impl Taxonomy {
         library: &Library,
     ) -> Result<String> {
         let mut context = Context::new();
-        context.insert("config", config);
-        context.insert("lang", &self.kind.lang);
+        context.insert("config", &config.serialize(&self.lang));
+        context.insert("lang", &self.lang);
         context.insert("term", &SerializedTaxonomyItem::from_item(item, library));
         context.insert("taxonomy", &self.kind);
         context.insert(

@@ -194,11 +215,11 @@ impl Taxonomy {
         library: &Library,
     ) -> Result<String> {
         let mut context = Context::new();
-        context.insert("config", config);
+        context.insert("config", &config.serialize(&self.lang));
         let terms: Vec<SerializedTaxonomyItem> =
             self.items.iter().map(|i| SerializedTaxonomyItem::from_item(i, library)).collect();
         context.insert("terms", &terms);
-        context.insert("lang", &self.kind.lang);
+        context.insert("lang", &self.lang);
         context.insert("taxonomy", &self.kind);
         context.insert("current_url", &config.make_permalink(&self.kind.name));
         context.insert("current_path", &format!("/{}/", self.kind.name));

@@ -215,11 +236,26 @@ impl Taxonomy {
     }

 pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonomy>> {
+    let mut slugs_to_lang = HashMap::new();
+
     let taxonomies_def = {
         let mut m = HashMap::new();
+        // the default language taxonomies
         for t in &config.taxonomies {
             let slug = slugify_paths(&t.name, config.slugify.taxonomies);
-            m.insert(format!("{}-{}", slug, t.lang), t);
+            let key = format!("{}-{}", slug, config.default_language);
+            slugs_to_lang.insert(key.clone(), config.default_language.as_str());
+            m.insert(key, t);
         }
+
+        // other languages taxonomies
+        for (code, options) in config.other_languages() {
+            for t in &options.taxonomies {
+                let slug = slugify_paths(&t.name, config.slugify.taxonomies);
+                let key = format!("{}-{}", slug, code);
+                slugs_to_lang.insert(key.clone(), code);
+                m.insert(key, t);
+            }
+        }
         m
     };
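To make the per-language keying concrete, a hedged sketch with made-up values, not from the commit:

```rust
use std::collections::HashMap;

// Taxonomies are keyed as "<slug>-<lang>", so a "tags" taxonomy defined for
// the default language and again for French stays distinct:
let mut slugs_to_lang: HashMap<String, &str> = HashMap::new();
slugs_to_lang.insert(format!("{}-{}", "tags", "en"), "en");
slugs_to_lang.insert(format!("{}-{}", "tags", "fr"), "fr");
assert_eq!(slugs_to_lang.len(), 2); // "tags-en" and "tags-fr" do not collide
```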
@@ -253,7 +289,13 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonom
     let mut taxonomies = vec![];

     for (name, taxo) in all_taxonomies {
-        taxonomies.push(Taxonomy::new(taxonomies_def[&name].clone(), config, taxo, library));
+        taxonomies.push(Taxonomy::new(
+            taxonomies_def[&name].clone(),
+            slugs_to_lang[&name],
+            config,
+            taxo,
+            library,
+        ));
     }

     Ok(taxonomies)

@@ -266,7 +308,7 @@ mod tests {

     use crate::content::Page;
     use crate::library::Library;
-    use config::{Config, Language, Slugify, Taxonomy as TaxonomyConfig};
+    use config::{Config, LanguageOptions, Slugify, Taxonomy as TaxonomyConfig};
     use utils::slugs::SlugifyStrategy;

     #[test]

@@ -275,21 +317,9 @@ mod tests {
         let mut library = Library::new(2, 0, false);

         config.taxonomies = vec![
-            TaxonomyConfig {
-                name: "categories".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "tags".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "authors".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
+            TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() },
         ];

         let mut page1 = Page::default();

@@ -338,6 +368,7 @@ mod tests {
         assert_eq!(tags.items[0].name, "db");
         assert_eq!(tags.items[0].slug, "db");
         assert_eq!(tags.items[0].permalink, "http://a-website.com/tags/db/");
+        assert_eq!(tags.items[0].path, "/tags/db/");
         assert_eq!(tags.items[0].pages.len(), 1);

         assert_eq!(tags.items[1].name, "js");

@@ -370,21 +401,9 @@ mod tests {
         let mut library = Library::new(2, 0, false);

         config.taxonomies = vec![
-            TaxonomyConfig {
-                name: "categories".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "tags".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "authors".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
+            TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() },
         ];

         let mut page1 = Page::default();

@@ -438,6 +457,7 @@ mod tests {
         assert_eq!(tags.items[1].name, "js");
         assert_eq!(tags.items[1].slug, "js");
         assert_eq!(tags.items[1].permalink, "http://a-website.com/tags/js/");
+        assert_eq!(tags.items[1].path, "/tags/js/");
         assert_eq!(tags.items[1].pages.len(), 2);

         assert_eq!(tags.items[2].name, "rust");

@@ -464,11 +484,8 @@ mod tests {
         let mut config = Config::default();
         let mut library = Library::new(2, 0, false);

-        config.taxonomies = vec![TaxonomyConfig {
-            name: "authors".to_string(),
-            lang: config.default_language.clone(),
-            ..TaxonomyConfig::default()
-        }];
+        config.taxonomies =
+            vec![TaxonomyConfig { name: "authors".to_string(), ..TaxonomyConfig::default() }];
         let mut page1 = Page::default();
         let mut taxo_page1 = HashMap::new();
         taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);

@@ -489,31 +506,22 @@ mod tests {
     #[test]
     fn can_make_taxonomies_in_multiple_languages() {
         let mut config = Config::default();
-        config.languages.push(Language { feed: false, code: "fr".to_string(), search: false });
+        config.languages.insert("fr".to_owned(), LanguageOptions::default());
         let mut library = Library::new(2, 0, true);

         config.taxonomies = vec![
-            TaxonomyConfig {
-                name: "categories".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "tags".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "auteurs".to_string(),
-                lang: "fr".to_string(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "tags".to_string(),
-                lang: "fr".to_string(),
-                ..TaxonomyConfig::default()
-            },
+            TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
         ];
+        let french_taxo = vec![
+            TaxonomyConfig { name: "auteurs".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
+        ];
+        let lang_options = config::LanguageOptions {
+            taxonomies: french_taxo,
+            ..config::LanguageOptions::default()
+        };
+        config.languages.insert("fr".to_owned(), lang_options);

         let mut page1 = Page::default();
         let mut taxo_page1 = HashMap::new();

@@ -547,7 +555,7 @@ mod tests {
         for x in taxonomies {
             match x.kind.name.as_ref() {
                 "tags" => {
-                    if x.kind.lang == "en" {
+                    if x.lang == "en" {
                         t = Some(x)
                     }
                 }

@@ -599,18 +607,15 @@ mod tests {
     fn can_make_utf8_taxonomies() {
         let mut config = Config::default();
         config.slugify.taxonomies = SlugifyStrategy::Safe;
-        config.languages.push(Language {
-            feed: false,
-            code: "fr".to_string(),
-            ..Language::default()
-        });
         let mut library = Library::new(2, 0, true);

-        config.taxonomies = vec![TaxonomyConfig {
-            name: "catégories".to_string(),
-            lang: "fr".to_string(),
-            ..TaxonomyConfig::default()
-        }];
+        let french_taxo =
+            vec![TaxonomyConfig { name: "catégories".to_string(), ..TaxonomyConfig::default() }];
+        let lang_options = config::LanguageOptions {
+            taxonomies: french_taxo,
+            ..config::LanguageOptions::default()
+        };
+        config.languages.insert("fr".to_owned(), lang_options);

         let mut page = Page::default();
         page.lang = "fr".to_string();

@@ -632,36 +637,21 @@ mod tests {
     fn can_make_slugified_taxonomies_in_multiple_languages() {
         let mut config = Config::default();
         config.slugify.taxonomies = SlugifyStrategy::On;
-        config.languages.push(Language {
-            feed: false,
-            code: "fr".to_string(),
-            ..Language::default()
-        });
         let mut library = Library::new(2, 0, true);

         config.taxonomies = vec![
-            TaxonomyConfig {
-                name: "categories".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "tags".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "auteurs".to_string(),
-                lang: "fr".to_string(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "tags".to_string(),
-                lang: "fr".to_string(),
-                ..TaxonomyConfig::default()
-            },
+            TaxonomyConfig { name: "categories".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
         ];

+        let french_taxo = vec![
+            TaxonomyConfig { name: "auteurs".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() },
+        ];
+        let lang_options = config::LanguageOptions {
+            taxonomies: french_taxo,
+            ..config::LanguageOptions::default()
+        };
+        config.languages.insert("fr".to_owned(), lang_options);
         let mut page1 = Page::default();
         let mut taxo_page1 = HashMap::new();
         taxo_page1.insert("tags".to_string(), vec!["rust".to_string(), "db".to_string()]);

@@ -694,7 +684,7 @@ mod tests {
         for x in taxonomies {
             match x.kind.name.as_ref() {
                 "tags" => {
-                    if x.kind.lang == "en" {
+                    if x.lang == "en" {
                         t = Some(x)
                     }
                 }

@@ -748,26 +738,10 @@ mod tests {
         let mut library = Library::new(2, 0, false);

         config.taxonomies = vec![
-            TaxonomyConfig {
-                name: "test-taxonomy".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "test taxonomy".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "test-taxonomy ".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "Test-Taxonomy ".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
+            TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() },
         ];

         let mut page1 = Page::default();

@@ -833,26 +807,10 @@ mod tests {
         let mut library = Library::new(2, 0, false);

         config.taxonomies = vec![
-            TaxonomyConfig {
-                name: "test-taxonomy".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "test taxonomy".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "test-taxonomy ".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "Test-Taxonomy ".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
+            TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() },
         ];

         let mut page1 = Page::default();

@@ -911,26 +869,10 @@ mod tests {
         let mut library = Library::new(2, 0, false);

         config.taxonomies = vec![
-            TaxonomyConfig {
-                name: "test-taxonomy".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "test taxonomy".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "test-taxonomy ".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
-            TaxonomyConfig {
-                name: "Test-Taxonomy ".to_string(),
-                lang: config.default_language.clone(),
-                ..TaxonomyConfig::default()
-            },
+            TaxonomyConfig { name: "test-taxonomy".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "test taxonomy".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "test-taxonomy ".to_string(), ..TaxonomyConfig::default() },
+            TaxonomyConfig { name: "Test-Taxonomy ".to_string(), ..TaxonomyConfig::default() },
         ];

         let mut page1 = Page::default();

@@ -966,7 +908,7 @@ mod tests {
         let tax = &taxonomies[0];

         // if names are different permalinks should also be different so
-        // the tems are still accessable
+        // the items are still accessible
         for term1 in tax.items.iter() {
             for term2 in tax.items.iter() {
                 assert!(term1.name == term2.name || term1.permalink != term2.permalink);

@@ -16,4 +16,4 @@ default-features = false
 features = ["blocking", "rustls-tls"]

 [dev-dependencies]
-mockito = "0.28"
+mockito = "0.30"

@ -7,7 +7,8 @@ include = ["src/**/*"]
|
|||
|
||||
[dependencies]
|
||||
tera = { version = "1", features = ["preserve_order"] }
|
||||
syntect = "4.1"
|
||||
# TODO: go back to version 4/5 once https://github.com/trishume/syntect/pull/337 is merged
|
||||
syntect = { git = "https://github.com/Keats/syntect.git", branch = "scopestack" }
|
||||
pulldown-cmark = { version = "0.8", default-features = false }
|
||||
serde = "1"
|
||||
serde_derive = "1"
|
||||
|
|
components/rendering/src/codeblock/fence.rs (new file, 112 lines)
@@ -0,0 +1,112 @@
use std::ops::RangeInclusive;

fn parse_range(s: &str) -> Option<RangeInclusive<usize>> {
    match s.find('-') {
        Some(dash) => {
            let mut from = s[..dash].parse().ok()?;
            let mut to = s[dash + 1..].parse().ok()?;
            if to < from {
                std::mem::swap(&mut from, &mut to);
            }
            Some(from..=to)
        }
        None => {
            let val = s.parse().ok()?;
            Some(val..=val)
        }
    }
}

#[derive(Debug)]
pub struct FenceSettings<'a> {
    pub language: Option<&'a str>,
    pub line_numbers: bool,
    pub line_number_start: usize,
    pub highlight_lines: Vec<RangeInclusive<usize>>,
    pub hide_lines: Vec<RangeInclusive<usize>>,
}

impl<'a> FenceSettings<'a> {
    pub fn new(fence_info: &'a str) -> Self {
        let mut me = Self {
            language: None,
            line_numbers: false,
            line_number_start: 1,
            highlight_lines: Vec::new(),
            hide_lines: Vec::new(),
        };

        for token in FenceIter::new(fence_info) {
            match token {
                FenceToken::Language(lang) => me.language = Some(lang),
                FenceToken::EnableLineNumbers => me.line_numbers = true,
                FenceToken::InitialLineNumber(l) => me.line_number_start = l,
                FenceToken::HighlightLines(lines) => me.highlight_lines.extend(lines),
                FenceToken::HideLines(lines) => me.hide_lines.extend(lines),
            }
        }

        me
    }
}

#[derive(Debug)]
enum FenceToken<'a> {
    Language(&'a str),
    EnableLineNumbers,
    InitialLineNumber(usize),
    HighlightLines(Vec<RangeInclusive<usize>>),
    HideLines(Vec<RangeInclusive<usize>>),
}

struct FenceIter<'a> {
    split: std::str::Split<'a, char>,
}

impl<'a> FenceIter<'a> {
    fn new(fence_info: &'a str) -> Self {
        Self { split: fence_info.split(',') }
    }

    fn parse_ranges(token: Option<&str>) -> Vec<RangeInclusive<usize>> {
        let mut ranges = Vec::new();
        for range in token.unwrap_or("").split(' ') {
            if let Some(range) = parse_range(range) {
                ranges.push(range);
            }
        }
        ranges
    }
}

impl<'a> Iterator for FenceIter<'a> {
    type Item = FenceToken<'a>;

    fn next(&mut self) -> Option<FenceToken<'a>> {
        loop {
            let tok = self.split.next()?.trim();

            let mut tok_split = tok.split('=');
            match tok_split.next().unwrap_or("").trim() {
                "" => continue,
                "linenostart" => {
                    if let Some(l) = tok_split.next().and_then(|s| s.parse().ok()) {
                        return Some(FenceToken::InitialLineNumber(l));
                    }
                }
                "linenos" => return Some(FenceToken::EnableLineNumbers),
                "hl_lines" => {
                    let ranges = Self::parse_ranges(tok_split.next());
                    return Some(FenceToken::HighlightLines(ranges));
                }
                "hide_lines" => {
                    let ranges = Self::parse_ranges(tok_split.next());
                    return Some(FenceToken::HideLines(ranges));
                }
                lang => {
                    return Some(FenceToken::Language(lang));
                }
            }
        }
    }
}
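For illustration, a sketch of how a fence info string is parsed by the code above; the fence string is invented for the example:

```rust
let fence = FenceSettings::new("rust,linenos,linenostart=10,hl_lines=3-4 8,hide_lines=2");
assert_eq!(fence.language, Some("rust"));
assert!(fence.line_numbers);
assert_eq!(fence.line_number_start, 10);
assert_eq!(fence.highlight_lines, vec![3..=4, 8..=8]); // reversed ranges like "4-3" get swapped
assert_eq!(fence.hide_lines, vec![2..=2]);
```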
components/rendering/src/codeblock/highlight.rs (new file, 226 lines)
@@ -0,0 +1,226 @@
use std::fmt::Write;

use config::highlighting::{SyntaxAndTheme, CLASS_STYLE};
use syntect::easy::HighlightLines;
use syntect::highlighting::{Color, Theme};
use syntect::html::{
    styled_line_to_highlighted_html, tokens_to_classed_spans, ClassStyle, IncludeBackground,
};
use syntect::parsing::{ParseState, ScopeStack, SyntaxReference, SyntaxSet};

/// Not public, but from syntect::html
fn write_css_color(s: &mut String, c: Color) {
    if c.a != 0xFF {
        write!(s, "#{:02x}{:02x}{:02x}{:02x}", c.r, c.g, c.b, c.a).unwrap();
    } else {
        write!(s, "#{:02x}{:02x}{:02x}", c.r, c.g, c.b).unwrap();
    }
}

pub(crate) struct ClassHighlighter<'config> {
    syntax_set: &'config SyntaxSet,
    open_spans: isize,
    parse_state: ParseState,
    scope_stack: ScopeStack,
}

impl<'config> ClassHighlighter<'config> {
    pub fn new(syntax: &'config SyntaxReference, syntax_set: &'config SyntaxSet) -> Self {
        let parse_state = ParseState::new(syntax);
        Self { syntax_set, open_spans: 0, parse_state, scope_stack: ScopeStack::new() }
    }

    /// Parse the line of code and update the internal HTML buffer with tagged HTML
    ///
    /// *Note:* This function requires `line` to include a newline at the end and
    /// also use of the `load_defaults_newlines` version of the syntaxes.
    pub fn highlight_line(&mut self, line: &str) -> String {
        debug_assert!(line.ends_with("\n"));
        let parsed_line = self.parse_state.parse_line(line, &self.syntax_set);
        let (formatted_line, delta) = tokens_to_classed_spans(
            line,
            parsed_line.as_slice(),
            CLASS_STYLE,
            &mut self.scope_stack,
        );
        self.open_spans += delta;
        formatted_line
    }

    /// Close all open `<span>` tags and return the finished HTML string
    pub fn finalize(&mut self) -> String {
        let mut html = String::with_capacity((self.open_spans * 7) as usize);
        for _ in 0..self.open_spans {
            html.push_str("</span>");
        }
        html
    }
}

pub(crate) struct InlineHighlighter<'config> {
    theme: &'config Theme,
    fg_color: String,
    bg_color: Color,
    syntax_set: &'config SyntaxSet,
    h: HighlightLines<'config>,
}

impl<'config> InlineHighlighter<'config> {
    pub fn new(
        syntax: &'config SyntaxReference,
        syntax_set: &'config SyntaxSet,
        theme: &'config Theme,
    ) -> Self {
        let h = HighlightLines::new(syntax, theme);
        let mut color = String::new();
        write_css_color(&mut color, theme.settings.foreground.unwrap_or(Color::BLACK));
        let fg_color = format!(r#" style="color:{};""#, color);
        let bg_color = theme.settings.background.unwrap_or(Color::WHITE);
        Self { theme, fg_color, bg_color, syntax_set, h }
    }

    pub fn highlight_line(&mut self, line: &str) -> String {
        let regions = self.h.highlight(line, &self.syntax_set);
        // TODO: add a param like `IncludeBackground` for `IncludeForeground` in syntect
        let highlighted =
            styled_line_to_highlighted_html(&regions, IncludeBackground::IfDifferent(self.bg_color));
        highlighted.replace(&self.fg_color, "")
    }
}

pub(crate) enum SyntaxHighlighter<'config> {
    Inlined(InlineHighlighter<'config>),
    Classed(ClassHighlighter<'config>),
    /// We might not want highlighting but we want line numbers or to hide some lines
    NoHighlight,
}

impl<'config> SyntaxHighlighter<'config> {
    pub fn new(highlight_code: bool, s: SyntaxAndTheme<'config>) -> Self {
        if highlight_code {
            if let Some(theme) = s.theme {
                SyntaxHighlighter::Inlined(InlineHighlighter::new(s.syntax, s.syntax_set, theme))
            } else {
                SyntaxHighlighter::Classed(ClassHighlighter::new(s.syntax, s.syntax_set))
            }
        } else {
            SyntaxHighlighter::NoHighlight
        }
    }

    pub fn highlight_line(&mut self, line: &str) -> String {
        use SyntaxHighlighter::*;

        match self {
            Inlined(h) => h.highlight_line(line),
            Classed(h) => h.highlight_line(line),
            NoHighlight => line.to_owned(),
        }
    }

    pub fn finalize(&mut self) -> Option<String> {
        use SyntaxHighlighter::*;

        match self {
            Inlined(_) | NoHighlight => None,
            Classed(h) => Some(h.finalize()),
        }
    }

    /// Inlined needs to set the background/foreground colour on <pre>
    pub fn pre_style(&self) -> Option<String> {
        use SyntaxHighlighter::*;

        match self {
            Classed(_) | NoHighlight => None,
            Inlined(h) => {
                let mut styles = String::from("background-color:");
                write_css_color(&mut styles, h.theme.settings.background.unwrap_or(Color::WHITE));
                styles.push_str(";color:");
                write_css_color(&mut styles, h.theme.settings.foreground.unwrap_or(Color::BLACK));
                styles.push(';');
                Some(styles)
            }
        }
    }

    /// Classed needs to set a class on the pre
    pub fn pre_class(&self) -> Option<String> {
        use SyntaxHighlighter::*;

        match self {
            Classed(_) => {
                if let ClassStyle::SpacedPrefixed { prefix } = CLASS_STYLE {
                    Some(format!("{}code", prefix))
                } else {
                    unreachable!()
                }
            }
            Inlined(_) | NoHighlight => None,
        }
    }

    /// Inlined needs to set the background/foreground colour
    pub fn mark_style(&self) -> Option<String> {
        use SyntaxHighlighter::*;

        match self {
            Classed(_) | NoHighlight => None,
            Inlined(h) => {
                let mut styles = String::from("background-color:");
                write_css_color(
                    &mut styles,
                    h.theme.settings.line_highlight.unwrap_or(Color { r: 255, g: 255, b: 0, a: 0 }),
                );
                styles.push_str(";");
                Some(styles)
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use config::highlighting::resolve_syntax_and_theme;
    use config::Config;
    use syntect::util::LinesWithEndings;

    #[test]
    fn can_highlight_with_classes() {
        let mut config = Config::default();
        config.markdown.highlight_code = true;
        let code = "import zen\nz = x + y\nprint('hello')\n";
        let syntax_and_theme = resolve_syntax_and_theme(Some("py"), &config);
        let mut highlighter =
            ClassHighlighter::new(syntax_and_theme.syntax, syntax_and_theme.syntax_set);
        let mut out = String::new();
        for line in LinesWithEndings::from(&code) {
            out.push_str(&highlighter.highlight_line(line));
        }
        out.push_str(&highlighter.finalize());

        assert!(out.starts_with("<span class"));
        assert!(out.ends_with("</span>"));
        assert!(out.contains("z-"));
    }

    #[test]
    fn can_highlight_inline() {
        let mut config = Config::default();
        config.markdown.highlight_code = true;
        let code = "import zen\nz = x + y\nprint('hello')\n";
        let syntax_and_theme = resolve_syntax_and_theme(Some("py"), &config);
        let mut highlighter = InlineHighlighter::new(
            syntax_and_theme.syntax,
            syntax_and_theme.syntax_set,
            syntax_and_theme.theme.unwrap(),
        );
        let mut out = String::new();
        for line in LinesWithEndings::from(&code) {
            out.push_str(&highlighter.highlight_line(line));
        }

        assert!(out.starts_with(r#"<span style="color"#));
        assert!(out.ends_with("</span>"));
    }
}
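A hedged sketch of the dispatch above: with highlighting disabled, the `NoHighlight` variant passes lines through untouched and emits no `<pre>` style, class, or trailing spans. It leans on `resolve_syntax_and_theme` the same way the tests do:

```rust
use config::highlighting::resolve_syntax_and_theme;
use config::Config;

let config = Config::default();
let s = resolve_syntax_and_theme(Some("py"), &config);
let mut h = SyntaxHighlighter::new(false, s);
assert_eq!(h.highlight_line("let x = 1;\n"), "let x = 1;\n");
assert!(h.pre_style().is_none() && h.pre_class().is_none() && h.finalize().is_none());
```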
components/rendering/src/codeblock/mod.rs (new file, 186 lines)
@@ -0,0 +1,186 @@
mod fence;
mod highlight;

use std::ops::RangeInclusive;

use syntect::util::LinesWithEndings;

use crate::codeblock::highlight::SyntaxHighlighter;
use config::highlighting::{resolve_syntax_and_theme, HighlightSource};
use config::Config;
pub(crate) use fence::FenceSettings;

fn opening_html(
    language: Option<&str>,
    pre_style: Option<String>,
    pre_class: Option<String>,
    line_numbers: bool,
) -> String {
    let mut html = String::from("<pre");
    if line_numbers {
        html.push_str(" data-linenos");
    }
    let mut classes = String::new();

    if let Some(lang) = language {
        classes.push_str("language-");
        classes.push_str(&lang);
        classes.push_str(" ");

        html.push_str(" data-lang=\"");
        html.push_str(lang);
        html.push('"');
    }

    if let Some(styles) = pre_style {
        html.push_str(" style=\"");
        html.push_str(styles.as_str());
        html.push('"');
    }

    if let Some(c) = pre_class {
        classes.push_str(&c);
    }

    if !classes.is_empty() {
        html.push_str(" class=\"");
        html.push_str(&classes);
        html.push('"');
    }

    html.push_str("><code");
    if let Some(lang) = language {
        html.push_str(" class=\"language-");
        html.push_str(lang);
        html.push_str("\" data-lang=\"");
        html.push_str(lang);
        html.push('"');
    }
    html.push('>');
    html
}

pub struct CodeBlock<'config> {
    highlighter: SyntaxHighlighter<'config>,
    // fence options
    line_numbers: bool,
    line_number_start: usize,
    highlight_lines: Vec<RangeInclusive<usize>>,
    hide_lines: Vec<RangeInclusive<usize>>,
}

impl<'config> CodeBlock<'config> {
    pub fn new<'fence_info>(
        fence: FenceSettings<'fence_info>,
        config: &'config Config,
        // path to the current file if there is one, to point where the error is
        path: Option<&'config str>,
    ) -> (Self, String) {
        let syntax_and_theme = resolve_syntax_and_theme(fence.language, config);
        if syntax_and_theme.source == HighlightSource::NotFound {
            let lang = fence.language.unwrap();
            if let Some(p) = path {
                eprintln!("Warning: Highlight language {} not found in {}", lang, p);
            } else {
                eprintln!("Warning: Highlight language {} not found", lang);
            }
        }
        let highlighter = SyntaxHighlighter::new(config.markdown.highlight_code, syntax_and_theme);

        let html_start = opening_html(
            fence.language,
            highlighter.pre_style(),
            highlighter.pre_class(),
            fence.line_numbers,
        );
        (
            Self {
                highlighter,
                line_numbers: fence.line_numbers,
                line_number_start: fence.line_number_start,
                highlight_lines: fence.highlight_lines,
                hide_lines: fence.hide_lines,
            },
            html_start,
        )
    }

    pub fn highlight(&mut self, content: &str) -> String {
        let mut buffer = String::new();
        let mark_style = self.highlighter.mark_style();

        if self.line_numbers {
            buffer.push_str("<table><tbody>");
        }

        // syntect leaking here in this file
        for (i, line) in LinesWithEndings::from(&content).enumerate() {
            let one_indexed = i + 1;
            // first, do we need to skip that line?
            let mut skip = false;
            for range in &self.hide_lines {
                if range.contains(&one_indexed) {
                    skip = true;
                    break;
                }
            }
            if skip {
                continue;
            }

            // Next, is it supposed to be highlighted?
            let mut is_highlighted = false;
            for range in &self.highlight_lines {
                if range.contains(&one_indexed) {
                    is_highlighted = true;
                }
            }

            if self.line_numbers {
                buffer.push_str("<tr><td>");
                let num = format!("{}", self.line_number_start + i);
                if is_highlighted {
                    buffer.push_str("<mark");
                    if let Some(ref s) = mark_style {
                        buffer.push_str(" style=\"");
                        buffer.push_str(&s);
                        buffer.push_str("\">");
                    } else {
                        buffer.push_str(">")
                    }
                    buffer.push_str(&num);
                    buffer.push_str("</mark>");
                } else {
                    buffer.push_str(&num);
                }
                buffer.push_str("</td><td>");
            }

            let highlighted_line = self.highlighter.highlight_line(line);
            if is_highlighted {
                buffer.push_str("<mark");
                if let Some(ref s) = mark_style {
                    buffer.push_str(" style=\"");
                    buffer.push_str(&s);
                    buffer.push_str("\">");
                } else {
                    buffer.push_str(">")
                }
                buffer.push_str(&highlighted_line);
                buffer.push_str("</mark>");
            } else {
                buffer.push_str(&highlighted_line);
            }
        }

        if let Some(rest) = self.highlighter.finalize() {
            buffer.push_str(&rest);
        }

        if self.line_numbers {
            buffer.push_str("</tr></tbody></table>");
        }

        buffer
    }
}
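For illustration, what `opening_html` produces for a plain `rs` fence with class-based highlighting and no line numbers; since the function is private, a sketch like this would have to live inside the module. Note the trailing space in the `class` attribute, exactly as written above:

```rust
let html = opening_html(Some("rs"), None, None, false);
assert_eq!(
    html,
    r#"<pre data-lang="rs" class="language-rs "><code class="language-rs" data-lang="rs">"#
);
```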
@ -20,12 +20,13 @@ impl<'a> RenderContext<'a> {
|
|||
pub fn new(
|
||||
tera: &'a Tera,
|
||||
config: &'a Config,
|
||||
lang: &'a str,
|
||||
current_page_permalink: &'a str,
|
||||
permalinks: &'a HashMap<String, String>,
|
||||
insert_anchor: InsertAnchor,
|
||||
) -> RenderContext<'a> {
|
||||
let mut tera_context = Context::new();
|
||||
tera_context.insert("config", config);
|
||||
tera_context.insert("config", &config.serialize(lang));
|
||||
Self {
|
||||
tera: Cow::Borrowed(tera),
|
||||
tera_context,
|
||||
|
|
|
@@ -1,3 +1,4 @@
+mod codeblock;
 mod context;
 mod markdown;
 mod shortcode;

@@ -1,11 +1,9 @@
 use lazy_static::lazy_static;
 use pulldown_cmark as cmark;
 use regex::Regex;
-use syntect::html::{start_highlighted_html_snippet, IncludeBackground};

 use crate::context::RenderContext;
 use crate::table_of_contents::{make_table_of_contents, Heading};
-use config::highlighting::THEME_SET;
 use errors::{Error, Result};
 use front_matter::InsertAnchor;
 use utils::site::resolve_internal_link;

@@ -13,10 +11,7 @@ use utils::slugs::slugify_anchors;
 use utils::vec::InsertMany;

 use self::cmark::{Event, LinkType, Options, Parser, Tag};

-mod codeblock;
-mod fence;
-use self::codeblock::CodeBlock;
+use crate::codeblock::{CodeBlock, FenceSettings};

 const CONTINUE_READING: &str = "<span id=\"continue-reading\"></span>";
 const ANCHOR_LINK_TEMPLATE: &str = "anchor-link.html";

@@ -26,11 +21,13 @@ pub struct Rendered {
     pub body: String,
     pub summary_len: Option<usize>,
     pub toc: Vec<Heading>,
-    pub internal_links_with_anchors: Vec<(String, String)>,
+    /// Links to site-local pages: relative path plus optional anchor target.
+    pub internal_links: Vec<(String, Option<String>)>,
+    /// Outgoing links to external webpages (i.e. HTTP(S) targets).
     pub external_links: Vec<String>,
 }

-// tracks a heading in a slice of pulldown-cmark events
+/// Tracks a heading in a slice of pulldown-cmark events
 #[derive(Debug)]
 struct HeadingRef {
     start_idx: usize,

@@ -49,7 +46,7 @@ impl HeadingRef {
 // for example an article could have several titles named Example
 // We add a counter after the slug if the slug is already present, which
 // means we will have example, example-1, example-2 etc
-fn find_anchor(anchors: &[String], name: String, level: u8) -> String {
+fn find_anchor(anchors: &[String], name: String, level: u16) -> String {
     if level == 0 && !anchors.contains(&name) {
         return name;
     }

@@ -62,13 +59,13 @@ fn find_anchor(anchors: &[String], name: String, level: u8) -> String {
     find_anchor(anchors, name, level + 1)
 }
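A sketch of the deduplication the comment above describes, assuming the counter suffixing works as stated (the anchor names are invented for the example):

```rust
let anchors = vec!["example".to_string(), "example-1".to_string()];
// "example" is taken, and so is "example-1", so the next free suffix wins:
assert_eq!(find_anchor(&anchors, "example".to_string(), 0), "example-2");
// A fresh name at level 0 is returned unchanged:
assert_eq!(find_anchor(&anchors, "other".to_string(), 0), "other");
```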
-// Returns whether the given string starts with a schema.
-//
-// Although there exists [a list of registered URI schemes][uri-schemes], a link may use arbitrary,
-// private schemes. This function checks if the given string starts with something that just looks
-// like a scheme, i.e., a case-insensitive identifier followed by a colon.
-//
-// [uri-schemes]: https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml
+/// Returns whether the given string starts with a schema.
+///
+/// Although there exists [a list of registered URI schemes][uri-schemes], a link may use arbitrary,
+/// private schemes. This function checks if the given string starts with something that just looks
+/// like a scheme, i.e., a case-insensitive identifier followed by a colon.
+///
+/// [uri-schemes]: https://www.iana.org/assignments/uri-schemes/uri-schemes.xhtml
 fn starts_with_schema(s: &str) -> bool {
     lazy_static! {
         static ref PATTERN: Regex = Regex::new(r"^[0-9A-Za-z\-]+:").unwrap();

@@ -77,14 +74,14 @@ fn starts_with_schema(s: &str) -> bool {
     PATTERN.is_match(s)
 }

-// Colocated asset links refers to the files in the same directory,
-// there it should be a filename only
+/// Colocated asset links refers to the files in the same directory,
+/// there it should be a filename only
 fn is_colocated_asset_link(link: &str) -> bool {
     !link.contains('/') // http://, ftp://, ../ etc
         && !starts_with_schema(link)
 }

-// Returns whether a link starts with an HTTP(s) scheme.
+/// Returns whether a link starts with an HTTP(s) scheme.
 fn is_external_link(link: &str) -> bool {
     link.starts_with("http:") || link.starts_with("https:")
 }
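To make the two predicates concrete, a small sketch (example links invented):

```rust
assert!(is_external_link("https://example.com/page"));
assert!(!is_external_link("ftp://example.com/file")); // only http(s) counts
assert!(is_colocated_asset_link("diagram.png"));
assert!(!is_colocated_asset_link("../diagram.png")); // contains a slash
assert!(!is_colocated_asset_link("mailto:someone@example.com")); // looks like a scheme
```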
|
@ -93,7 +90,7 @@ fn fix_link(
|
|||
link_type: LinkType,
|
||||
link: &str,
|
||||
context: &RenderContext,
|
||||
internal_links_with_anchors: &mut Vec<(String, String)>,
|
||||
internal_links: &mut Vec<(String, Option<String>)>,
|
||||
external_links: &mut Vec<String>,
|
||||
) -> Result<String> {
|
||||
if link_type == LinkType::Email {
|
||||
|
@ -107,10 +104,7 @@ fn fix_link(
|
|||
let result = if link.starts_with("@/") {
|
||||
match resolve_internal_link(&link, &context.permalinks) {
|
||||
Ok(resolved) => {
|
||||
if resolved.anchor.is_some() {
|
||||
internal_links_with_anchors
|
||||
.push((resolved.md_path.unwrap(), resolved.anchor.unwrap()));
|
||||
}
|
||||
internal_links.push((resolved.md_path, resolved.anchor));
|
||||
resolved.permalink
|
||||
}
|
||||
Err(_) => {
|
||||
|
@ -166,16 +160,21 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
|
|||
static ref EMOJI_REPLACER: gh_emoji::Replacer = gh_emoji::Replacer::new();
|
||||
}
|
||||
|
||||
let path = context
|
||||
.tera_context
|
||||
.get("page")
|
||||
.or(context.tera_context.get("section"))
|
||||
        .map(|x| x.as_object().unwrap().get("relative_path").unwrap().as_str().unwrap());
    // the rendered html
    let mut html = String::with_capacity(content.len());
    // Set while parsing
    let mut error = None;

-   let mut highlighter: Option<CodeBlock> = None;
+   let mut code_block: Option<CodeBlock> = None;

    let mut inserted_anchors: Vec<String> = vec![];
    let mut headings: Vec<Heading> = vec![];
-   let mut internal_links_with_anchors = Vec::new();
+   let mut internal_links = Vec::new();
    let mut external_links = Vec::new();

    let mut opts = Options::empty();

@@ -196,7 +195,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
    match event {
        Event::Text(text) => {
            // if we are in the middle of a highlighted code block
-           if let Some(ref mut code_block) = highlighter {
+           if let Some(ref mut code_block) = code_block {
                let html = code_block.highlight(&text);
                Event::Html(html.into())
            } else if context.config.markdown.render_emoji {

@@ -208,62 +207,20 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
            }
        }
        Event::Start(Tag::CodeBlock(ref kind)) => {
-           let language = match kind {
+           let fence = match kind {
                cmark::CodeBlockKind::Fenced(fence_info) => {
-                   let fence_info = fence::FenceSettings::new(fence_info);
-                   fence_info.language
+                   FenceSettings::new(fence_info)
                }
-               _ => None,
+               _ => FenceSettings::new(""),
            };

-           if !context.config.highlight_code() {
-               if let Some(lang) = language {
-                   let html = format!(
-                       r#"<pre><code class="language-{}" data-lang="{}">"#,
-                       lang, lang
-                   );
-                   return Event::Html(html.into());
-               }
-               return Event::Html("<pre><code>".into());
-           }
-
-           let theme = &THEME_SET.themes[context.config.highlight_theme()];
-           match kind {
-               cmark::CodeBlockKind::Indented => (),
-               cmark::CodeBlockKind::Fenced(fence_info) => {
-                   // This selects the background color the same way that
-                   // start_coloured_html_snippet does
-                   let color = theme
-                       .settings
-                       .background
-                       .unwrap_or(::syntect::highlighting::Color::WHITE);
-
-                   highlighter = Some(CodeBlock::new(
-                       fence_info,
-                       &context.config,
-                       IncludeBackground::IfDifferent(color),
-                   ));
-               }
-           };
-           let snippet = start_highlighted_html_snippet(theme);
-           let mut html = snippet.0;
-           if let Some(lang) = language {
-               html.push_str(&format!(
-                   r#"<code class="language-{}" data-lang="{}">"#,
-                   lang, lang
-               ));
-           } else {
-               html.push_str("<code>");
-           }
-           Event::Html(html.into())
+           let (block, begin) = CodeBlock::new(fence, &context.config, path);
+           code_block = Some(block);
+           Event::Html(begin.into())
        }
        Event::End(Tag::CodeBlock(_)) => {
-           if !context.config.highlight_code() {
-               return Event::Html("</code></pre>\n".into());
-           }
-           // reset highlight and close the code block
-           highlighter = None;
-           Event::Html("</code></pre>".into())
+           code_block = None;
+           Event::Html("</code></pre>\n".into())
        }
        Event::Start(Tag::Image(link_type, src, title)) => {
            if is_colocated_asset_link(&src) {

@@ -282,7 +239,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
                link_type,
                &link,
                context,
-               &mut internal_links_with_anchors,
+               &mut internal_links,
                &mut external_links,
            ) {
                Ok(fixed_link) => fixed_link,

@@ -417,7 +374,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
        summary_len: if has_summary { html.find(CONTINUE_READING) } else { None },
        body: html,
        toc: make_table_of_contents(headings),
-       internal_links_with_anchors,
+       internal_links,
        external_links,
    })
}
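Note: a hedged sketch of the new code-block flow the hunks above introduce. `CodeBlock::new` now returns the block plus its opening HTML, and the event loop keeps the block alive until the closing fence. Only the names visible in the diff are real; the surrounding types here are simplified stand-ins.

```rust
// Simplified stand-ins; only the control flow mirrors the diff.
struct CodeBlock;
impl CodeBlock {
    // In the diff: CodeBlock::new(fence, &context.config, path) -> (block, begin)
    fn new(_fence: &str) -> (Self, String) {
        (CodeBlock, "<pre><code>".to_string())
    }
    fn highlight(&mut self, text: &str) -> String {
        text.to_string() // the real code runs syntect here
    }
}

fn main() {
    let mut code_block: Option<CodeBlock> = None;
    let events = ["<fence>", "let x = 1;", "</fence>"];
    let mut html = String::new();
    for ev in events {
        match ev {
            "<fence>" => {
                // Opening fence: create the block and emit its begin HTML.
                let (block, begin) = CodeBlock::new("rust");
                code_block = Some(block);
                html.push_str(&begin);
            }
            "</fence>" => {
                // Closing fence: drop the block and close the element.
                code_block = None;
                html.push_str("</code></pre>\n");
            }
            text => {
                // Text inside an open block goes through the highlighter.
                if let Some(ref mut block) = code_block {
                    html.push_str(&block.highlight(text));
                }
            }
        }
    }
    assert!(html.starts_with("<pre><code>"));
}
```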
@@ -1,182 +0,0 @@
-use config::highlighting::{get_highlighter, SYNTAX_SET, THEME_SET};
-use config::Config;
-use std::cmp::min;
-use std::collections::HashSet;
-use syntect::easy::HighlightLines;
-use syntect::highlighting::{Color, Style, Theme};
-use syntect::html::{styled_line_to_highlighted_html, IncludeBackground};
-use syntect::parsing::SyntaxSet;
-
-use super::fence::{FenceSettings, Range};
-
-pub struct CodeBlock<'config> {
-    highlighter: HighlightLines<'static>,
-    extra_syntax_set: Option<&'config SyntaxSet>,
-    background: IncludeBackground,
-    theme: &'static Theme,
-
-    /// List of ranges of lines to highlight.
-    highlight_lines: Vec<Range>,
-    /// The number of lines in the code block being processed.
-    num_lines: usize,
-}
-
-impl<'config> CodeBlock<'config> {
-    pub fn new(fence_info: &str, config: &'config Config, background: IncludeBackground) -> Self {
-        let fence_info = FenceSettings::new(fence_info);
-        let theme = &THEME_SET.themes[config.highlight_theme()];
-        let (highlighter, in_extra) = get_highlighter(fence_info.language, config);
-        Self {
-            highlighter,
-            extra_syntax_set: match in_extra {
-                true => config.markdown.extra_syntax_set.as_ref(),
-                false => None,
-            },
-            background,
-            theme,
-
-            highlight_lines: fence_info.highlight_lines,
-            num_lines: 0,
-        }
-    }
-
-    pub fn highlight(&mut self, text: &str) -> String {
-        let highlighted =
-            self.highlighter.highlight(text, self.extra_syntax_set.unwrap_or(&SYNTAX_SET));
-        let line_boundaries = self.find_line_boundaries(&highlighted);
-
-        // First we make sure that `highlighted` is split at every line
-        // boundary. The `styled_line_to_highlighted_html` function will
-        // merge split items with identical styles, so this is not a
-        // problem.
-        //
-        // Note that this invalidates the values in `line_boundaries`.
-        // The `perform_split` function takes it by value to ensure that
-        // we don't use it later.
-        let mut highlighted = perform_split(&highlighted, line_boundaries);
-
-        let hl_background =
-            self.theme.settings.line_highlight.unwrap_or(Color { r: 255, g: 255, b: 0, a: 0 });
-
-        let hl_lines = self.get_highlighted_lines();
-        color_highlighted_lines(&mut highlighted, &hl_lines, hl_background);
-
-        styled_line_to_highlighted_html(&highlighted, self.background)
-    }
-
-    fn find_line_boundaries(&mut self, styled: &[(Style, &str)]) -> Vec<StyledIdx> {
-        let mut boundaries = Vec::new();
-        for (vec_idx, (_style, s)) in styled.iter().enumerate() {
-            for (str_idx, character) in s.char_indices() {
-                if character == '\n' {
-                    boundaries.push(StyledIdx { vec_idx, str_idx });
-                }
-            }
-        }
-        self.num_lines = boundaries.len() + 1;
-        boundaries
-    }
-
-    fn get_highlighted_lines(&self) -> HashSet<usize> {
-        let mut lines = HashSet::new();
-        for range in &self.highlight_lines {
-            for line in range.from..=min(range.to, self.num_lines) {
-                // Ranges are one-indexed
-                lines.insert(line.saturating_sub(1));
-            }
-        }
-        lines
-    }
-}
-
-/// This is an index of a character in a `&[(Style, &'b str)]`. The `vec_idx` is the
-/// index in the slice, and `str_idx` is the byte index of the character in the
-/// corresponding string slice.
-///
-/// The `Ord` impl on this type sorts lexicographically on `vec_idx`, and then `str_idx`.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
-struct StyledIdx {
-    vec_idx: usize,
-    str_idx: usize,
-}
-
-/// This is a utility used by `perform_split`. If the `vec_idx` in the `StyledIdx` is
-/// equal to the provided value, return the `str_idx`, otherwise return `None`.
-fn get_str_idx_if_vec_idx_is(idx: Option<&StyledIdx>, vec_idx: usize) -> Option<usize> {
-    match idx {
-        Some(idx) if idx.vec_idx == vec_idx => Some(idx.str_idx),
-        _ => None,
-    }
-}
-
-/// This function assumes that `line_boundaries` is sorted according to the `Ord` impl on
-/// the `StyledIdx` type.
-fn perform_split<'b>(
-    split: &[(Style, &'b str)],
-    line_boundaries: Vec<StyledIdx>,
-) -> Vec<(Style, &'b str)> {
-    let mut result = Vec::new();
-
-    let mut idxs_iter = line_boundaries.into_iter().peekable();
-
-    for (split_idx, item) in split.iter().enumerate() {
-        let mut last_split = 0;
-
-        // Since `line_boundaries` is sorted, we know that any remaining indexes in
-        // `idxs_iter` have `vec_idx >= split_idx`, and that if there are any with
-        // `vec_idx == split_idx`, they will be first.
-        //
-        // Using the `get_str_idx_if_vec_idx_is` utility, this loop will keep consuming
-        // indexes from `idxs_iter` as long as `vec_idx == split_idx` holds. Once
-        // `vec_idx` becomes larger than `split_idx`, the loop will finish without
-        // consuming that index.
-        //
-        // If `idxs_iter` is empty, or there are no indexes with `vec_idx == split_idx`,
-        // the loop does nothing.
-        while let Some(str_idx) = get_str_idx_if_vec_idx_is(idxs_iter.peek(), split_idx) {
-            // Consume the value we just peeked.
-            idxs_iter.next();
-
-            // This consumes the index to split at. We add one to include the newline
-            // together with its own line, rather than as the first character in the next
-            // line.
-            let split_at = min(str_idx + 1, item.1.len());
-
-            // This will fail if `line_boundaries` is not sorted.
-            debug_assert!(split_at >= last_split);
-
-            // Skip splitting if the string slice would be empty.
-            if last_split != split_at {
-                result.push((item.0, &item.1[last_split..split_at]));
-                last_split = split_at;
-            }
-        }
-
-        // Now append the remainder. If the current item was not split, this will
-        // append the entire item.
-        if last_split != item.1.len() {
-            result.push((item.0, &item.1[last_split..]));
-        }
-    }
-
-    result
-}
-
-fn color_highlighted_lines(data: &mut [(Style, &str)], lines: &HashSet<usize>, background: Color) {
-    if lines.is_empty() {
-        return;
-    }
-
-    let mut current_line = 0;
-
-    for item in data {
-        if lines.contains(&current_line) {
-            item.0.background = background;
-        }
-
-        // We split the lines such that every newline is at the end of an item.
-        if item.1.ends_with('\n') {
-            current_line += 1;
-        }
-    }
-}
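Note: the deleted helpers above carry the interesting logic — split every styled item at newline boundaries so each line can be tinted independently, and count lines by trailing newlines. A self-contained illustration of that splitting invariant (styles reduced to plain labels; not the real syntect types):

```rust
// Illustration of the invariant perform_split establishes: after splitting,
// every '\n' sits at the end of an item, so a simple counter can track lines
// (exactly what color_highlighted_lines relies on).
fn split_lines<'a>(items: &[(&'a str, &'a str)]) -> Vec<(&'a str, &'a str)> {
    let mut out = Vec::new();
    for (style, s) in items {
        let mut last = 0;
        for (i, c) in s.char_indices() {
            if c == '\n' {
                // Keep the newline with its own line.
                out.push((*style, &s[last..=i]));
                last = i + 1;
            }
        }
        if last != s.len() {
            out.push((*style, &s[last..]));
        }
    }
    out
}

fn main() {
    let styled = [("kw", "fn main() {\n    body\n"), ("plain", "}")];
    let split = split_lines(&styled);
    // Every newline now terminates an item:
    assert!(split.iter().all(|(_, s)| !s[..s.len() - 1].contains('\n')));
    let lines = split.iter().filter(|(_, s)| s.ends_with('\n')).count();
    assert_eq!(lines, 2);
}
```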
@@ -1,90 +0,0 @@
-#[derive(Copy, Clone, Debug)]
-pub struct Range {
-    pub from: usize,
-    pub to: usize,
-}
-
-impl Range {
-    fn parse(s: &str) -> Option<Range> {
-        match s.find('-') {
-            Some(dash) => {
-                let mut from = s[..dash].parse().ok()?;
-                let mut to = s[dash + 1..].parse().ok()?;
-                if to < from {
-                    std::mem::swap(&mut from, &mut to);
-                }
-                Some(Range { from, to })
-            }
-            None => {
-                let val = s.parse().ok()?;
-                Some(Range { from: val, to: val })
-            }
-        }
-    }
-}
-
-#[derive(Debug)]
-pub struct FenceSettings<'a> {
-    pub language: Option<&'a str>,
-    pub line_numbers: bool,
-    pub highlight_lines: Vec<Range>,
-}
-impl<'a> FenceSettings<'a> {
-    pub fn new(fence_info: &'a str) -> Self {
-        let mut me = Self { language: None, line_numbers: false, highlight_lines: Vec::new() };
-
-        for token in FenceIter::new(fence_info) {
-            match token {
-                FenceToken::Language(lang) => me.language = Some(lang),
-                FenceToken::EnableLineNumbers => me.line_numbers = true,
-                FenceToken::HighlightLines(lines) => me.highlight_lines.extend(lines),
-            }
-        }
-
-        me
-    }
-}
-
-#[derive(Debug)]
-enum FenceToken<'a> {
-    Language(&'a str),
-    EnableLineNumbers,
-    HighlightLines(Vec<Range>),
-}
-
-struct FenceIter<'a> {
-    split: std::str::Split<'a, char>,
-}
-impl<'a> FenceIter<'a> {
-    fn new(fence_info: &'a str) -> Self {
-        Self { split: fence_info.split(',') }
-    }
-}
-
-impl<'a> Iterator for FenceIter<'a> {
-    type Item = FenceToken<'a>;
-
-    fn next(&mut self) -> Option<FenceToken<'a>> {
-        loop {
-            let tok = self.split.next()?.trim();
-
-            let mut tok_split = tok.split('=');
-            match tok_split.next().unwrap_or("").trim() {
-                "" => continue,
-                "linenos" => return Some(FenceToken::EnableLineNumbers),
-                "hl_lines" => {
-                    let mut ranges = Vec::new();
-                    for range in tok_split.next().unwrap_or("").split(' ') {
-                        if let Some(range) = Range::parse(range) {
-                            ranges.push(range);
-                        }
-                    }
-                    return Some(FenceToken::HighlightLines(ranges));
-                }
-                lang => {
-                    return Some(FenceToken::Language(lang));
-                }
-            }
-        }
-    }
-}
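Note: for reference, the fence-info grammar this deleted parser accepted (and which the tests below keep exercising): comma-separated tokens where the first unrecognised token is the language, `linenos` enables line numbers, and `hl_lines` takes space-separated one-indexed ranges (reversed ranges are swapped). A small illustration with hand-written expected values, not an executable test against the real crate:

```rust
// Illustration of the fence-info grammar parsed above; the real parser is
// FenceSettings::new / FenceIter.
fn main() {
    // "rust,linenos,hl_lines=1-3 5" would parse to:
    let language = Some("rust");
    let line_numbers = true;
    let highlight_lines = [(1, 3), (5, 5)]; // Range { from, to }
    println!("{:?} {} {:?}", language, line_numbers, highlight_lines);
}
```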
@@ -268,9 +268,16 @@ mod tests {
    }

    fn render_shortcodes(code: &str, tera: &Tera) -> String {
-       let config = Config::default();
+       let config = Config::default_for_test();
        let permalinks = HashMap::new();
-       let context = RenderContext::new(&tera, &config, "", &permalinks, InsertAnchor::None);
+       let context = RenderContext::new(
+           &tera,
+           &config,
+           &config.default_language,
+           "",
+           &permalinks,
+           InsertAnchor::None,
+       );
        super::render_shortcodes(code, &context).unwrap()
    }
56 components/rendering/tests/codeblock_hide_lines.rs (new file)

@@ -0,0 +1,56 @@
+use std::collections::HashMap;
+
+use tera::Tera;
+
+use config::Config;
+use front_matter::InsertAnchor;
+use rendering::{render_content, RenderContext};
+
+macro_rules! colored_html_line {
+    ( $s:expr ) => {{
+        let mut result = "<span>".to_string();
+        result.push_str($s);
+        result.push_str("\n</span>");
+        result
+    }};
+}
+
+macro_rules! colored_html {
+    ( $($s:expr),* $(,)* ) => {{
+        let mut result = "<pre style=\"background-color:#2b303b;color:#c0c5ce;\"><code>".to_string();
+        $(
+            result.push_str(colored_html_line!($s).as_str());
+        )*
+        result.push_str("</code></pre>\n");
+        result
+    }};
+}
+
+#[test]
+fn hide_lines_simple() {
+    let tera_ctx = Tera::default();
+    let permalinks_ctx = HashMap::new();
+    let mut config = Config::default_for_test();
+    config.markdown.highlight_code = true;
+    let context = RenderContext::new(
+        &tera_ctx,
+        &config,
+        &config.default_language,
+        "",
+        &permalinks_ctx,
+        InsertAnchor::None,
+    );
+    let res = render_content(
+        r#"
+```hide_lines=2
+foo
+bar
+baz
+bat
+```
+"#,
+        &context,
+    )
+    .unwrap();
+    assert_eq!(res.body, colored_html!("foo", "baz", "bat"));
+}
@@ -8,26 +8,28 @@ use rendering::{render_content, RenderContext};

macro_rules! colored_html_line {
    ( @no $s:expr ) => {{
-       let mut result = "<span style=\"color:#c0c5ce;\">".to_string();
+       let mut result = "<span>".to_string();
        result.push_str($s);
        result.push_str("\n</span>");
        result
    }};
    ( @hl $s:expr ) => {{
-       let mut result = "<span style=\"background-color:#65737e30;color:#c0c5ce;\">".to_string();
+       let mut result = "<mark style=\"background-color:#65737e30;\">".to_string();
+       result.push_str("<span>");
        result.push_str($s);
        result.push_str("\n</span>");
+       result.push_str("</mark>");
        result
    }};
}

macro_rules! colored_html {
    ( $(@$kind:tt $s:expr),* $(,)* ) => {{
-       let mut result = "<pre style=\"background-color:#2b303b;\">\n<code>".to_string();
+       let mut result = "<pre style=\"background-color:#2b303b;color:#c0c5ce;\"><code>".to_string();
        $(
            result.push_str(colored_html_line!(@$kind $s).as_str());
        )*
-       result.push_str("</code></pre>");
+       result.push_str("</code></pre>\n");
        result
    }};
}

@@ -36,9 +38,16 @@ macro_rules! colored_html {
fn hl_lines_simple() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=2

@@ -56,7 +65,8 @@ baz
        colored_html!(
            @no "foo",
            @hl "bar",
-           @no "bar\nbaz",
+           @no "bar",
+           @no "baz",
        )
    );
}

@@ -65,9 +75,16 @@ baz
fn hl_lines_in_middle() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=2-3

@@ -84,7 +101,8 @@ baz
        res.body,
        colored_html!(
            @no "foo",
-           @hl "bar\nbar",
+           @hl "bar",
+           @hl "bar",
            @no "baz",
        )
    );

@@ -94,9 +112,16 @@ baz
fn hl_lines_all() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=1-4

@@ -112,7 +137,10 @@ baz
    assert_eq!(
        res.body,
        colored_html!(
-           @hl "foo\nbar\nbar\nbaz",
+           @hl "foo",
+           @hl "bar",
+           @hl "bar",
+           @hl "baz",
        )
    );
}

@@ -121,9 +149,16 @@ baz
fn hl_lines_start_from_one() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=1-3

@@ -139,7 +174,9 @@ baz
    assert_eq!(
        res.body,
        colored_html!(
-           @hl "foo\nbar\nbar",
+           @hl "foo",
+           @hl "bar",
+           @hl "bar",
            @no "baz",
        )
    );

@@ -149,9 +186,16 @@ baz
fn hl_lines_start_from_zero() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=0-3

@@ -167,7 +211,9 @@ baz
    assert_eq!(
        res.body,
        colored_html!(
-           @hl "foo\nbar\nbar",
+           @hl "foo",
+           @hl "bar",
+           @hl "bar",
            @no "baz",
        )
    );

@@ -177,9 +223,16 @@ baz
fn hl_lines_end() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=3-4

@@ -195,8 +248,10 @@ baz
    assert_eq!(
        res.body,
        colored_html!(
-           @no "foo\nbar",
-           @hl "bar\nbaz",
+           @no "foo",
+           @no "bar",
+           @hl "bar",
+           @hl "baz",
        )
    );
}

@@ -205,9 +260,16 @@ baz
fn hl_lines_end_out_of_bounds() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=3-4294967295

@@ -223,8 +285,10 @@ baz
    assert_eq!(
        res.body,
        colored_html!(
-           @no "foo\nbar",
-           @hl "bar\nbaz",
+           @no "foo",
+           @no "bar",
+           @hl "bar",
+           @hl "baz",
        )
    );
}

@@ -233,9 +297,16 @@ baz
fn hl_lines_overlap() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=2-3 1-2

@@ -251,7 +322,9 @@ baz
    assert_eq!(
        res.body,
        colored_html!(
-           @hl "foo\nbar\nbar",
+           @hl "foo",
+           @hl "bar",
+           @hl "bar",
            @no "baz",
        )
    );

@@ -260,9 +333,16 @@ baz
fn hl_lines_multiple() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=2-3,hl_lines=1-2

@@ -278,7 +358,9 @@ baz
    assert_eq!(
        res.body,
        colored_html!(
-           @hl "foo\nbar\nbar",
+           @hl "foo",
+           @hl "bar",
+           @hl "bar",
            @no "baz",
        )
    );

@@ -288,9 +370,16 @@ baz
fn hl_lines_extra_spaces() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
``` hl_lines = 2 - 3 1 - 2

@@ -306,7 +395,9 @@ baz
    assert_eq!(
        res.body,
        colored_html!(
-           @hl "foo\nbar\nbar",
+           @hl "foo",
+           @hl "bar",
+           @hl "bar",
            @no "baz",
        )
    );

@@ -316,9 +407,16 @@ baz
fn hl_lines_int_and_range() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=1 3-4

@@ -336,7 +434,8 @@ baz
        colored_html!(
            @hl "foo",
            @no "bar",
-           @hl "bar\nbaz",
+           @hl "bar",
+           @hl "baz",
        )
    );
}

@@ -345,9 +444,16 @@ baz
fn hl_lines_single_line_range() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=2-2

@@ -365,7 +471,8 @@ baz
        colored_html!(
            @no "foo",
            @hl "bar",
-           @no "bar\nbaz",
+           @no "bar",
+           @no "baz",
        )
    );
}

@@ -374,9 +481,16 @@ baz
fn hl_lines_reverse_range() {
    let tera_ctx = Tera::default();
    let permalinks_ctx = HashMap::new();
-   let mut config = Config::default();
+   let mut config = Config::default_for_test();
    config.markdown.highlight_code = true;
-   let context = RenderContext::new(&tera_ctx, &config, "", &permalinks_ctx, InsertAnchor::None);
+   let context = RenderContext::new(
+       &tera_ctx,
+       &config,
+       &config.default_language,
+       "",
+       &permalinks_ctx,
+       InsertAnchor::None,
+   );
    let res = render_content(
        r#"
```hl_lines=3-2

@@ -393,7 +507,8 @@ baz
        res.body,
        colored_html!(
            @no "foo",
-           @hl "bar\nbar",
+           @hl "bar",
+           @hl "bar",
            @no "baz",
        )
    );
97 components/rendering/tests/codeblock_linenos.rs (new file)

@@ -0,0 +1,97 @@
+use std::collections::HashMap;
+
+use tera::Tera;
+
+use config::Config;
+use front_matter::InsertAnchor;
+use rendering::{render_content, RenderContext};
+
+#[test]
+fn can_add_line_numbers() {
+    let tera_ctx = Tera::default();
+    let permalinks_ctx = HashMap::new();
+    let mut config = Config::default_for_test();
+    config.markdown.highlight_code = true;
+    let context = RenderContext::new(
+        &tera_ctx,
+        &config,
+        &config.default_language,
+        "",
+        &permalinks_ctx,
+        InsertAnchor::None,
+    );
+    let res = render_content(
+        r#"
+```linenos
+foo
+bar
+```
+"#,
+        &context,
+    )
+    .unwrap();
+    assert_eq!(
+        res.body,
+        "<pre data-linenos style=\"background-color:#2b303b;color:#c0c5ce;\"><code><table><tbody><tr><td>1</td><td><span>foo\n</span><tr><td>2</td><td><span>bar\n</span></tr></tbody></table></code></pre>\n"
+    );
+}
+
+#[test]
+fn can_add_line_numbers_with_linenostart() {
+    let tera_ctx = Tera::default();
+    let permalinks_ctx = HashMap::new();
+    let mut config = Config::default_for_test();
+    config.markdown.highlight_code = true;
+    let context = RenderContext::new(
+        &tera_ctx,
+        &config,
+        &config.default_language,
+        "",
+        &permalinks_ctx,
+        InsertAnchor::None,
+    );
+    let res = render_content(
+        r#"
+```linenos, linenostart=40
+foo
+bar
+```
+"#,
+        &context,
+    )
+    .unwrap();
+    assert_eq!(
+        res.body,
+        "<pre data-linenos style=\"background-color:#2b303b;color:#c0c5ce;\"><code><table><tbody><tr><td>40</td><td><span>foo\n</span><tr><td>41</td><td><span>bar\n</span></tr></tbody></table></code></pre>\n"
+    );
+}
+
+#[test]
+fn can_add_line_numbers_with_highlight() {
+    let tera_ctx = Tera::default();
+    let permalinks_ctx = HashMap::new();
+    let mut config = Config::default_for_test();
+    config.markdown.highlight_code = true;
+    let context = RenderContext::new(
+        &tera_ctx,
+        &config,
+        &config.default_language,
+        "",
+        &permalinks_ctx,
+        InsertAnchor::None,
+    );
+    let res = render_content(
+        r#"
+```linenos, hl_lines=2
+foo
+bar
+```
+"#,
+        &context,
+    )
+    .unwrap();
+    assert_eq!(
+        res.body,
+        "<pre data-linenos style=\"background-color:#2b303b;color:#c0c5ce;\"><code><table><tbody><tr><td>1</td><td><span>foo\n</span><tr><td><mark style=\"background-color:#65737e30;\">2</mark></td><td><mark style=\"background-color:#65737e30;\"><span>bar\n</span></mark></tr></tbody></table></code></pre>\n"
+    );
+}
(File diff suppressed because it is too large.)

@@ -5,7 +5,7 @@ authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
edition = "2018"

[dependencies]
-elasticlunr-rs = {version = "2", default-features = false, features = ["da", "de", "du", "es", "fi", "fr", "it", "pt", "ro", "ru", "sv", "tr"] }
+elasticlunr-rs = {version = "2", default-features = false, features = ["da", "no", "de", "du", "es", "fi", "fr", "it", "pt", "ro", "ru", "sv", "tr"] }
ammonia = "3"
lazy_static = "1"
@@ -1,9 +1,11 @@
use std::collections::{HashMap, HashSet};

+use elasticlunr::pipeline;
+use elasticlunr::pipeline::TokenizerFn;
use elasticlunr::{Index, Language};
use lazy_static::lazy_static;

-use config::Config;
+use config::{Config, Search};
use errors::{bail, Result};
use library::{Library, Section};

@@ -26,42 +28,86 @@ lazy_static! {
    };
}

-fn build_fields(config: &Config) -> Vec<String> {
+fn build_fields(search_config: &Search) -> Vec<String> {
    let mut fields = vec![];
-   if config.search.include_title {
+   if search_config.include_title {
        fields.push("title".to_owned());
    }

-   if config.search.include_description {
+   if search_config.include_description {
        fields.push("description".to_owned());
    }

-   if config.search.include_content {
+   if search_config.include_path {
+       fields.push("path".to_owned());
+   }
+
+   if search_config.include_content {
        fields.push("body".to_owned());
    }

    fields
}

+fn path_tokenizer(text: &str) -> Vec<String> {
+    text.split(|c: char| c.is_whitespace() || c == '-' || c == '/')
+        .filter(|s| !s.is_empty())
+        .map(|s| s.trim().to_lowercase())
+        .collect()
+}
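Note: a quick self-contained illustration of what the new `path_tokenizer` produces (the splitting logic is copied from the function above; the input path is made up):

```rust
fn main() {
    // path_tokenizer splits on whitespace, '-' and '/', drops empty
    // segments and lowercases the rest:
    let tokens = "/Blog/My-First-Post/"
        .split(|c: char| c.is_whitespace() || c == '-' || c == '/')
        .filter(|s| !s.is_empty())
        .map(|s| s.trim().to_lowercase())
        .collect::<Vec<_>>();
    assert_eq!(tokens, vec!["blog", "my", "first", "post"]);
}
```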
+fn build_tokenizers(search_config: &Search, language: Language) -> Vec<TokenizerFn> {
+    let text_tokenizer = match language {
+        #[cfg(feature = "indexing-zh")]
+        Language::Chinese => pipeline::tokenize_chinese,
+        #[cfg(feature = "indexing-ja")]
+        Language::Japanese => pipeline::tokenize_japanese,
+        _ => pipeline::tokenize,
+    };
+    let mut tokenizers: Vec<TokenizerFn> = vec![];
+    if search_config.include_title {
+        tokenizers.push(text_tokenizer);
+    }
+
+    if search_config.include_description {
+        tokenizers.push(text_tokenizer);
+    }
+
+    if search_config.include_path {
+        tokenizers.push(path_tokenizer);
+    }
+
+    if search_config.include_content {
+        tokenizers.push(text_tokenizer);
+    }
+
+    tokenizers
+}
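Note: one detail worth spelling out (my observation, not stated in the diff): `build_tokenizers` pushes one tokenizer per enabled field in the same order that `build_fields` pushes field names, so the two vectors stay index-aligned. A small sketch of that invariant, with the flags written out by hand:

```rust
fn main() {
    // Hypothetical flags mirroring a `Search` config (description disabled).
    let (title, description, path, content) = (true, false, true, true);

    // Reproduce the shared ordering of build_fields / build_tokenizers.
    let mut fields = vec![];
    let mut tokenizer_names = vec![];
    for (on, field, tok) in [
        (title, "title", "tokenize"),
        (description, "description", "tokenize"),
        (path, "path", "path_tokenizer"),
        (content, "body", "tokenize"),
    ] {
        if on {
            fields.push(field);
            tokenizer_names.push(tok);
        }
    }
    // Index-aligned: fields[i] is tokenized by tokenizer_names[i].
    assert_eq!(fields, ["title", "path", "body"]);
    assert_eq!(tokenizer_names, ["tokenize", "path_tokenizer", "tokenize"]);
}
```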
fn fill_index(
-   config: &Config,
+   search_config: &Search,
    title: &Option<String>,
    description: &Option<String>,
+   path: &str,
    content: &str,
) -> Vec<String> {
    let mut row = vec![];

-   if config.search.include_title {
+   if search_config.include_title {
        row.push(title.clone().unwrap_or_default());
    }

-   if config.search.include_description {
+   if search_config.include_description {
        row.push(description.clone().unwrap_or_default());
    }

-   if config.search.include_content {
+   if search_config.include_path {
+       row.push(path.to_string());
+   }
+
+   if search_config.include_content {
        let body = AMMONIA.clean(&content).to_string();
-       if let Some(truncate_len) = config.search.truncate_content_length {
+       if let Some(truncate_len) = search_config.truncate_content_length {
            // Not great for unicode
            // TODO: fix it like the truncate in Tera
            match body.char_indices().nth(truncate_len) {

@@ -87,28 +133,49 @@ pub fn build_index(lang: &str, library: &Library, config: &Config) -> Result<Str
            bail!("Tried to build search index for language {} which is not supported", lang);
        }
    };
-   let mut index = Index::with_language(language, &build_fields(&config));
+   let language_options = &config.languages[lang];
+   let mut index = Index::with_language(language, &build_fields(&language_options.search));
+
+   let tokenizers = build_tokenizers(&language_options.search, language);

    for section in library.sections_values() {
        if section.lang == lang {
-           add_section_to_index(&mut index, section, library, config);
+           add_section_to_index(
+               &mut index,
+               section,
+               library,
+               &language_options.search,
+               tokenizers.clone(),
+           );
        }
    }

    Ok(index.to_json())
}

-fn add_section_to_index(index: &mut Index, section: &Section, library: &Library, config: &Config) {
+fn add_section_to_index(
+    index: &mut Index,
+    section: &Section,
+    library: &Library,
+    search_config: &Search,
+    tokenizers: Vec<TokenizerFn>,
+) {
    if !section.meta.in_search_index {
        return;
    }

    // Don't index redirecting sections
    if section.meta.redirect_to.is_none() {
-       index.add_doc(
+       index.add_doc_with_tokenizers(
            &section.permalink,
-           &fill_index(config, &section.meta.title, &section.meta.description, &section.content),
+           &fill_index(
+               search_config,
+               &section.meta.title,
+               &section.meta.description,
+               &section.path,
+               &section.content,
+           ),
+           tokenizers.clone(),
        );
    }

@@ -118,9 +185,16 @@ fn add_section_to_index(index: &mut Index, section: &Section, library: &Library,
        continue;
    }

-   index.add_doc(
+   index.add_doc_with_tokenizers(
        &page.permalink,
-       &fill_index(config, &page.meta.title, &page.meta.description, &page.content),
+       &fill_index(
+           search_config,
+           &page.meta.title,
+           &page.meta.description,
+           &page.path,
+           &page.content,
+       ),
+       tokenizers.clone(),
    );
}

@@ -134,20 +208,20 @@ mod tests {
#[test]
fn can_build_fields() {
    let mut config = Config::default();
-   let fields = build_fields(&config);
+   let fields = build_fields(&config.search);
    assert_eq!(fields, vec!["title", "body"]);

    config.search.include_content = false;
    config.search.include_description = true;
-   let fields = build_fields(&config);
+   let fields = build_fields(&config.search);
    assert_eq!(fields, vec!["title", "description"]);

    config.search.include_content = true;
-   let fields = build_fields(&config);
+   let fields = build_fields(&config.search);
    assert_eq!(fields, vec!["title", "description", "body"]);

    config.search.include_title = false;
-   let fields = build_fields(&config);
+   let fields = build_fields(&config.search);
    assert_eq!(fields, vec!["description", "body"]);
}

@@ -156,9 +230,10 @@ mod tests {
    let config = Config::default();
    let title = Some("A title".to_string());
    let description = Some("A description".to_string());
+   let path = "/a/page/".to_string();
    let content = "Some content".to_string();

-   let res = fill_index(&config, &title, &description, &content);
+   let res = fill_index(&config.search, &title, &description, &path, &content);
    assert_eq!(res.len(), 2);
    assert_eq!(res[0], title.unwrap());
    assert_eq!(res[1], content);

@@ -170,9 +245,10 @@ mod tests {
    config.search.include_description = true;
    let title = Some("A title".to_string());
    let description = Some("A description".to_string());
+   let path = "/a/page/".to_string();
    let content = "Some content".to_string();

-   let res = fill_index(&config, &title, &description, &content);
+   let res = fill_index(&config.search, &title, &description, &path, &content);
    assert_eq!(res.len(), 3);
    assert_eq!(res[0], title.unwrap());
    assert_eq!(res[1], description.unwrap());

@@ -185,9 +261,10 @@ mod tests {
    config.search.truncate_content_length = Some(5);
    let title = Some("A title".to_string());
    let description = Some("A description".to_string());
+   let path = "/a/page/".to_string();
    let content = "Some content".to_string();

-   let res = fill_index(&config, &title, &description, &content);
+   let res = fill_index(&config.search, &title, &description, &path, &content);
    assert_eq!(res.len(), 2);
    assert_eq!(res[0], title.unwrap());
    assert_eq!(res[1], content[..5]);
@ -15,7 +15,8 @@ serde_derive = "1"
|
|||
sass-rs = "0.2"
|
||||
lazy_static = "1.1"
|
||||
relative-path = "1"
|
||||
slotmap = "0.4"
|
||||
slotmap = "1"
|
||||
url = "2"
|
||||
|
||||
errors = { path = "../errors" }
|
||||
config = { path = "../config" }
|
||||
|
@ -29,3 +30,4 @@ link_checker = { path = "../link_checker" }
|
|||
|
||||
[dev-dependencies]
|
||||
tempfile = "3"
|
||||
path-slash = "0.1.4"
|
||||
|
|
|
@@ -59,7 +59,7 @@ pub fn render_feed(
        pages.iter().take(num_entries).map(|x| x.to_serialized_basic(&library)).collect::<Vec<_>>();

    context.insert("pages", &p);
-   context.insert("config", &site.config);
+   context.insert("config", &site.config.serialize(lang));
    context.insert("lang", lang);

    let feed_filename = &site.config.feed_filename;
@@ -14,13 +14,14 @@ use rayon::prelude::*;
use tera::{Context, Tera};
use walkdir::{DirEntry, WalkDir};

+use config::highlighting::export_theme_css;
use config::{get_config, Config};
use errors::{bail, Error, Result};
use front_matter::InsertAnchor;
use library::{find_taxonomies, Library, Page, Paginator, Section, Taxonomy};
use relative_path::RelativePathBuf;
use std::time::Instant;
-use templates::render_redirect_template;
+use templates::{load_tera, render_redirect_template};
use utils::fs::{
    copy_directory, copy_file_if_needed, create_directory, create_file, ensure_directory_exists,
};

@@ -72,20 +73,20 @@ impl Site {
    pub fn new<P: AsRef<Path>, P2: AsRef<Path>>(path: P, config_file: P2) -> Result<Site> {
        let path = path.as_ref();
        let config_file = config_file.as_ref();
-       let mut config = get_config(config_file);
-       config.load_extra_syntaxes(path)?;
+       let mut config = get_config(config_file)?;
+       config.markdown.load_extra_syntaxes(path)?;

        if let Some(theme) = config.theme.clone() {
            // Grab data from the extra section of the theme
-           config.merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"))?;
+           config
+               .merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"), &theme)?;
        }

-       let tera = tpls::load_tera(path, &config)?;
+       let tera = load_tera(path, &config)?;

        let content_path = path.join("content");
        let static_path = path.join("static");
-       let imageproc =
-           imageproc::Processor::new(content_path.clone(), &static_path, &config.base_url);
+       let imageproc = imageproc::Processor::new(path.to_path_buf(), &config);
        let output_path = path.join(config.output_dir.clone());

        let site = Site {

@@ -123,13 +124,10 @@ impl Site {

    /// The index sections are ALWAYS at those paths
    /// There is one index section for the default language, plus one per additional language
-   fn index_section_paths(&self) -> Vec<(PathBuf, Option<String>)> {
+   fn index_section_paths(&self) -> Vec<(PathBuf, Option<&str>)> {
        let mut res = vec![(self.content_path.join("_index.md"), None)];
-       for language in &self.config.languages {
-           res.push((
-               self.content_path.join(format!("_index.{}.md", language.code)),
-               Some(language.code.clone()),
-           ));
+       for (code, _) in self.config.other_languages() {
+           res.push((self.content_path.join(format!("_index.{}.md", code)), Some(code)));
        }
        res
    }
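Note: to make the path scheme concrete — an illustration only, assuming a default language plus one extra language "fr":

```rust
fn main() {
    // index_section_paths would resolve to something like:
    let paths: Vec<(&str, Option<&str>)> = vec![
        ("content/_index.md", None),          // default language index
        ("content/_index.fr.md", Some("fr")), // one entry per other language
    ];
    for (p, lang) in &paths {
        println!("{} -> {:?}", p, lang);
    }
}
```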
@@ -154,9 +152,9 @@ impl Site {
    }

    pub fn set_base_url(&mut self, base_url: String) {
-       let mut imageproc = self.imageproc.lock().expect("Couldn't lock imageproc (set_base_url)");
-       imageproc.set_base_url(&base_url);
        self.config.base_url = base_url;
+       let mut imageproc = self.imageproc.lock().expect("Couldn't lock imageproc (set_base_url)");
+       imageproc.set_base_url(&self.config);
    }

    pub fn set_output_path<P: AsRef<Path>>(&mut self, path: P) {

@@ -175,8 +173,12 @@ impl Site {
        // which we can only decide to use after we've deserialised the section
        // so it's kinda necessary
        let mut dir_walker = WalkDir::new(format!("{}/{}", base_path, "content/")).into_iter();
-       let mut allowed_index_filenames: Vec<_> =
-           self.config.languages.iter().map(|l| format!("_index.{}.md", l.code)).collect();
+       let mut allowed_index_filenames: Vec<_> = self
+           .config
+           .other_languages()
+           .iter()
+           .map(|(code, _)| format!("_index.{}.md", code))
+           .collect();
        allowed_index_filenames.push("_index.md".to_string());

        loop {

@@ -227,7 +229,7 @@ impl Site {
                Ok(f) => {
                    let path_str = f.path().file_name().unwrap().to_str().unwrap();
                    if f.path().is_file()
-                       && allowed_index_filenames.iter().find(|&s| *s == path_str).is_some()
+                       && allowed_index_filenames.iter().any(|s| s == path_str)
                    {
                        Some(f)
                    } else {

@@ -242,14 +244,8 @@ impl Site {
            .collect::<Vec<DirEntry>>();

        for index_file in index_files {
-           let section = match Section::from_file(
-               index_file.path(),
-               &self.config,
-               &self.base_path,
-           ) {
-               Err(_) => continue,
-               Ok(sec) => sec,
-           };
+           let section =
+               Section::from_file(index_file.path(), &self.config, &self.base_path)?;

            // if the section is drafted we can skip the entire dir
            if section.meta.draft && !self.include_drafts {

@@ -260,8 +256,7 @@ impl Site {
                self.add_section(section, false)?;
            }
        } else {
-           let page = Page::from_file(path, &self.config, &self.base_path)
-               .expect("error deserialising page");
+           let page = Page::from_file(path, &self.config, &self.base_path)?;

            // should we skip drafts?
            if page.meta.draft && !self.include_drafts {

@@ -287,7 +282,7 @@ impl Site {
        // taxonomy Tera fns are loaded in `register_early_global_fns`
        // so we do need to populate it first.
        self.populate_taxonomies()?;
-       tpls::register_early_global_fns(self);
+       tpls::register_early_global_fns(self)?;
        self.populate_sections();
        self.render_markdown()?;
        tpls::register_tera_global_fns(self);

@@ -663,16 +658,17 @@ impl Site {
            start = log_time(start, "Generated feed in default language");
        }

-       for lang in &self.config.languages {
-           if !lang.feed {
+       for (code, language) in &self.config.other_languages() {
+           if !language.generate_feed {
                continue;
            }
            let pages =
-               library.pages_values().iter().filter(|p| p.lang == lang.code).cloned().collect();
-           self.render_feed(pages, Some(&PathBuf::from(lang.code.clone())), &lang.code, |c| c)?;
+               library.pages_values().iter().filter(|p| &p.lang == code).cloned().collect();
+           self.render_feed(pages, Some(&PathBuf::from(code)), &code, |c| c)?;
            start = log_time(start, "Generated feed in other language");
        }

+       self.render_themes_css()?;
+       start = log_time(start, "Rendered themes css");
        self.render_404()?;
        start = log_time(start, "Rendered 404");
        self.render_robots()?;

@@ -690,6 +686,20 @@ impl Site {
        Ok(())
    }

+   pub fn render_themes_css(&self) -> Result<()> {
+       ensure_directory_exists(&self.static_path)?;
+
+       for t in &self.config.markdown.highlight_themes_css {
+           let p = self.static_path.join(&t.filename);
+           if !p.exists() {
+               let content = export_theme_css(&t.theme);
+               create_file(&p, &content)?;
+           }
+       }
+
+       Ok(())
+   }
+
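Note: a hedged sketch of what the new `render_themes_css` consumes. The loop above reads entries with a `theme` and a `filename` field; the exact config shape is my inference from those field names, not something the diff states:

```rust
// Hypothetical config entry, inferred from the field accesses above:
//
//   [markdown]
//   highlight_themes_css = [
//       { theme = "base16-ocean-dark", filename = "syntax-theme-dark.css" },
//   ]
//
// Each entry is exported once to static/<filename>, and only if the file
// does not already exist.
fn main() {
    let (theme, filename) = ("base16-ocean-dark", "syntax-theme-dark.css");
    println!("would write CSS for `{}` to static/{}", theme, filename);
}
```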
    pub fn build_search_index(&self) -> Result<()> {
        ensure_directory_exists(&self.output_path)?;
        // TODO: add those to the SITE_CONTENT map

@@ -707,17 +717,13 @@ impl Site {
            ),
        )?;

-       for language in &self.config.languages {
-           if language.code != self.config.default_language && language.search {
+       for (code, language) in &self.config.other_languages() {
+           if code != &self.config.default_language && language.build_search_index {
                create_file(
-                   &self.output_path.join(&format!("search_index.{}.js", &language.code)),
+                   &self.output_path.join(&format!("search_index.{}.js", &code)),
                    &format!(
                        "window.searchIndex = {};",
-                       search::build_index(
-                           &language.code,
-                           &self.library.read().unwrap(),
-                           &self.config
-                       )?
+                       search::build_index(&code, &self.library.read().unwrap(), &self.config)?
                    ),
                )?;
            }

@@ -769,7 +775,7 @@ impl Site {
    pub fn render_404(&self) -> Result<()> {
        ensure_directory_exists(&self.output_path)?;
        let mut context = Context::new();
-       context.insert("config", &self.config);
+       context.insert("config", &self.config.serialize(&self.config.default_language));
        context.insert("lang", &self.config.default_language);
        let output = render_template("404.html", &self.tera, context, &self.config.theme)?;
        let content = self.inject_livereload(output);

@@ -781,7 +787,7 @@ impl Site {
    pub fn render_robots(&self) -> Result<()> {
        ensure_directory_exists(&self.output_path)?;
        let mut context = Context::new();
-       context.insert("config", &self.config);
+       context.insert("config", &self.config.serialize(&self.config.default_language));
        let content = render_template("robots.txt", &self.tera, context, &self.config.theme)?;
        self.write_content(&[], "robots.txt", content, false)?;
        Ok(())

@@ -804,8 +810,8 @@ impl Site {
        ensure_directory_exists(&self.output_path)?;

        let mut components = Vec::new();
-       if taxonomy.kind.lang != self.config.default_language {
-           components.push(taxonomy.kind.lang.as_ref());
+       if taxonomy.lang != self.config.default_language {
+           components.push(taxonomy.lang.as_ref());
        }

        components.push(taxonomy.slug.as_ref());

@@ -839,11 +845,7 @@ impl Site {
        self.render_feed(
            item.pages.iter().map(|p| library.get_page_by_key(*p)).collect(),
            Some(&PathBuf::from(format!("{}/{}", taxonomy.slug, item.slug))),
-           if self.config.is_multilingual() && !taxonomy.kind.lang.is_empty() {
-               &taxonomy.kind.lang
-           } else {
-               &self.config.default_language
-           },
+           &taxonomy.lang,
            |mut context: Context| {
                context.insert("taxonomy", &taxonomy.kind);
                context
@@ -1,18 +1,27 @@
use rayon::prelude::*;

use crate::Site;
-use errors::{Error, ErrorKind, Result};
+use core::time;
+use errors::{bail, Result};
+use errors::{Error, ErrorKind};
+use std::{collections::HashMap, path::PathBuf, thread};
+use url::Url;

-/// Very similar to check_external_links but can't be merged as far as I can see since we always
-/// want to check the internal links but only the external in zola check :/
+/// Check whether all internal links pointing to explicit anchor fragments are valid.
+///
+/// This is very similar to `check_external_links`, although internal links checking
+/// is always performed (while external ones only conditionally in `zola check`).
pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> {
    println!("Checking all internal links with anchors.");
    let library = site.library.write().expect("Get lock for check_internal_links_with_anchors");

+   // Chain all internal links, from both sections and pages.
    let page_links = library
        .pages()
        .values()
        .map(|p| {
            let path = &p.file.path;
-           p.internal_links_with_anchors.iter().map(move |l| (path.clone(), l))
+           p.internal_links.iter().map(move |l| (path.clone(), l))
        })
        .flatten();
    let section_links = library

@@ -20,67 +29,46 @@ pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> {
        .values()
        .map(|p| {
            let path = &p.file.path;
-           p.internal_links_with_anchors.iter().map(move |l| (path.clone(), l))
+           p.internal_links.iter().map(move |l| (path.clone(), l))
        })
        .flatten();
-   let all_links = page_links.chain(section_links).collect::<Vec<_>>();
+   let all_links = page_links.chain(section_links);

-   if site.config.is_in_check_mode() {
-       println!("Checking {} internal link(s) with an anchor.", all_links.len());
-   }
-
-   if all_links.is_empty() {
-       return Ok(());
-   }
-
-   let mut full_path = site.base_path.clone();
-   full_path.push("content");
-
-   let errors: Vec<_> = all_links
-       .iter()
-       .filter_map(|(page_path, (md_path, anchor))| {
-           // There are a few `expect` here since the presence of the .md file will
-           // already have been checked in the markdown rendering
-           let mut p = full_path.clone();
-           for part in md_path.split('/') {
-               p.push(part);
-           }
-           if md_path.contains("_index.md") {
-               let section = library
-                   .get_section(&p)
-                   .expect("Couldn't find section in check_internal_links_with_anchors");
-               if section.has_anchor(&anchor) {
-                   None
-               } else {
-                   Some((page_path, md_path, anchor))
-               }
-           } else {
-               let page = library
-                   .get_page(&p)
-                   .expect("Couldn't find section in check_internal_links_with_anchors");
-               if page.has_anchor(&anchor) {
-                   None
-               } else {
-                   Some((page_path, md_path, anchor))
-               }
-           }
-       })
-       .collect();
+   // Only keep links with anchor fragments, and count them too.
+   // Bare files have already been checked elsewhere, thus they are not interesting here.
+   let mut anchors_total = 0usize;
+   let links_with_anchors = all_links
+       .filter_map(|(page_path, link)| match link {
+           (md_path, Some(anchor)) => Some((page_path, md_path, anchor)),
+           _ => None,
+       })
+       .inspect(|_| anchors_total = anchors_total.saturating_add(1));

-   if site.config.is_in_check_mode() {
-       println!(
-           "> Checked {} internal link(s) with an anchor: {} error(s) found.",
-           all_links.len(),
-           errors.len()
-       );
-   }
+   // Check for targets existence (including anchors), then keep only the faulty
+   // entries for error reporting purposes.
+   let missing_targets = links_with_anchors.filter(|(_, md_path, anchor)| {
+       // There are a few `expect` here since the presence of the .md file will
+       // already have been checked in the markdown rendering
+       let mut full_path = site.base_path.clone();
+       full_path.push("content");
+       for part in md_path.split('/') {
+           full_path.push(part);
+       }
+       if md_path.contains("_index.md") {
+           let section = library
+               .get_section(&full_path)
+               .expect("Couldn't find section in check_internal_links_with_anchors");
+           !section.has_anchor(&anchor)
+       } else {
+           let page = library
+               .get_page(&full_path)
+               .expect("Couldn't find section in check_internal_links_with_anchors");
+           !page.has_anchor(&anchor)
+       }
+   });

-   if errors.is_empty() {
-       return Ok(());
-   }
-
-   let msg = errors
-       .into_iter()
+   // Format faulty entries into error messages, and collect them.
+   let errors = missing_targets
        .map(|(page_path, md_path, anchor)| {
            format!(
                "The anchor in the link `@/{}#{}` in {} does not exist.",

@@ -89,65 +77,116 @@ pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> {
                page_path.to_string_lossy(),
            )
        })
-       .collect::<Vec<_>>()
-       .join("\n");
-   Err(Error { kind: ErrorKind::Msg(msg), source: None })
+       .collect::<Vec<_>>();
+
+   // Finally emit a summary, and return overall anchors-checking result.
+   match errors.len() {
+       0 => {
+           println!("> Successfully checked {} internal link(s) with anchors.", anchors_total);
+           Ok(())
+       }
+       errors_total => {
+           println!(
+               "> Checked {} internal link(s) with anchors: {} target(s) missing.",
+               anchors_total, errors_total,
+           );
+           Err(Error { kind: ErrorKind::Msg(errors.join("\n")), source: None })
+       }
+   }
}
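Note: the filter above leans on the reshaped `internal_links` field, where each entry now pairs the markdown path with an optional anchor. A small illustration of that filtering (the representation is assumed from the pattern match in the diff):

```rust
fn main() {
    // (md_path, Option<anchor>) pairs, as matched in the filter_map above.
    let internal_links: Vec<(&str, Option<&str>)> = vec![
        ("blog/post.md", None),            // bare link: checked elsewhere
        ("blog/post.md", Some("heading")), // link with an anchor fragment
    ];
    let with_anchors: Vec<_> = internal_links
        .iter()
        .filter_map(|&(p, a)| a.map(|an| (p, an)))
        .collect();
    assert_eq!(with_anchors, vec![("blog/post.md", "heading")]);
}
```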
+fn get_link_domain(link: &str) -> Result<String> {
+    return match Url::parse(&link) {
+        Ok(url) => match url.host_str().map(String::from) {
+            Some(domain_str) => Ok(domain_str),
+            None => bail!("could not parse domain `{}` from link", link),
+        },
+        Err(err) => bail!("could not parse domain `{}` from link: `{}`", link, err),
+    };
+}
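Note: `get_link_domain` is a thin wrapper around the `url` crate, which the diff imports. A minimal usage sketch:

```rust
use url::Url;

fn main() {
    // Url::parse exposes the host via host_str().
    let url = Url::parse("https://example.com/some/page#anchor").unwrap();
    assert_eq!(url.host_str(), Some("example.com"));
    // get_link_domain wraps exactly this, turning a missing or unparsable
    // host into an error instead of a None.
}
```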
pub fn check_external_links(site: &Site) -> Result<()> {
    let library = site.library.write().expect("Get lock for check_external_links");
-   let page_links = library
-       .pages()
-       .values()
-       .map(|p| {
-           let path = &p.file.path;
-           p.external_links.iter().map(move |l| (path.clone(), l))
-       })
-       .flatten();
-   let section_links = library
-       .sections()
-       .values()
-       .map(|p| {
-           let path = &p.file.path;
-           p.external_links.iter().map(move |l| (path.clone(), l))
-       })
-       .flatten();
-   let all_links = page_links.chain(section_links).collect::<Vec<_>>();

+   let mut all_links: Vec<(PathBuf, String, String)> = vec![];
+
+   for p in library.pages_values().into_iter() {
+       for external_link in p.clone().external_links.into_iter() {
+           let domain = get_link_domain(&external_link)?;
+           all_links.push((p.file.path.clone(), external_link, domain));
+       }
+   }
+
+   for s in library.sections_values().into_iter() {
+       for external_link in s.clone().external_links.into_iter() {
+           let domain = get_link_domain(&external_link)?;
+           all_links.push((s.file.path.clone(), external_link, domain));
+       }
+   }
+
    println!("Checking {} external link(s).", all_links.len());

+   let mut links_by_domain: HashMap<String, Vec<(PathBuf, String)>> = HashMap::new();
+
+   for link in all_links.iter() {
+       links_by_domain.entry(link.2.to_string()).or_insert(Vec::new());
+       // Insert content path and link under the domain key
+       links_by_domain
+           .get_mut(&link.2.to_string())
+           .unwrap()
+           .push((link.0.clone(), link.1.clone()));
+   }
+
    if all_links.is_empty() {
        return Ok(());
    }

    // create thread pool with lots of threads so we can fetch
-   // (almost) all pages simultaneously
-   let threads = std::cmp::min(all_links.len(), 32);
+   // (almost) all pages simultaneously, limiting all links for a single
+   // domain to one thread to avoid rate-limiting
+   let threads = std::cmp::min(links_by_domain.len(), 8);
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(threads)
        .build()
        .map_err(|e| Error { kind: ErrorKind::Msg(e.to_string()), source: None })?;

-   let errors: Vec<_> = pool.install(|| {
-       all_links
+   let errors = pool.install(|| {
+       links_by_domain
            .par_iter()
-           .filter_map(|(page_path, link)| {
-               if site
-                   .config
-                   .link_checker
-                   .skip_prefixes
-                   .iter()
-                   .any(|prefix| link.starts_with(prefix))
-               {
-                   return None;
-               }
-               let res = link_checker::check_url(&link, &site.config.link_checker);
-               if link_checker::is_valid(&res) {
-                   None
-               } else {
-                   Some((page_path, link, res))
-               }
-           })
-           .collect()
+           .map(|(_domain, links)| {
+               let mut links_to_process = links.len();
+               links
+                   .into_iter()
+                   .filter_map(move |(page_path, link)| {
+                       links_to_process -= 1;
+
+                       if site
+                           .config
+                           .link_checker
+                           .skip_prefixes
+                           .iter()
+                           .any(|prefix| link.starts_with(prefix))
+                       {
+                           return None;
+                       }
+
+                       let res = link_checker::check_url(&link, &site.config.link_checker);
+
+                       if links_to_process > 0 {
+                           // Prevent rate-limiting, wait before next crawl unless we're done with this domain
+                           thread::sleep(time::Duration::from_millis(500));
+                       }
+
+                       if link_checker::is_valid(&res) {
+                           None
+                       } else {
+                           Some((page_path, link, res))
+                       }
+                   })
+                   .collect::<Vec<_>>()
+           })
+           .flatten()
+           .collect::<Vec<_>>()
    });

    println!("> Checked {} external link(s): {} error(s) found.", all_links.len(), errors.len());

@@ -168,5 +207,6 @@ pub fn check_external_links(site: &Site) -> Result<()> {
        })
        .collect::<Vec<_>>()
        .join("\n");
+
    Err(Error { kind: ErrorKind::Msg(msg), source: None })
}
|
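The rewrite above swaps the old one-thread-per-link pool for per-domain buckets, so no single host is crawled concurrently and each bucket sleeps between requests. A minimal sketch of that throttling idea, outside the diff; `check` is a hypothetical stand-in for `link_checker::check_url`:

use std::collections::HashMap;
use std::{thread, time};

// Hypothetical stand-in for link_checker::check_url.
fn check(url: &str) -> bool {
    !url.is_empty()
}

// Sketch: bucket (domain, url) pairs by domain, crawl each domain serially
// on its own thread, and sleep between requests within a domain.
fn check_by_domain(links: Vec<(String, String)>) -> Vec<String> {
    let mut by_domain: HashMap<String, Vec<String>> = HashMap::new();
    for (domain, url) in links {
        by_domain.entry(domain).or_insert_with(Vec::new).push(url);
    }

    let handles: Vec<_> = by_domain
        .into_iter()
        .map(|(_domain, urls)| {
            thread::spawn(move || {
                let mut failed = vec![];
                let mut remaining = urls.len();
                for url in urls {
                    remaining -= 1;
                    if !check(&url) {
                        failed.push(url);
                    }
                    // Don't hammer the host: pause unless this was its last link.
                    if remaining > 0 {
                        thread::sleep(time::Duration::from_millis(500));
                    }
                }
                failed
            })
        })
        .collect();

    handles.into_iter().flat_map(|h| h.join().unwrap()).collect()
}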
@@ -1,72 +1,46 @@
use std::path::Path;

use tera::Tera;

use crate::Site;
use config::Config;
use errors::{bail, Error, Result};
use templates::{filters, global_fns, ZOLA_TERA};
use utils::templates::rewrite_theme_paths;

pub fn load_tera(path: &Path, config: &Config) -> Result<Tera> {
    let tpl_glob =
        format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "templates/**/*.{*ml,md}");

    // Only parsing as we might be extending templates from themes and that would error
    // as we haven't loaded them yet
    let mut tera =
        Tera::parse(&tpl_glob).map_err(|e| Error::chain("Error parsing templates", e))?;

    if let Some(ref theme) = config.theme {
        // Test that the templates folder exists for that theme
        let theme_path = path.join("themes").join(&theme);
        if !theme_path.join("templates").exists() {
            bail!("Theme `{}` is missing a templates folder", theme);
        }

        let theme_tpl_glob = format!(
            "{}/{}",
            path.to_string_lossy().replace("\\", "/"),
            format!("themes/{}/templates/**/*.{{*ml,md}}", theme)
        );
        let mut tera_theme = Tera::parse(&theme_tpl_glob)
            .map_err(|e| Error::chain("Error parsing templates from themes", e))?;
        rewrite_theme_paths(&mut tera_theme, &theme);

        if theme_path.join("templates").join("robots.txt").exists() {
            tera_theme.add_template_file(theme_path.join("templates").join("robots.txt"), None)?;
        }
        tera.extend(&tera_theme)?;
    }
    tera.extend(&ZOLA_TERA)?;
    tera.build_inheritance_chains()?;

    if path.join("templates").join("robots.txt").exists() {
        tera.add_template_file(path.join("templates").join("robots.txt"), Some("robots.txt"))?;
    }

    Ok(tera)
}
use templates::{filters, global_fns};
use tera::Result as TeraResult;

/// Adds global fns that are to be available to shortcodes while rendering markdown
pub fn register_early_global_fns(site: &mut Site) {
    site.tera.register_filter("markdown", filters::MarkdownFilter::new(site.config.clone()));
pub fn register_early_global_fns(site: &mut Site) -> TeraResult<()> {
    site.tera.register_filter(
        "markdown",
        filters::MarkdownFilter::new(
            site.base_path.clone(),
            site.config.clone(),
            site.permalinks.clone(),
        )?,
    );
    site.tera.register_filter(
        "num_format",
        filters::NumFormatFilter::new(&site.config.default_language),
    );

    site.tera.register_function(
        "get_url",
        global_fns::GetUrl::new(
            site.base_path.clone(),
            site.config.clone(),
            site.permalinks.clone(),
            vec![site.static_path.clone(), site.output_path.clone(), site.content_path.clone()],
        ),
    );
    site.tera
        .register_function("resize_image", global_fns::ResizeImage::new(site.imageproc.clone()));
    site.tera.register_function(
        "resize_image",
        global_fns::ResizeImage::new(
            site.base_path.clone(),
            site.imageproc.clone(),
            site.config.theme.clone(),
        ),
    );
    site.tera.register_function(
        "get_image_metadata",
        global_fns::GetImageMeta::new(site.content_path.clone()),
        global_fns::GetImageMetadata::new(site.base_path.clone(), site.config.theme.clone()),
    );
    site.tera.register_function(
        "load_data",
        global_fns::LoadData::new(site.base_path.clone(), site.config.theme.clone()),
    );
    site.tera.register_function("load_data", global_fns::LoadData::new(site.base_path.clone()));
    site.tera.register_function("trans", global_fns::Trans::new(site.config.clone()));
    site.tera.register_function(
        "get_taxonomy_url",

@@ -78,12 +52,10 @@ pub fn register_early_global_fns(site: &mut Site) {
    );
    site.tera.register_function(
        "get_file_hash",
        global_fns::GetFileHash::new(vec![
            site.static_path.clone(),
            site.output_path.clone(),
            site.content_path.clone(),
        ]),
        global_fns::GetFileHash::new(site.base_path.clone(), site.config.theme.clone()),
    );

    Ok(())
}

/// Functions filled once we have parsed all the pages/sections only, so not available in shortcodes

@@ -1,7 +1,9 @@
#![allow(dead_code)]
use std::collections::HashMap;
use std::env;
use std::path::PathBuf;
use std::path::{Path, PathBuf};

use path_slash::PathExt;
use site::Site;
use tempfile::{tempdir, TempDir};

@@ -67,3 +69,274 @@
    site.build().expect("Couldn't build the site");
    (site, tmp_dir, public.clone())
}

/// Finds the unified path (eg. _index.fr.md -> _index.md) and
/// potential language (if not default) associated with a path.
/// When the path is not a markdown file (.md), None is returned.
/// Strips base_dir from the start of path.
fn find_lang_for(entry: &Path, base_dir: &Path) -> Option<(String, Option<String>)> {
    let ext = entry.extension();
    if ext.is_none() {
        // Not a markdown file (no extension), skip
        return None;
    }
    let ext = ext.unwrap();
    if ext != "md" {
        // Not a markdown file, skip
        return None;
    }
    let mut no_ext = entry.to_path_buf();
    let stem = entry.file_stem().unwrap();
    // Remove .md
    no_ext.pop();
    no_ext.push(stem);
    if let Some(lang) = no_ext.extension() {
        let stem = no_ext.file_stem();
        // Remove lang
        let mut unified_path = no_ext.clone();
        unified_path.pop();
        // Re-add the stem with .md appended
        unified_path.push(&format!("{}.md", stem.unwrap().to_str().unwrap()));
        let unified_path_str = match unified_path.strip_prefix(base_dir) {
            Ok(path_without_prefix) => path_without_prefix.to_slash_lossy(),
            _ => unified_path.to_slash_lossy(),
        };
        return Some((unified_path_str, Some(lang.to_str().unwrap().into())));
    } else {
        // No lang, return no_ext directly
        let mut no_ext_string = match no_ext.strip_prefix(base_dir) {
            Ok(path_without_prefix) => path_without_prefix.to_slash_lossy(),
            _ => no_ext.to_slash_lossy(),
        };
        no_ext_string.push_str(".md");
        return Some((no_ext_string, None));
    }
}

/// Recursively process a folder to find translations, returning a list of every language
/// translated for every page found. Translations for the default language are stored as "DEFAULT".
/// TODO: This implementation does not support files with a dot inside (foo.bar.md where bar is
/// not a language), because that requires knowing which languages are enabled in the config, and it's
/// unclear how to distinguish (and what to do) between a disabled language and "legit" dots
pub fn add_translations_from(
    dir: &Path,
    strip: &Path,
    default: &str,
) -> HashMap<String, Vec<String>> {
    let mut expected: HashMap<String, Vec<String>> = HashMap::new();
    for entry in dir.read_dir().expect("Failed to read dir") {
        let entry = entry.expect("Failed to read entry").path();
        if entry.is_dir() {
            // Recurse
            expected.extend(add_translations_from(&entry, strip, default));
        }
        if let Some((unified_path, lang)) = find_lang_for(&entry, strip) {
            if let Some(index) = expected.get_mut(&unified_path) {
                // Insert found lang for rel_path, or DEFAULT otherwise
                index.push(lang.unwrap_or(default.to_string()));
            } else {
                // rel_path is not registered yet, insert it in expected
                expected.insert(unified_path, vec![lang.unwrap_or(default.to_string())]);
            }
        } else {
            // Not a markdown file, skip
            continue;
        }
    }
    return expected;
}

/// Calculate output path for Markdown files,
/// respecting page/section `path` fields, but not aliases (yet).
/// Returns a mapping of unified Markdown paths -> translations
pub fn find_expected_translations(
    name: &str,
    default_language: &str,
) -> HashMap<String, Vec<String>> {
    let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();
    path.push(name);
    path.push("content");

    // Find expected translations from content folder
    // We remove BASEDIR/content/ from the keys so they match paths in library
    let mut strip_prefix = path.to_str().unwrap().to_string();
    strip_prefix.push('/');
    add_translations_from(&path, &path, default_language)
}

/// Checks whether a given permalink has a corresponding HTML page in the output folder
pub fn ensure_output_exists(outputdir: &Path, baseurl: &str, link: &str) -> bool {
    // Remove the baseurl as well as the remaining /, otherwise path will be interpreted
    // as absolute.
    let trimmed_url = link.trim_start_matches(baseurl).trim_start_matches('/');
    let path = outputdir.join(trimmed_url);
    path.exists()
}

pub struct Translation {
    path: String,
    lang: String,
    permalink: String,
}

pub struct Translations {
    trans: Vec<Translation>,
}

impl Translations {
    pub fn for_path(site: &Site, path: &str) -> Translations {
        let library = site.library.clone();
        let library = library.read().unwrap();
        // WORKAROUND because site.content_path is private
        let unified_path = if let Some(page) =
            library.get_page(site.base_path.join("content").join(path))
        {
            page.file.canonical.clone()
        } else if let Some(section) = library.get_section(site.base_path.join("content").join(path))
        {
            section.file.canonical.clone()
        } else {
            panic!("No such page or section: {}", path);
        };

        let translations = library.translations.get(&unified_path);
        if translations.is_none() {
            println!(
                "Page canonical path {} is not in library translations",
                unified_path.display()
            );
            panic!("Library error");
        }

        let translations = translations
            .unwrap()
            .iter()
            .map(|key| {
                // Are we looking for a section? (no file extension here)
                if unified_path.ends_with("_index") {
                    //library.get_section_by_key(*key).file.relative.to_string()
                    let section = library.get_section_by_key(*key);
                    Translation {
                        lang: section.lang.clone(),
                        permalink: section.permalink.clone(),
                        path: section.file.path.to_str().unwrap().to_string(),
                    }
                } else {
                    let page = library.get_page_by_key(*key);
                    Translation {
                        lang: page.lang.clone(),
                        permalink: page.permalink.clone(),
                        path: page.file.path.to_str().unwrap().to_string(),
                    }
                    //library.get_page_by_key(*key).file.relative.to_string()
                }
            })
            .collect();

        Translations { trans: translations }
    }

    pub fn languages(&self) -> Vec<String> {
        let mut lang: Vec<String> = self.trans.iter().map(|x| x.lang.clone()).collect();
        lang.sort_unstable();
        lang
    }

    pub fn permalinks(&self) -> Vec<String> {
        let mut links: Vec<String> = self.trans.iter().map(|x| x.permalink.clone()).collect();
        links.sort_unstable();
        links
    }

    pub fn paths(&self) -> Vec<String> {
        let mut paths: Vec<String> = self.trans.iter().map(|x| x.path.clone()).collect();
        paths.sort_unstable();
        paths
    }
}

/// Find translations in the library for a single path
fn library_translations_lang_for(site: &Site, path: &str) -> Vec<String> {
    let library_translations = Translations::for_path(site, path);
    library_translations.languages()
}

/// This function takes a list of translations generated by find_expected_translations(),
/// a site instance, and the path of a page, and checks that translations are the same on both sides
pub fn ensure_translations_match(
    translations: &HashMap<String, Vec<String>>,
    site: &Site,
    path: &str,
) -> bool {
    let library_page_translations = library_translations_lang_for(site, path);

    if let Some((unified_path, _lang)) = find_lang_for(&PathBuf::from(path), Path::new("")) {
        if let Some(page_translations) = translations.get(&unified_path) {
            // We order both claimed translations so we can compare them
            // library_page_translations is already ordered
            let mut page_translations = page_translations.clone();
            page_translations.sort_unstable();

            if page_translations != library_page_translations {
                // Some translations don't match, print some context
                // There is a special case where the index page may be autogenerated for a lang
                // by zola so if we are looking at the index page, library may contain more (not
                // less) languages than our tests.
                if unified_path == "_index.md" {
                    for lang in &page_translations {
                        if !library_page_translations.contains(lang) {
                            println!(
                                "Library is missing language: {} for page {}",
                                lang, unified_path
                            );
                            return false;
                        }
                    }
                    // All languages from Markdown were found. We don't care if the library
                    // auto-generated more.
                    return true;
                }
                println!("Translations don't match for {}:", path);
                println!(" - library: {:?}", library_page_translations);
                println!(" - tests: {:?}", page_translations);
                return false;
            }
            // Everything went well
            return true;
        } else {
            // Should never happen because even the default language counts as a translation
            // Reaching here means either there is a logic error in the tests themselves,
            // or the permalinks contained a page which does not exist for some reason
            unreachable!("Translations not found for {}", unified_path);
        }
    } else {
        // None means the page does not end with .md. Only markdown pages should be passed to this function.
        // Maybe a non-markdown path was found in site's permalinks?
        unreachable!("{} is not a markdown page (extension not .md)", path);
    }
}

/// For a given URL (from the permalinks), find the corresponding output page
/// and ensure all translation permalinks are linked inside
pub fn ensure_translations_in_output(site: &Site, path: &str, permalink: &str) -> bool {
    let library_page_translations = Translations::for_path(site, path);
    let translations_permalinks = library_page_translations.permalinks();

    let output_path = permalink.trim_start_matches(&site.config.base_url);
    // Strip the leading / so it's not interpreted as an absolute path
    let output_path = output_path.trim_start_matches('/');
    let output_path = site.output_path.join(output_path);

    let output = std::fs::read_to_string(&output_path)
        .expect(&format!("Output not found in {}", output_path.display()));

    for permalink in &translations_permalinks {
        if !output.contains(permalink) {
            println!("Page {} has translation {}, but it was not found in output", path, permalink);
            return false;
        }
    }

    return true;
}

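As a reader aid (not part of the commit), here is the behaviour the `find_lang_for` helper above encodes, written as assertions; it assumes the helper is in scope:

use std::path::Path;

// Sketch of find_lang_for's contract:
// - a translated file maps to its unified path plus its language
// - a default-language file maps to itself with no language
// - non-markdown files are skipped
fn find_lang_for_sketch() {
    let base = Path::new("content");
    assert_eq!(
        find_lang_for(Path::new("content/blog/_index.fr.md"), base),
        Some(("blog/_index.md".to_string(), Some("fr".to_string())))
    );
    assert_eq!(
        find_lang_for(Path::new("content/blog/hello.md"), base),
        Some(("blog/hello.md".to_string(), None))
    );
    assert_eq!(find_lang_for(Path::new("content/blog/image.png"), base), None);
}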
@@ -550,7 +550,6 @@ fn can_build_site_with_pagination_for_taxonomy() {
        paginate_by: Some(2),
        paginate_path: None,
        feed: true,
        lang: site.config.default_language.clone(),
    });
    site.load().unwrap();
    {

@@ -761,8 +760,11 @@ fn can_get_hash_for_static_files() {
        "index.html",
        "src=\"https://replace-this-with-your-url.com/scripts/hello.js\""
    ));
    assert!(file_contains!(public, "index.html",
        "integrity=\"sha384-01422f31eaa721a6c4ac8c6fa09a27dd9259e0dfcf3c7593d7810d912a9de5ca2f582df978537bcd10f76896db61fbb9\""));
    assert!(file_contains!(
        public,
        "index.html",
        "integrity=\"sha384-AUIvMeqnIabErIxvoJon3ZJZ4N/PPHWT14ENkSqd5covWC35eFN7zRD3aJbbYfu5\""
    ));
}

#[test]

@@ -2,7 +2,7 @@ mod common;

use std::env;

use common::build_site;
use common::*;
use site::Site;

#[test]

@@ -14,7 +14,7 @@ fn can_parse_multilingual_site() {
    site.load().unwrap();

    let library = site.library.read().unwrap();
    assert_eq!(library.pages().len(), 10);
    assert_eq!(library.pages().len(), 11);
    assert_eq!(library.sections().len(), 6);

    // default index sections

@@ -174,3 +174,27 @@ fn can_build_multilingual_site() {
    assert!(file_exists!(public, "search_index.it.js"));
    assert!(!file_exists!(public, "search_index.fr.js"));
}

#[test]
fn correct_translations_on_all_pages() {
    let (site, _tmp_dir, public) = build_site("test_site_i18n");

    assert!(public.exists());

    let translations = find_expected_translations("test_site_i18n", &site.config.default_language);

    for (path, link) in &site.permalinks {
        // link ends with / and does not include index.html, so append it
        let link = format!("{}index.html", link);

        // Ensure every permalink has produced an HTML page
        assert!(ensure_output_exists(&public, &site.config.base_url, &link));

        // Ensure translations expected here match those in the library
        // TODO: add a constructive error message inside the function
        assert!(ensure_translations_match(&translations, &site, &path));

        // Ensure the output file contains all translation URLs
        assert!(ensure_translations_in_output(&site, &path, &link));
    }
}

@@ -10,12 +10,13 @@ base64 = "0.13"
lazy_static = "1"
toml = "0.5"
csv = "1"
image = "0.23"
serde_json = "1.0"
serde = "1"
serde_json = "1"
serde_derive = "1"
sha2 = "0.9"
url = "2"
nom-bibtex = "0.3"
svg_metadata = "0.4.1"
num-format = "0.4"

errors = { path = "../errors" }
utils = { path = "../utils" }

@@ -30,4 +31,5 @@ default-features = false
features = ["blocking", "rustls-tls"]

[dev-dependencies]
mockito = "0.28"
mockito = "0.30"
tempfile = "3"

BIN components/templates/gutenberg.jpg (new file, 47 KiB; binary file not shown)

@@ -14,7 +14,7 @@
    </link>
    <description>{{ config.description }}</description>
    <generator>Zola</generator>
    <language>{{ config.default_language }}</language>
    <language>{{ lang }}</language>
    <atom:link href="{{ feed_url | safe }}" rel="self" type="application/rss+xml"/>
    <lastBuildDate>{{ last_updated | date(format="%a, %d %b %Y %H:%M:%S %z") }}</lastBuildDate>
    {%- for page in pages %}

@@ -1,3 +1,3 @@
<div {% if class %}class="{{class}}"{% endif %}>
    <iframe src="https://www.youtube-nocookie.com/embed/{{id}}{% if autoplay %}?autoplay=1{% endif %}" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>
    <iframe src="https://www.youtube-nocookie.com/embed/{{id}}{% if playlist %}?list={{playlist}}{% if autoplay %}&autoplay=1{% endif %}{% elif autoplay %}?autoplay=1{% endif %}" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>
</div>

@@ -1,25 +1,42 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::hash::BuildHasher;
use std::path::PathBuf;

use base64::{decode, encode};
use config::Config;
use rendering::{render_content, RenderContext};
use tera::{to_value, try_get_value, Filter as TeraFilter, Result as TeraResult, Value};
use tera::{
    to_value, try_get_value, Error as TeraError, Filter as TeraFilter, Result as TeraResult, Tera,
    Value,
};

use crate::load_tera;

#[derive(Debug)]
pub struct MarkdownFilter {
    config: Config,
    permalinks: HashMap<String, String>,
    tera: Tera,
}

impl MarkdownFilter {
    pub fn new(config: Config) -> Self {
        Self { config }
    pub fn new(
        path: PathBuf,
        config: Config,
        permalinks: HashMap<String, String>,
    ) -> TeraResult<Self> {
        let tera = load_tera(&path, &config).map_err(tera::Error::msg)?;
        Ok(Self { config, permalinks, tera })
    }
}

impl TeraFilter for MarkdownFilter {
    fn filter(&self, value: &Value, args: &HashMap<String, Value>) -> TeraResult<Value> {
        let context = RenderContext::from_config(&self.config);
        let mut context = RenderContext::from_config(&self.config);
        context.permalinks = Cow::Borrowed(&self.permalinks);
        context.tera = Cow::Borrowed(&self.tera);

        let s = try_get_value!("markdown", "value", String, value);
        let inline = match args.get("inline") {
            Some(val) => try_get_value!("markdown", "inline", bool, val),

@@ -58,18 +75,49 @@ pub fn base64_decode<S: BuildHasher>(
    Ok(to_value(&String::from_utf8(decode(s.as_bytes()).unwrap()).unwrap()).unwrap())
}

#[derive(Debug)]
pub struct NumFormatFilter {
    default_language: String,
}

impl NumFormatFilter {
    pub fn new<S: Into<String>>(default_language: S) -> Self {
        Self { default_language: default_language.into() }
    }
}

impl TeraFilter for NumFormatFilter {
    fn filter(&self, value: &Value, args: &HashMap<String, Value>) -> TeraResult<Value> {
        use num_format::{Locale, ToFormattedString};

        let num = try_get_value!("num_format", "value", i64, value);
        let locale = match args.get("locale") {
            Some(locale) => try_get_value!("num_format", "locale", String, locale),
            None => self.default_language.clone(),
        };
        let locale = Locale::from_name(&locale).map_err(|_| {
            TeraError::msg(format!(
                "Filter `num_format` was called with an invalid `locale` argument: `{}`.",
                locale
            ))
        })?;
        Ok(to_value(num.to_formatted_string(&locale)).unwrap())
    }
}

#[cfg(test)]
mod tests {
    use std::collections::HashMap;
    use std::{collections::HashMap, path::PathBuf};

    use tera::{to_value, Filter};

    use super::{base64_decode, base64_encode, MarkdownFilter};
    use super::{base64_decode, base64_encode, MarkdownFilter, NumFormatFilter};
    use config::Config;

    #[test]
    fn markdown_filter() {
        let result = MarkdownFilter::new(Config::default())
        let result = MarkdownFilter::new(PathBuf::new(), Config::default(), HashMap::new())
            .unwrap()
            .filter(&to_value(&"# Hey").unwrap(), &HashMap::new());
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), to_value(&"<h1 id=\"hey\">Hey</h1>\n").unwrap());

@@ -79,10 +127,11 @@ mod tests {
    fn markdown_filter_inline() {
        let mut args = HashMap::new();
        args.insert("inline".to_string(), to_value(true).unwrap());
        let result = MarkdownFilter::new(Config::default()).filter(
            &to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(),
            &args,
        );
        let result =
            MarkdownFilter::new(PathBuf::new(), Config::default(), HashMap::new()).unwrap().filter(
                &to_value(&"Using `map`, `filter`, and `fold` instead of `for`").unwrap(),
                &args,
            );
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), to_value(&"Using <code>map</code>, <code>filter</code>, and <code>fold</code> instead of <code>for</code>").unwrap());
    }

@@ -92,18 +141,19 @@ mod tests {
    fn markdown_filter_inline_tables() {
        let mut args = HashMap::new();
        args.insert("inline".to_string(), to_value(true).unwrap());
        let result = MarkdownFilter::new(Config::default()).filter(
            &to_value(
                &r#"
        let result =
            MarkdownFilter::new(PathBuf::new(), Config::default(), HashMap::new()).unwrap().filter(
                &to_value(
                    &r#"
|id|author_id| timestamp_created|title |content |
|-:|--------:|-----------------------:|:---------------------|:-----------------|
| 1| 1|2018-09-05 08:03:43.141Z|How to train your ORM |Badly written blog|
| 2| 1|2018-08-22 13:11:50.050Z|How to bake a nice pie|Badly written blog|
"#,
            )
            .unwrap(),
            &args,
        );
                )
                .unwrap(),
                &args,
            );
        assert!(result.is_ok());
        assert!(result.unwrap().as_str().unwrap().contains("<table>"));
    }

@@ -117,15 +167,33 @@ mod tests {
        config.markdown.external_links_target_blank = true;

        let md = "Hello <https://google.com> :smile: ...";
        let result =
            MarkdownFilter::new(config.clone()).filter(&to_value(&md).unwrap(), &HashMap::new());
        let result = MarkdownFilter::new(PathBuf::new(), config.clone(), HashMap::new())
            .unwrap()
            .filter(&to_value(&md).unwrap(), &HashMap::new());
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), to_value(&"<p>Hello <a rel=\"noopener\" target=\"_blank\" href=\"https://google.com\">https://google.com</a> 😄 …</p>\n").unwrap());

        let md = "```py\ni=0\n```";
        let result = MarkdownFilter::new(config).filter(&to_value(&md).unwrap(), &HashMap::new());
        let result = MarkdownFilter::new(PathBuf::new(), config, HashMap::new())
            .unwrap()
            .filter(&to_value(&md).unwrap(), &HashMap::new());
        assert!(result.is_ok());
        assert!(result.unwrap().as_str().unwrap().contains("<pre style"));
        assert!(result.unwrap().as_str().unwrap().contains("style"));
    }

    #[test]
    fn markdown_filter_can_use_internal_links() {
        let mut permalinks = HashMap::new();
        permalinks.insert("blog/_index.md".to_string(), "/foo/blog".to_string());
        let md = "Hello. Check out [my blog](@/blog/_index.md)!";
        let result = MarkdownFilter::new(PathBuf::new(), Config::default(), permalinks)
            .unwrap()
            .filter(&to_value(&md).unwrap(), &HashMap::new());
        assert!(result.is_ok());
        assert_eq!(
            result.unwrap(),
            to_value(&"<p>Hello. Check out <a href=\"/foo/blog\">my blog</a>!</p>\n").unwrap()
        );
    }

    #[test]

@@ -166,4 +234,44 @@ mod tests {
        assert_eq!(result.unwrap(), to_value(expected).unwrap());
    }
}

    #[test]
    fn num_format_filter() {
        let tests = vec![
            (100, "100"),
            (1_000, "1,000"),
            (10_000, "10,000"),
            (100_000, "100,000"),
            (1_000_000, "1,000,000"),
        ];

        for (input, expected) in tests {
            let args = HashMap::new();
            let result = NumFormatFilter::new("en").filter(&to_value(input).unwrap(), &args);
            let result = dbg!(result);
            assert!(result.is_ok());
            assert_eq!(result.unwrap(), to_value(expected).unwrap());
        }
    }

    #[test]
    fn num_format_filter_with_locale() {
        let tests = vec![
            ("en", 1_000_000, "1,000,000"),
            ("en-IN", 1_000_000, "10,00,000"),
            // Note:
            // U+202F is the "NARROW NO-BREAK SPACE" code point.
            // When displayed to the screen, it looks like a space.
            ("fr", 1_000_000, "1\u{202f}000\u{202f}000"),
        ];

        for (locale, input, expected) in tests {
            let mut args = HashMap::new();
            args.insert("locale".to_string(), to_value(locale).unwrap());
            let result = NumFormatFilter::new("en").filter(&to_value(input).unwrap(), &args);
            let result = dbg!(result);
            assert!(result.is_ok());
            assert_eq!(result.unwrap(), to_value(expected).unwrap());
        }
    }
}

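The markdown filter now owns a full Tera instance (built with `load_tera`) plus the site permalinks, which is what lets `| markdown` resolve `@/...` internal links and render shortcodes. A hedged sketch of the new constructor in use; `base_path` is assumed to point at a real site checkout, and `MarkdownFilter`/`Config` come from the crates shown in the diff:

use std::collections::HashMap;
use std::path::PathBuf;

use config::Config;
use tera::{to_value, Filter};

// Sketch: build the filter with a permalinks map so @/ links resolve.
// With a real site at base_path, load_tera() also picks up
// templates/shortcodes/*, so shortcode calls in filtered strings render.
fn markdown_filter_sketch(base_path: PathBuf) -> tera::Result<()> {
    let mut permalinks = HashMap::new();
    permalinks.insert("blog/_index.md".to_string(), "/blog/".to_string());

    let filter = MarkdownFilter::new(base_path, Config::default(), permalinks)?;
    let html = filter.filter(
        &to_value("Check out [the blog](@/blog/_index.md)!").unwrap(),
        &HashMap::new(),
    )?;
    assert!(html.as_str().unwrap().contains("href=\"/blog/\""));
    Ok(())
}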
components/templates/src/global_fns/content.rs (new file, 332 lines)

@@ -0,0 +1,332 @@
use library::{Library, Taxonomy};
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::{Arc, RwLock};
use tera::{from_value, to_value, Function as TeraFn, Result, Value};
use utils::slugs::{slugify_paths, SlugifyStrategy};

#[derive(Debug)]
pub struct GetTaxonomyUrl {
    taxonomies: HashMap<String, HashMap<String, String>>,
    default_lang: String,
    slugify: SlugifyStrategy,
}

impl GetTaxonomyUrl {
    pub fn new(default_lang: &str, all_taxonomies: &[Taxonomy], slugify: SlugifyStrategy) -> Self {
        let mut taxonomies = HashMap::new();
        for taxo in all_taxonomies {
            let mut items = HashMap::new();
            for item in &taxo.items {
                items.insert(slugify_paths(&item.name.clone(), slugify), item.permalink.clone());
            }
            taxonomies.insert(format!("{}-{}", taxo.kind.name, taxo.lang), items);
        }
        Self { taxonomies, default_lang: default_lang.to_string(), slugify }
    }
}
impl TeraFn for GetTaxonomyUrl {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let kind = required_arg!(
            String,
            args.get("kind"),
            "`get_taxonomy_url` requires a `kind` argument with a string value"
        );
        let name = required_arg!(
            String,
            args.get("name"),
            "`get_taxonomy_url` requires a `name` argument with a string value"
        );
        let lang =
            optional_arg!(String, args.get("lang"), "`get_taxonomy_url`: `lang` must be a string")
                .unwrap_or_else(|| self.default_lang.clone());

        let container = match self.taxonomies.get(&format!("{}-{}", kind, lang)) {
            Some(c) => c,
            None => {
                return Err(format!(
                    "`get_taxonomy_url` received an unknown taxonomy as kind: {}",
                    kind
                )
                .into());
            }
        };

        if let Some(permalink) = container.get(&slugify_paths(&name, self.slugify)) {
            return Ok(to_value(permalink).unwrap());
        }

        Err(format!("`get_taxonomy_url`: couldn't find `{}` in `{}` taxonomy", name, kind).into())
    }

    fn is_safe(&self) -> bool {
        true
    }
}

#[derive(Debug)]
pub struct GetPage {
    base_path: PathBuf,
    library: Arc<RwLock<Library>>,
}
impl GetPage {
    pub fn new(base_path: PathBuf, library: Arc<RwLock<Library>>) -> Self {
        Self { base_path: base_path.join("content"), library }
    }
}
impl TeraFn for GetPage {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let path = required_arg!(
            String,
            args.get("path"),
            "`get_page` requires a `path` argument with a string value"
        );
        let full_path = self.base_path.join(&path);
        let library = self.library.read().unwrap();
        match library.get_page(&full_path) {
            Some(p) => Ok(to_value(p.to_serialized(&library)).unwrap()),
            None => Err(format!("Page `{}` not found.", path).into()),
        }
    }
}

#[derive(Debug)]
pub struct GetSection {
    base_path: PathBuf,
    library: Arc<RwLock<Library>>,
}
impl GetSection {
    pub fn new(base_path: PathBuf, library: Arc<RwLock<Library>>) -> Self {
        Self { base_path: base_path.join("content"), library }
    }
}
impl TeraFn for GetSection {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let path = required_arg!(
            String,
            args.get("path"),
            "`get_section` requires a `path` argument with a string value"
        );

        let metadata_only = args
            .get("metadata_only")
            .map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));

        let full_path = self.base_path.join(&path);
        let library = self.library.read().unwrap();

        match library.get_section(&full_path) {
            Some(s) => {
                if metadata_only {
                    Ok(to_value(s.to_serialized_basic(&library)).unwrap())
                } else {
                    Ok(to_value(s.to_serialized(&library)).unwrap())
                }
            }
            None => Err(format!("Section `{}` not found.", path).into()),
        }
    }
}

#[derive(Debug)]
pub struct GetTaxonomy {
    library: Arc<RwLock<Library>>,
    taxonomies: HashMap<String, Taxonomy>,
    default_lang: String,
}
impl GetTaxonomy {
    pub fn new(
        default_lang: &str,
        all_taxonomies: Vec<Taxonomy>,
        library: Arc<RwLock<Library>>,
    ) -> Self {
        let mut taxonomies = HashMap::new();
        for taxo in all_taxonomies {
            taxonomies.insert(format!("{}-{}", taxo.kind.name, taxo.lang), taxo);
        }
        Self { taxonomies, library, default_lang: default_lang.to_string() }
    }
}
impl TeraFn for GetTaxonomy {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let kind = required_arg!(
            String,
            args.get("kind"),
            "`get_taxonomy` requires a `kind` argument with a string value"
        );

        let lang =
            optional_arg!(String, args.get("lang"), "`get_taxonomy`: `lang` must be a string")
                .unwrap_or_else(|| self.default_lang.clone());

        match self.taxonomies.get(&format!("{}-{}", kind, lang)) {
            Some(t) => Ok(to_value(t.to_serialized(&self.library.read().unwrap())).unwrap()),
            None => {
                Err(format!("`get_taxonomy` received an unknown taxonomy as kind: {}", kind).into())
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use config::{Config, Taxonomy as TaxonomyConfig};
    use library::TaxonomyItem;

    #[test]
    fn can_get_taxonomy() {
        let mut config = Config::default();
        config.slugify.taxonomies = SlugifyStrategy::On;
        let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
        let taxo_config_fr =
            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
        let library = Arc::new(RwLock::new(Library::new(0, 0, false)));
        let tag = TaxonomyItem::new(
            "Programming",
            &config.default_language,
            "tags",
            &config,
            vec![],
            &library.read().unwrap(),
        );
        let tag_fr = TaxonomyItem::new(
            "Programmation",
            "fr",
            "tags",
            &config,
            vec![],
            &library.read().unwrap(),
        );
        let tags = Taxonomy {
            kind: taxo_config,
            lang: config.default_language.clone(),
            slug: "tags".to_string(),
            permalink: "/tags/".to_string(),
            items: vec![tag],
        };
        let tags_fr = Taxonomy {
            kind: taxo_config_fr,
            lang: "fr".to_owned(),
            slug: "tags".to_string(),
            permalink: "/fr/tags/".to_string(),
            items: vec![tag_fr],
        };

        let taxonomies = vec![tags.clone(), tags_fr.clone()];
        let static_fn =
            GetTaxonomy::new(&config.default_language, taxonomies.clone(), library.clone());
        // can find it correctly
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        let res = static_fn.call(&args).unwrap();
        let res_obj = res.as_object().unwrap();
        assert_eq!(res_obj["kind"], to_value(tags.kind).unwrap());
        assert_eq!(res_obj["items"].clone().as_array().unwrap().len(), 1);
        assert_eq!(
            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["name"],
            Value::String("Programming".to_string())
        );
        assert_eq!(
            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["slug"],
            Value::String("programming".to_string())
        );
        assert_eq!(
            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()
                ["permalink"],
            Value::String("http://a-website.com/tags/programming/".to_string())
        );
        assert_eq!(
            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["pages"],
            Value::Array(vec![])
        );
        // Works with other languages as well
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        args.insert("lang".to_string(), to_value("fr").unwrap());
        let res = static_fn.call(&args).unwrap();
        let res_obj = res.as_object().unwrap();
        assert_eq!(res_obj["kind"], to_value(tags_fr.kind).unwrap());
        assert_eq!(res_obj["items"].clone().as_array().unwrap().len(), 1);
        assert_eq!(
            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["name"],
            Value::String("Programmation".to_string())
        );

        // and errors if it can't find it
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("something-else").unwrap());
        assert!(static_fn.call(&args).is_err());
    }

    #[test]
    fn can_get_taxonomy_url() {
        let mut config = Config::default();
        config.slugify.taxonomies = SlugifyStrategy::On;
        let taxo_config = TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
        let taxo_config_fr =
            TaxonomyConfig { name: "tags".to_string(), ..TaxonomyConfig::default() };
        let library = Library::new(0, 0, false);
        let tag = TaxonomyItem::new(
            "Programming",
            &config.default_language,
            "tags",
            &config,
            vec![],
            &library,
        );
        let tag_fr = TaxonomyItem::new("Programmation", "fr", "tags", &config, vec![], &library);
        let tags = Taxonomy {
            kind: taxo_config,
            lang: config.default_language.clone(),
            slug: "tags".to_string(),
            permalink: "/tags/".to_string(),
            items: vec![tag],
        };
        let tags_fr = Taxonomy {
            kind: taxo_config_fr,
            lang: "fr".to_owned(),
            slug: "tags".to_string(),
            permalink: "/fr/tags/".to_string(),
            items: vec![tag_fr],
        };

        let taxonomies = vec![tags.clone(), tags_fr.clone()];
        let static_fn =
            GetTaxonomyUrl::new(&config.default_language, &taxonomies, config.slugify.taxonomies);

        // can find it correctly
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        args.insert("name".to_string(), to_value("Programming").unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            to_value("http://a-website.com/tags/programming/").unwrap()
        );

        // can find it correctly with inconsistent capitalisation
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        args.insert("name".to_string(), to_value("programming").unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            to_value("http://a-website.com/tags/programming/").unwrap()
        );

        // works with other languages
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        args.insert("name".to_string(), to_value("Programmation").unwrap());
        args.insert("lang".to_string(), to_value("fr").unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            to_value("http://a-website.com/fr/tags/programmation/").unwrap()
        );

        // and errors if it can't find it
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        args.insert("name".to_string(), to_value("random").unwrap());
        assert!(static_fn.call(&args).is_err());
    }
}

446
components/templates/src/global_fns/files.rs
Normal file
446
components/templates/src/global_fns/files.rs
Normal file
|
@ -0,0 +1,446 @@
|
|||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::{fs, io, result};
|
||||
|
||||
use crate::global_fns::helpers::search_for_file;
|
||||
use base64::encode as encode_b64;
|
||||
use config::Config;
|
||||
use sha2::{digest, Sha256, Sha384, Sha512};
|
||||
use tera::{from_value, to_value, Function as TeraFn, Result, Value};
|
||||
use utils::site::resolve_internal_link;
|
||||
|
||||
fn compute_file_hash<D: digest::Digest>(
|
||||
mut file: fs::File,
|
||||
as_base64: bool,
|
||||
) -> result::Result<String, io::Error>
|
||||
where
|
||||
digest::Output<D>: core::fmt::LowerHex,
|
||||
D: std::io::Write,
|
||||
{
|
||||
let mut hasher = D::new();
|
||||
io::copy(&mut file, &mut hasher)?;
|
||||
if as_base64 {
|
||||
Ok(encode_b64(hasher.finalize()))
|
||||
} else {
|
||||
Ok(format!("{:x}", hasher.finalize()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct GetUrl {
|
||||
base_path: PathBuf,
|
||||
config: Config,
|
||||
permalinks: HashMap<String, String>,
|
||||
}
|
||||
|
||||
impl GetUrl {
|
||||
pub fn new(base_path: PathBuf, config: Config, permalinks: HashMap<String, String>) -> Self {
|
||||
Self { base_path, config, permalinks }
|
||||
}
|
||||
}
|
||||
|
||||
fn make_path_with_lang(path: String, lang: &str, config: &Config) -> Result<String> {
|
||||
if lang == config.default_language {
|
||||
return Ok(path);
|
||||
}
|
||||
|
||||
if !config.other_languages().contains_key(lang) {
|
||||
return Err(
|
||||
format!("`{}` is not an authorized language (check config.languages).", lang).into()
|
||||
);
|
||||
}
|
||||
|
||||
let mut split_path: Vec<String> = path.split('.').map(String::from).collect();
|
||||
let ilast = split_path.len() - 1;
|
||||
split_path[ilast] = format!("{}.{}", lang, split_path[ilast]);
|
||||
Ok(split_path.join("."))
|
||||
}
|
||||
|
||||
impl TeraFn for GetUrl {
|
||||
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
|
||||
let path = required_arg!(
|
||||
String,
|
||||
args.get("path"),
|
||||
"`get_url` requires a `path` argument with a string value"
|
||||
);
|
||||
let cachebust = optional_arg!(
|
||||
bool,
|
||||
args.get("cachebust"),
|
||||
"`get_url`: `cachebust` must be a boolean (true or false)"
|
||||
)
|
||||
.unwrap_or(false);
|
||||
let trailing_slash = optional_arg!(
|
||||
bool,
|
||||
args.get("trailing_slash"),
|
||||
"`get_url`: `trailing_slash` must be a boolean (true or false)"
|
||||
)
|
||||
.unwrap_or(false);
|
||||
let lang = optional_arg!(String, args.get("lang"), "`get_url`: `lang` must be a string.")
|
||||
.unwrap_or_else(|| self.config.default_language.clone());
|
||||
|
||||
// if it starts with @/, resolve it as an internal link
|
||||
if path.starts_with("@/") {
|
||||
let path_with_lang = match make_path_with_lang(path, &lang, &self.config) {
|
||||
Ok(x) => x,
|
||||
Err(e) => return Err(e),
|
||||
};
|
||||
|
||||
match resolve_internal_link(&path_with_lang, &self.permalinks) {
|
||||
Ok(resolved) => Ok(to_value(resolved.permalink).unwrap()),
|
||||
Err(_) => Err(format!(
|
||||
"`get_url`: could not resolve URL for link `{}` not found.",
|
||||
path_with_lang
|
||||
)
|
||||
.into()),
|
||||
}
|
||||
} else {
|
||||
// anything else
|
||||
let mut segments = vec![];
|
||||
|
||||
if lang != self.config.default_language {
|
||||
segments.push(lang);
|
||||
};
|
||||
|
||||
segments.push(path);
|
||||
|
||||
let path_with_lang = segments.join("/");
|
||||
|
||||
let mut permalink = self.config.make_permalink(&path_with_lang);
|
||||
if !trailing_slash && permalink.ends_with('/') {
|
||||
permalink.pop(); // Removes the slash
|
||||
}
|
||||
|
||||
if cachebust {
|
||||
match search_for_file(&self.base_path, &path_with_lang, &self.config.theme)
|
||||
.map_err(|e| format!("`get_url`: {}", e))?
|
||||
.and_then(|(p, _)| fs::File::open(&p).ok())
|
||||
.and_then(|f| compute_file_hash::<Sha256>(f, false).ok())
|
||||
{
|
||||
Some(hash) => {
|
||||
permalink = format!("{}?h={}", permalink, hash);
|
||||
}
|
||||
None => {
|
||||
return Err(format!(
|
||||
"`get_url`: Could not find or open file {}",
|
||||
path_with_lang
|
||||
)
|
||||
.into())
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Ok(to_value(permalink).unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
fn is_safe(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct GetFileHash {
|
||||
base_path: PathBuf,
|
||||
theme: Option<String>,
|
||||
}
|
||||
impl GetFileHash {
|
||||
pub fn new(base_path: PathBuf, theme: Option<String>) -> Self {
|
||||
Self { base_path, theme }
|
||||
}
|
||||
}
|
||||
|
||||
impl TeraFn for GetFileHash {
|
||||
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
|
||||
let path = required_arg!(
|
||||
String,
|
||||
args.get("path"),
|
||||
"`get_file_hash` requires a `path` argument with a string value"
|
||||
);
|
||||
let sha_type = optional_arg!(
|
||||
u16,
|
||||
args.get("sha_type"),
|
||||
"`get_file_hash`: `sha_type` must be 256, 384 or 512"
|
||||
)
|
||||
.unwrap_or(384);
|
||||
let base64 = optional_arg!(
|
||||
bool,
|
||||
args.get("base64"),
|
||||
"`get_file_hash`: `base64` must be true or false"
|
||||
)
|
||||
.unwrap_or(true);
|
||||
|
||||
let file_path = match search_for_file(&self.base_path, &path, &self.theme)
|
||||
.map_err(|e| format!("`get_file_hash`: {}", e))?
|
||||
{
|
||||
Some((f, _)) => f,
|
||||
None => {
|
||||
return Err(format!("`get_file_hash`: Cannot find file: {}", path).into());
|
||||
}
|
||||
};
|
||||
|
||||
let f = match std::fs::File::open(file_path) {
|
||||
Ok(f) => f,
|
||||
Err(e) => {
|
||||
return Err(format!("File {} could not be open: {}", path, e).into());
|
||||
}
|
||||
};
|
||||
|
||||
let hash = match sha_type {
|
||||
256 => compute_file_hash::<Sha256>(f, base64),
|
||||
384 => compute_file_hash::<Sha384>(f, base64),
|
||||
512 => compute_file_hash::<Sha512>(f, base64),
|
||||
_ => return Err("`get_file_hash`: Invalid sha value".into()),
|
||||
};
|
||||
|
||||
match hash {
|
||||
Ok(digest) => Ok(to_value(digest).unwrap()),
|
||||
Err(_) => Err("`get_file_hash`: could no compute hash".into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{GetFileHash, GetUrl};
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use tempfile::{tempdir, TempDir};
|
||||
use tera::{to_value, Function};
|
||||
|
||||
use config::Config;
|
||||
use utils::fs::create_file;
|
||||
|
||||
fn create_temp_dir() -> TempDir {
|
||||
let dir = tempdir().unwrap();
|
||||
create_file(&dir.path().join("app.css"), "// Hello world!").expect("Failed to create file");
|
||||
dir
|
||||
}
|
||||
|
||||
const CONFIG_DATA: &str = r#"
|
||||
base_url = "https://remplace-par-ton-url.fr"
|
||||
default_language = "fr"
|
||||
|
||||
[translations]
|
||||
title = "Un titre"
|
||||
|
||||
[languages.en]
|
||||
[languages.en.translations]
|
||||
title = "A title"
|
||||
"#;
|
||||
|
||||
#[test]
|
||||
fn can_add_cachebust_to_url() {
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("app.css").unwrap());
|
||||
args.insert("cachebust".to_string(), to_value(true).unwrap());
|
||||
assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css?h=572e691dc68c3fcd653ae463261bdb38f35dc6f01715d9ce68799319dd158840");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_add_trailing_slashes() {
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("app.css").unwrap());
|
||||
args.insert("trailing_slash".to_string(), to_value(true).unwrap());
|
||||
assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css/");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_add_slashes_and_cachebust() {
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("app.css").unwrap());
|
||||
args.insert("trailing_slash".to_string(), to_value(true).unwrap());
|
||||
args.insert("cachebust".to_string(), to_value(true).unwrap());
|
||||
assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css/?h=572e691dc68c3fcd653ae463261bdb38f35dc6f01715d9ce68799319dd158840");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_link_to_some_static_file() {
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("app.css").unwrap());
|
||||
assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css");
|
||||
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("/app.css").unwrap());
|
||||
assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_when_language_not_available() {
|
||||
let config = Config::parse(CONFIG_DATA).unwrap();
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), config, HashMap::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("@/a_section/a_page.md").unwrap());
|
||||
args.insert("lang".to_string(), to_value("it").unwrap());
|
||||
let err = static_fn.call(&args).unwrap_err();
|
||||
assert_eq!(
|
||||
"`it` is not an authorized language (check config.languages).",
|
||||
format!("{}", err)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_get_url_with_default_language() {
|
||||
let mut permalinks = HashMap::new();
|
||||
permalinks.insert(
|
||||
"a_section/a_page.md".to_string(),
|
||||
"https://remplace-par-ton-url.fr/a_section/a_page/".to_string(),
|
||||
);
|
||||
permalinks.insert(
|
||||
"a_section/a_page.en.md".to_string(),
|
||||
"https://remplace-par-ton-url.fr/en/a_section/a_page/".to_string(),
|
||||
);
|
||||
let config = Config::parse(CONFIG_DATA).unwrap();
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), config, permalinks);
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("@/a_section/a_page.md").unwrap());
|
||||
args.insert("lang".to_string(), to_value("fr").unwrap());
|
||||
assert_eq!(
|
||||
static_fn.call(&args).unwrap(),
|
||||
"https://remplace-par-ton-url.fr/a_section/a_page/"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_get_url_with_other_language() {
|
||||
let config = Config::parse(CONFIG_DATA).unwrap();
|
||||
let mut permalinks = HashMap::new();
|
||||
permalinks.insert(
|
||||
"a_section/a_page.md".to_string(),
|
||||
"https://remplace-par-ton-url.fr/a_section/a_page/".to_string(),
|
||||
);
|
||||
permalinks.insert(
|
||||
"a_section/a_page.en.md".to_string(),
|
||||
"https://remplace-par-ton-url.fr/en/a_section/a_page/".to_string(),
|
||||
);
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), config, permalinks);
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("@/a_section/a_page.md").unwrap());
|
||||
args.insert("lang".to_string(), to_value("en").unwrap());
|
||||
assert_eq!(
|
||||
static_fn.call(&args).unwrap(),
|
||||
"https://remplace-par-ton-url.fr/en/a_section/a_page/"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_get_feed_url_with_default_language() {
|
||||
let config = Config::parse(CONFIG_DATA).unwrap();
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), config.clone(), HashMap::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value(config.feed_filename).unwrap());
|
||||
args.insert("lang".to_string(), to_value("fr").unwrap());
|
||||
assert_eq!(static_fn.call(&args).unwrap(), "https://remplace-par-ton-url.fr/atom.xml");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_get_feed_url_with_other_language() {
|
||||
let config = Config::parse(CONFIG_DATA).unwrap();
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), config.clone(), HashMap::new());
|
||||
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value(config.feed_filename).unwrap());
        args.insert("lang".to_string(), to_value("en").unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "https://remplace-par-ton-url.fr/en/atom.xml");
    }

    #[test]
    fn can_get_file_hash_sha256_no_base64() {
        let dir = create_temp_dir();
        let static_fn = GetFileHash::new(dir.into_path(), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        args.insert("sha_type".to_string(), to_value(256).unwrap());
        args.insert("base64".to_string(), to_value(false).unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            "572e691dc68c3fcd653ae463261bdb38f35dc6f01715d9ce68799319dd158840"
        );
    }

    #[test]
    fn can_get_file_hash_sha256_base64() {
        let dir = create_temp_dir();
        let static_fn = GetFileHash::new(dir.into_path(), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        args.insert("sha_type".to_string(), to_value(256).unwrap());
        args.insert("base64".to_string(), to_value(true).unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "Vy5pHcaMP81lOuRjJhvbOPNdxvAXFdnOaHmTGd0ViEA=");
    }

    #[test]
    fn can_get_file_hash_sha384_no_base64() {
        let dir = create_temp_dir();
        let static_fn = GetFileHash::new(dir.into_path(), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        args.insert("base64".to_string(), to_value(false).unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            "141c09bd28899773b772bbe064d8b718fa1d6f2852b7eafd5ed6689d26b74883b79e2e814cd69d5b52ab476aa284c414"
        );
    }

    #[test]
    fn can_get_file_hash_sha384() {
        let dir = create_temp_dir();
        let static_fn = GetFileHash::new(dir.into_path(), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            "FBwJvSiJl3O3crvgZNi3GPodbyhSt+r9XtZonSa3SIO3ni6BTNadW1KrR2qihMQU"
        );
    }

    #[test]
    fn can_get_file_hash_sha512_no_base64() {
        let dir = create_temp_dir();
        let static_fn = GetFileHash::new(dir.into_path(), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        args.insert("sha_type".to_string(), to_value(512).unwrap());
        args.insert("base64".to_string(), to_value(false).unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            "379dfab35123b9159d9e4e92dc90e2be44cf3c2f7f09b2e2df80a1b219b461de3556c93e1a9ceb3008e999e2d6a54b4f1d65ee9be9be63fa45ec88931623372f"
        );
    }

    #[test]
    fn can_get_file_hash_sha512() {
        let dir = create_temp_dir();
        let static_fn = GetFileHash::new(dir.into_path(), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        args.insert("sha_type".to_string(), to_value(512).unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            "N536s1EjuRWdnk6S3JDivkTPPC9/CbLi34Chshm0Yd41Vsk+GpzrMAjpmeLWpUtPHWXum+m+Y/pF7IiTFiM3Lw=="
        );
    }

    #[test]
    fn error_when_file_not_found_for_hash() {
        let dir = create_temp_dir();
        let static_fn = GetFileHash::new(dir.into_path(), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("doesnt-exist").unwrap());
        let err = format!("{}", static_fn.call(&args).unwrap_err());

        assert!(err.contains("Cannot find file"));
    }
}
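A side note on the defaults these tests pin down: `sha_type` still defaults to 384, and `base64` now defaults to `true` because the usual consumer is a subresource-integrity attribute. A minimal sketch, reusing the digest asserted in `can_get_file_hash_sha384` above (the surrounding variable names are illustrative only):

    // The base64 digest drops straight into an SRI attribute on a <link> or <script> tag.
    let hash = "FBwJvSiJl3O3crvgZNi3GPodbyhSt+r9XtZonSa3SIO3ni6BTNadW1KrR2qihMQU";
    let integrity = format!("integrity=\"sha384-{}\"", hash);
    assert!(integrity.starts_with("integrity=\"sha384-"));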
56
components/templates/src/global_fns/helpers.rs
Normal file
@ -0,0 +1,56 @@
use std::borrow::Cow;
use std::path::{Path, PathBuf};

use errors::{bail, Result};
use utils::fs::is_path_in_directory;

/// This is used by a few Tera functions to search for files on the filesystem.
/// It tries to find the file in the following spots:
/// 1. base_path + path
/// 2. base_path + static + path
/// 3. base_path + content + path
/// 4. base_path + themes + {current_theme} + static + path
/// A path starting with `@/` will have that prefix replaced with `content/`, and a path
/// starting with `/` will have it removed.
/// It also returns the unified path so it can be used as a unique hash for a given file.
/// It will error if the file is not contained in the Zola directory.
pub fn search_for_file(
    base_path: &Path,
    path: &str,
    theme: &Option<String>,
) -> Result<Option<(PathBuf, String)>> {
    let mut search_paths = vec![base_path.join("static"), base_path.join("content")];
    if let Some(t) = theme {
        search_paths.push(base_path.join("themes").join(t).join("static"));
    }
    let actual_path = if path.starts_with("@/") {
        Cow::Owned(path.replace("@/", "content/"))
    } else {
        Cow::Borrowed(path.trim_start_matches('/'))
    };

    let mut file_path = base_path.join(&*actual_path);
    let mut file_exists = file_path.exists();

    if file_exists && !is_path_in_directory(base_path, &file_path)? {
        bail!("{:?} is not inside the base site directory {:?}", path, base_path);
    }

    if !file_exists {
        // we need to search in all the search folders now
        for dir in &search_paths {
            let p = dir.join(&*actual_path);
            if p.exists() {
                file_path = p;
                file_exists = true;
                break;
            }
        }
    }

    if file_exists {
        Ok(Some((file_path, actual_path.into_owned())))
    } else {
        Ok(None)
    }
}
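A minimal sketch of the resolution rules documented above, assuming a site rooted at a hypothetical `site_root` that contains `content/logo.png`; all three spellings resolve to the same on-disk file and the same unified path:

    use std::path::Path;

    let site_root = Path::new("/home/user/my-site"); // hypothetical site directory
    for spelling in &["@/logo.png", "/content/logo.png", "content/logo.png"] {
        // Each spelling is normalized, checked against the base dir, then searched for.
        if let Some((on_disk, unified)) = search_for_file(site_root, spelling, &None).unwrap() {
            println!("{} -> {} (unified: {})", spelling, on_disk.display(), unified);
        }
    }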
83
components/templates/src/global_fns/i18n.rs
Normal file
@ -0,0 +1,83 @@
use tera::{from_value, to_value, Error, Function as TeraFn, Result, Value};

use config::Config;
use std::collections::HashMap;

#[derive(Debug)]
pub struct Trans {
    config: Config,
}
impl Trans {
    pub fn new(config: Config) -> Self {
        Self { config }
    }
}
impl TeraFn for Trans {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let key = required_arg!(String, args.get("key"), "`trans` requires a `key` argument.");
        let lang = optional_arg!(String, args.get("lang"), "`trans`: `lang` must be a string.")
            .unwrap_or_else(|| self.config.default_language.clone());

        let term = self
            .config
            .get_translation(&lang, &key)
            .map_err(|e| Error::chain("Failed to retrieve term translation", e))?;

        Ok(to_value(term).unwrap())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    const TRANS_CONFIG: &str = r#"
base_url = "https://remplace-par-ton-url.fr"
default_language = "fr"

[translations]
title = "Un titre"

[languages]
[languages.en]
[languages.en.translations]
title = "A title" "#;

    #[test]
    fn can_translate_a_string() {
        let config = Config::parse(TRANS_CONFIG).unwrap();
        let static_fn = Trans::new(config);
        let mut args = HashMap::new();

        args.insert("key".to_string(), to_value("title").unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "Un titre");

        args.insert("lang".to_string(), to_value("en").unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "A title");

        args.insert("lang".to_string(), to_value("fr").unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "Un titre");
    }

    #[test]
    fn error_on_absent_translation_lang() {
        let mut args = HashMap::new();
        args.insert("lang".to_string(), to_value("absent").unwrap());
        args.insert("key".to_string(), to_value("title").unwrap());

        let config = Config::parse(TRANS_CONFIG).unwrap();
        let error = Trans::new(config).call(&args).unwrap_err();
        assert_eq!("Failed to retrieve term translation", format!("{}", error));
    }

    #[test]
    fn error_on_absent_translation_key() {
        let mut args = HashMap::new();
        args.insert("lang".to_string(), to_value("en").unwrap());
        args.insert("key".to_string(), to_value("absent").unwrap());

        let config = Config::parse(TRANS_CONFIG).unwrap();
        let error = Trans::new(config).call(&args).unwrap_err();
        assert_eq!("Failed to retrieve term translation", format!("{}", error));
    }
}
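For orientation, structs like this one are exposed to templates by registering them with Tera under a template-facing name. A minimal sketch, reusing the `TRANS_CONFIG` fixture above (the registration under the name `trans` is what templates call):

    let config = Config::parse(TRANS_CONFIG).unwrap();
    let mut tera = tera::Tera::default();
    // Templates can now call {{ trans(key="title") }} or {{ trans(key="title", lang="en") }}.
    tera.register_function("trans", Trans::new(config));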
281
components/templates/src/global_fns/images.rs
Normal file
@ -0,0 +1,281 @@
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};

use tera::{from_value, to_value, Function as TeraFn, Result, Value};

use crate::global_fns::helpers::search_for_file;

#[derive(Debug)]
pub struct ResizeImage {
    /// The base path of the Zola site
    base_path: PathBuf,
    theme: Option<String>,
    imageproc: Arc<Mutex<imageproc::Processor>>,
}

impl ResizeImage {
    pub fn new(
        base_path: PathBuf,
        imageproc: Arc<Mutex<imageproc::Processor>>,
        theme: Option<String>,
    ) -> Self {
        Self { base_path, imageproc, theme }
    }
}

static DEFAULT_OP: &str = "fill";
static DEFAULT_FMT: &str = "auto";

impl TeraFn for ResizeImage {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let path = required_arg!(
            String,
            args.get("path"),
            "`resize_image` requires a `path` argument with a string value"
        );
        let width = optional_arg!(
            u32,
            args.get("width"),
            "`resize_image`: `width` must be a non-negative integer"
        );
        let height = optional_arg!(
            u32,
            args.get("height"),
            "`resize_image`: `height` must be a non-negative integer"
        );
        let op = optional_arg!(String, args.get("op"), "`resize_image`: `op` must be a string")
            .unwrap_or_else(|| DEFAULT_OP.to_string());

        let format =
            optional_arg!(String, args.get("format"), "`resize_image`: `format` must be a string")
                .unwrap_or_else(|| DEFAULT_FMT.to_string());

        let quality =
            optional_arg!(u8, args.get("quality"), "`resize_image`: `quality` must be a number");
        if let Some(quality) = quality {
            if quality == 0 || quality > 100 {
                return Err("`resize_image`: `quality` must be in range 1-100".to_string().into());
            }
        }

        let mut imageproc = self.imageproc.lock().unwrap();
        let (file_path, unified_path) = match search_for_file(&self.base_path, &path, &self.theme)
            .map_err(|e| format!("`resize_image`: {}", e))?
        {
            Some(f) => f,
            None => {
                return Err(format!("`resize_image`: Cannot find file: {}", path).into());
            }
        };

        let response = imageproc
            .enqueue(unified_path, file_path, &op, width, height, &format, quality)
            .map_err(|e| format!("`resize_image`: {}", e))?;

        to_value(response).map_err(Into::into)
    }
}

#[derive(Debug)]
pub struct GetImageMetadata {
    /// The base path of the Zola site
    base_path: PathBuf,
    theme: Option<String>,
    result_cache: Arc<Mutex<HashMap<String, Value>>>,
}

impl GetImageMetadata {
    pub fn new(base_path: PathBuf, theme: Option<String>) -> Self {
        Self { base_path, result_cache: Arc::new(Mutex::new(HashMap::new())), theme }
    }
}

impl TeraFn for GetImageMetadata {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let path = required_arg!(
            String,
            args.get("path"),
            "`get_image_metadata` requires a `path` argument with a string value"
        );
        let allow_missing = optional_arg!(
            bool,
            args.get("allow_missing"),
            "`get_image_metadata`: `allow_missing` must be a boolean (true or false)"
        )
        .unwrap_or(false);

        let (src_path, unified_path) = match search_for_file(&self.base_path, &path, &self.theme)
            .map_err(|e| format!("`get_image_metadata`: {}", e))?
        {
            Some((f, p)) => (f, p),
            None => {
                if allow_missing {
                    return Ok(Value::Null);
                }
                return Err(format!("`get_image_metadata`: Cannot find path: {}", path).into());
            }
        };

        let mut cache = self.result_cache.lock().expect("result cache lock");
        if let Some(cached_result) = cache.get(&unified_path) {
            return Ok(cached_result.clone());
        }

        let response = imageproc::read_image_metadata(&src_path)
            .map_err(|e| format!("`get_image_metadata`: {}", e))?;
        let out = to_value(response).unwrap();
        cache.insert(unified_path, out.clone());

        Ok(out)
    }
}

#[cfg(test)]
mod tests {
    use super::{GetImageMetadata, ResizeImage};

    use std::collections::HashMap;
    use std::fs::{copy, create_dir_all};

    use config::Config;
    use std::path::Path;
    use std::sync::{Arc, Mutex};
    use tempfile::{tempdir, TempDir};
    use tera::{to_value, Function};

    fn create_dir_with_image() -> TempDir {
        let dir = tempdir().unwrap();
        create_dir_all(dir.path().join("content").join("gallery")).unwrap();
        create_dir_all(dir.path().join("static")).unwrap();
        create_dir_all(dir.path().join("themes").join("name").join("static")).unwrap();
        copy("gutenberg.jpg", dir.path().join("content").join("gutenberg.jpg")).unwrap();
        copy("gutenberg.jpg", dir.path().join("content").join("gallery").join("asset.jpg"))
            .unwrap();
        copy("gutenberg.jpg", dir.path().join("static").join("gutenberg.jpg")).unwrap();
        copy(
            "gutenberg.jpg",
            dir.path().join("themes").join("name").join("static").join("in-theme.jpg"),
        )
        .unwrap();
        dir
    }

    // https://github.com/getzola/zola/issues/788
    // https://github.com/getzola/zola/issues/1035
    #[test]
    fn can_resize_image() {
        let dir = create_dir_with_image();
        let imageproc = imageproc::Processor::new(dir.path().to_path_buf(), &Config::default());

        let static_fn = ResizeImage::new(
            dir.path().to_path_buf(),
            Arc::new(Mutex::new(imageproc)),
            Some("name".to_owned()),
        );
        let mut args = HashMap::new();
        args.insert("height".to_string(), to_value(40).unwrap());
        args.insert("width".to_string(), to_value(40).unwrap());

        // hashing is stable based on filename and params so we can compare with hashes

        // 1. resizing an image in static
        args.insert("path".to_string(), to_value("static/gutenberg.jpg").unwrap());
        let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
        let static_path = Path::new("static").join("processed_images");

        // TODO: Use `assert_processed_path_matches()` from imageproc so that hashes don't need to be hardcoded
        assert_eq!(
            data["static_path"],
            to_value(&format!("{}", static_path.join("6a89d6483cdc5f7700.jpg").display())).unwrap()
        );
        assert_eq!(
            data["url"],
            to_value("http://a-website.com/processed_images/6a89d6483cdc5f7700.jpg").unwrap()
        );

        // 2. resizing an image in content with a relative path
        args.insert("path".to_string(), to_value("content/gutenberg.jpg").unwrap());
        let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
        assert_eq!(
            data["static_path"],
            to_value(&format!("{}", static_path.join("202d9263f4dbc95900.jpg").display())).unwrap()
        );
        assert_eq!(
            data["url"],
            to_value("http://a-website.com/processed_images/202d9263f4dbc95900.jpg").unwrap()
        );

        // 3. resizing with an absolute path is the same as the above
        args.insert("path".to_string(), to_value("/content/gutenberg.jpg").unwrap());
        let data2 = static_fn.call(&args).unwrap().as_object().unwrap().clone();
        assert_eq!(data, data2);

        // 4. resizing an image in content starting with `@/` is the same as 2 and 3
        args.insert("path".to_string(), to_value("@/gutenberg.jpg").unwrap());
        let data2 = static_fn.call(&args).unwrap().as_object().unwrap().clone();
        assert_eq!(data, data2);

        // 5. resizing an image with a relative path not starting with static or content
        args.insert("path".to_string(), to_value("gallery/asset.jpg").unwrap());
        let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
        assert_eq!(
            data["static_path"],
            to_value(&format!("{}", static_path.join("6296a3c153f701be00.jpg").display())).unwrap()
        );
        assert_eq!(
            data["url"],
            to_value("http://a-website.com/processed_images/6296a3c153f701be00.jpg").unwrap()
        );

        // 6. Looking up a file in the theme
        args.insert("path".to_string(), to_value("in-theme.jpg").unwrap());
        let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
        assert_eq!(
            data["static_path"],
            to_value(&format!("{}", static_path.join("6296a3c153f701be00.jpg").display())).unwrap()
        );
        assert_eq!(
            data["url"],
            to_value("http://a-website.com/processed_images/6296a3c153f701be00.jpg").unwrap()
        );
    }

    // TODO: consider https://github.com/getzola/zola/issues/1161
    #[test]
    fn can_get_image_metadata() {
        let dir = create_dir_with_image();

        let static_fn = GetImageMetadata::new(dir.path().to_path_buf(), None);

        // Let's test a few scenarios
        let mut args = HashMap::new();

        // 1. a call to something in `static` with a relative path
        args.insert("path".to_string(), to_value("static/gutenberg.jpg").unwrap());
        let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
        assert_eq!(data["height"], to_value(380).unwrap());
        assert_eq!(data["width"], to_value(300).unwrap());

        // 2. a call to something in `static` with an absolute path is currently handled the same as the above
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("/static/gutenberg.jpg").unwrap());
        let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
        assert_eq!(data["height"], to_value(380).unwrap());
        assert_eq!(data["width"], to_value(300).unwrap());

        // 3. a call to something in `content` with a relative path
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("content/gutenberg.jpg").unwrap());
        let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
        assert_eq!(data["height"], to_value(380).unwrap());
        assert_eq!(data["width"], to_value(300).unwrap());

        // 4. a call to something in `content` with a `@/` path is the same as the above
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("@/gutenberg.jpg").unwrap());
        let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
        assert_eq!(data["height"], to_value(380).unwrap());
        assert_eq!(data["width"], to_value(300).unwrap());
    }
}
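Since `resize_image` now returns an object rather than a bare URL, the `static_path` field can be fed to other file functions on the newly created file. A minimal sketch, assuming `resize_fn` and `hash_fn` are `ResizeImage` and `GetFileHash` instances registered over the same site directory (the names are illustrative):

    let mut args = HashMap::new();
    args.insert("path".to_string(), to_value("content/gutenberg.jpg").unwrap());
    args.insert("width".to_string(), to_value(40).unwrap());
    // The response is `{ url, static_path }`, not just a URL string.
    let resized = resize_fn.call(&args).unwrap();

    // Follow up on the processed file, e.g. to emit an integrity hash for it.
    let mut hash_args = HashMap::new();
    hash_args.insert("path".to_string(), resized["static_path"].clone());
    let digest = hash_fn.call(&hash_args).unwrap();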
components/templates/src/global_fns/load_data.rs

@ -1,29 +1,42 @@
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::{Arc, Mutex};

use csv::Reader;
use std::collections::HashMap;
use reqwest::header::{HeaderValue, CONTENT_TYPE};
use reqwest::{blocking::Client, header};
use tera::{from_value, to_value, Error, Function as TeraFn, Map, Result, Value};
use url::Url;
use utils::de::fix_toml_dates;
use utils::fs::{get_file_time, read_file};

use crate::global_fns::helpers::search_for_file;

static GET_DATA_ARGUMENT_ERROR_MESSAGE: &str =
    "`load_data`: requires EITHER a `path` or `url` argument";

#[derive(Debug, PartialEq, Clone, Copy, Hash)]
enum Method {
    Post,
    Get,
}

impl FromStr for Method {
    type Err = Error;

    fn from_str(s: &str) -> Result<Self> {
        match s.to_lowercase().as_ref() {
            "post" => Ok(Method::Post),
            "get" => Ok(Method::Get),
            _ => Err("`load_data` method must either be POST or GET.".into()),
        }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum OutputFormat {
    Toml,
    Json,
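The parsing above is case-insensitive, so templates can pass `method = "post"` or `"POST"` interchangeably; anything else is rejected up front. A minimal sketch:

    assert_eq!(Method::from_str("Post").unwrap(), Method::Post);
    assert_eq!(Method::from_str("GET").unwrap(), Method::Get);
    assert!(Method::from_str("put").is_err());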
@ -32,23 +45,11 @@ enum OutputFormat {
    Plain,
}

impl FromStr for OutputFormat {
    type Err = Error;

    fn from_str(output_format: &str) -> Result<Self> {
        match output_format.to_lowercase().as_ref() {
            "toml" => Ok(OutputFormat::Toml),
            "csv" => Ok(OutputFormat::Csv),
            "json" => Ok(OutputFormat::Json),
@ -71,36 +72,59 @@ impl OutputFormat {
    }
}

#[derive(Debug)]
enum DataSource {
    Url(Url),
    Path(PathBuf),
}

impl DataSource {
    /// Returns Some(DataSource) on success, from optional load_data() path/url arguments.
    /// Returns an Error when a URL could not be parsed, and Ok(None) when the path
    /// is missing, so that the load_data() function can decide whether this is an error.
    /// Note: if the signature of this function changes, please update LoadData::call()
    /// so we don't mistakenly unwrap things over there
    fn from_args(
        path_arg: Option<String>,
        url_arg: Option<String>,
        base_path: &Path,
        theme: &Option<String>,
    ) -> Result<Option<Self>> {
        if path_arg.is_some() && url_arg.is_some() {
            return Err(GET_DATA_ARGUMENT_ERROR_MESSAGE.into());
        }

        if let Some(path) = path_arg {
            return match search_for_file(&base_path, &path, &theme)
                .map_err(|e| format!("`load_data`: {}", e))?
            {
                Some((f, _)) => Ok(Some(DataSource::Path(f))),
                None => Ok(None),
            };
        }

        if let Some(url) = url_arg {
            return Url::parse(&url)
                .map(DataSource::Url)
                .map(Some)
                .map_err(|e| format!("`load_data`: Failed to parse {} as url: {}", url, e).into());
        }

        Err(GET_DATA_ARGUMENT_ERROR_MESSAGE.into())
    }

    fn get_cache_key(
        &self,
        format: &OutputFormat,
        method: Method,
        post_body: &Option<String>,
        post_content_type: &Option<String>,
    ) -> u64 {
        let mut hasher = DefaultHasher::new();
        format.hash(&mut hasher);
        method.hash(&mut hasher);
        post_body.hash(&mut hasher);
        post_content_type.hash(&mut hasher);
        self.hash(&mut hasher);
        hasher.finish()
    }
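Folding the method, body, and content type into the cache key is what keeps distinct POST requests to the same URL from colliding in the result cache. A minimal sketch (the URL is illustrative):

    // Two POSTs to the same URL with different bodies get different cache keys,
    // so their responses are cached independently.
    let src = DataSource::Url("https://example.com/api".parse().unwrap());
    let k1 = src.get_cache_key(&OutputFormat::Plain, Method::Post, &Some("a".to_string()), &None);
    let k2 = src.get_cache_key(&OutputFormat::Plain, Method::Post, &Some("b".to_string()), &None);
    assert_ne!(k1, k2);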
@ -118,57 +142,23 @@ impl Hash for DataSource {
    }
}

fn get_output_format_from_args(
    format_arg: Option<String>,
    data_source: &DataSource,
) -> Result<OutputFormat> {
    if let Some(format) = format_arg {
        return OutputFormat::from_str(&format);
    }

    if let DataSource::Path(path) = data_source {
        match path.extension().and_then(|e| e.to_str()) {
            Some(ext) => OutputFormat::from_str(ext).or(Ok(OutputFormat::Plain)),
            None => Ok(OutputFormat::Plain),
        }
    } else {
        // Always default to Plain if we don't know what it is
        Ok(OutputFormat::Plain)
    }
}

/// A Tera function to load data from a file or from a URL
@ -176,11 +166,12 @@ fn get_output_format_from_args(
#[derive(Debug)]
pub struct LoadData {
    base_path: PathBuf,
    theme: Option<String>,
    client: Arc<Mutex<Client>>,
    result_cache: Arc<Mutex<HashMap<u64, Value>>>,
}
impl LoadData {
    pub fn new(base_path: PathBuf, theme: Option<String>) -> Self {
        let client = Arc::new(Mutex::new(
            Client::builder()
                .user_agent(concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION")))
@ -188,37 +179,137 @@ impl LoadData {
                .expect("reqwest client build"),
        ));
        let result_cache = Arc::new(Mutex::new(HashMap::new()));
        Self { base_path, client, result_cache, theme }
    }
}

impl TeraFn for LoadData {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        // Either a local path or a URL
        let path_arg = optional_arg!(String, args.get("path"), GET_DATA_ARGUMENT_ERROR_MESSAGE);
        let url_arg = optional_arg!(String, args.get("url"), GET_DATA_ARGUMENT_ERROR_MESSAGE);
        // Optional general params
        let format_arg = optional_arg!(
            String,
            args.get("format"),
            "`load_data`: `format` needs to be an argument with a string value, being one of the supported `load_data` file types (csv, json, toml, bibtex, plain)"
        );
        let required = optional_arg!(
            bool,
            args.get("required"),
            "`load_data`: `required` must be a boolean (true or false)"
        )
        .unwrap_or(true);
        // Remote URL parameters only
        let post_body_arg =
            optional_arg!(String, args.get("body"), "`load_data` body must be a string, if set.");
        let post_content_type = optional_arg!(
            String,
            args.get("content_type"),
            "`load_data` content_type must be a string, if set."
        );
        let method_arg = optional_arg!(
            String,
            args.get("method"),
            "`load_data` method must either be POST or GET."
        );

        let method = match method_arg {
            Some(ref method_str) => match Method::from_str(&method_str) {
                Ok(m) => m,
                Err(e) => return Err(e),
            },
            _ => Method::Get,
        };

        // If the file doesn't exist, source is None
        let data_source = match (
            DataSource::from_args(path_arg.clone(), url_arg, &self.base_path, &self.theme),
            required,
        ) {
            // If the file was not required, return a Null value to the template
            (Ok(None), false) | (Err(_), false) => {
                return Ok(Value::Null);
            }
            (Err(e), true) => {
                return Err(e);
            }
            // If the file was required, error
            (Ok(None), true) => {
                // source is None only for a `path` argument (not a URL), so unwrapping
                // `path_arg` here is safe
                return Err(format!(
                    "`load_data`: {} doesn't exist",
                    &self.base_path.join(path_arg.unwrap()).display()
                )
                .into());
            }
            (Ok(Some(data_source)), _) => data_source,
        };

        let file_format = get_output_format_from_args(format_arg, &data_source)?;
        let cache_key =
            data_source.get_cache_key(&file_format, method, &post_body_arg, &post_content_type);

        let mut cache = self.result_cache.lock().expect("result cache lock");
        if let Some(cached_result) = cache.get(&cache_key) {
            return Ok(cached_result.clone());
        }

        let data = match data_source {
            DataSource::Path(path) => read_file(&path)
                .map_err(|e| format!("`load_data`: error reading file {:?}: {}", path, e)),
            DataSource::Url(url) => {
                let response_client = self.client.lock().expect("response client lock");
                let req = match method {
                    Method::Get => response_client
                        .get(url.as_str())
                        .header(header::ACCEPT, file_format.as_accept_header()),
                    Method::Post => {
                        let mut resp = response_client
                            .post(url.as_str())
                            .header(header::ACCEPT, file_format.as_accept_header());
                        if let Some(content_type) = post_content_type {
                            match HeaderValue::from_str(&content_type) {
                                Ok(c) => {
                                    resp = resp.header(CONTENT_TYPE, c);
                                }
                                Err(_) => {
                                    return Err(format!(
                                        "`load_data`: {} is an illegal content type",
                                        &content_type
                                    )
                                    .into());
                                }
                            }
                        }
                        if let Some(body) = post_body_arg {
                            resp = resp.body(body);
                        }
                        resp
                    }
                };

                match req.send().and_then(|res| res.error_for_status()) {
                    Ok(r) => r.text().map_err(|e| {
                        format!("`load_data`: Failed to parse response from {}: {:?}", url, e)
                    }),
                    Err(e) => {
                        if !required {
                            // HTTP error is discarded (because required=false) and
                            // a Null value is returned to the template
                            return Ok(Value::Null);
                        }
                        Err(match e.status() {
                            Some(status) => {
                                format!("`load_data`: Failed to request {}: {}", url, status)
                            }
                            None => format!(
                                "`load_data`: Could not get response status for url: {}",
                                url
                            ),
                        })
                    }
                }
            }
        }?;
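The `required` flag threads through both branches above: a missing local file and a failed HTTP request are handled the same way. A minimal sketch, assuming `load_data_fn` is a registered `LoadData` instance and the path is illustrative:

    let mut args = HashMap::new();
    args.insert("path".to_string(), to_value("data/missing.toml").unwrap());
    args.insert("required".to_string(), to_value(false).unwrap());
    // With required = false, a missing source yields Null instead of an error.
    assert_eq!(load_data_fn.call(&args).unwrap(), Value::Null);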
@ -252,7 +343,7 @@ fn load_toml(toml_data: String) -> Result<Value> {

    match toml_value {
        Value::Object(m) => Ok(fix_toml_dates(m)),
        _ => Err("Loaded something other than a TOML object".into()),
    }
}
|
@ -320,11 +411,11 @@ fn load_csv(csv_data: String) -> Result<Value> {
|
|||
let mut csv_map = Map::new();
|
||||
|
||||
{
|
||||
let hdrs = reader.headers().map_err(|e| {
|
||||
let headers = reader.headers().map_err(|e| {
|
||||
format!("'load_data': {} - unable to read CSV header line (line 1) for CSV file", e)
|
||||
})?;
|
||||
|
||||
let headers_array = hdrs.iter().map(|v| Value::String(v.to_string())).collect();
|
||||
let headers_array = headers.iter().map(|v| Value::String(v.to_string())).collect();
|
||||
|
||||
csv_map.insert(String::from("headers"), Value::Array(headers_array));
|
||||
}
|
||||
|
@ -368,8 +459,11 @@ mod tests {
    use std::collections::HashMap;
    use std::path::PathBuf;

    use crate::global_fns::load_data::Method;
    use mockito::mock;
    use serde_json::json;
    use std::fs::{copy, create_dir_all};
    use tempfile::tempdir;
    use tera::{to_value, Function};

    // NOTE: HTTP mock paths below are randomly generated to avoid name
@ -382,9 +476,109 @@ mod tests {
        return test_files.join(filename);
    }

    #[test]
    fn fails_illegal_method_parameter() {
        let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
        let mut args = HashMap::new();
        args.insert("url".to_string(), to_value("https://example.com").unwrap());
        args.insert("format".to_string(), to_value("plain").unwrap());
        args.insert("method".to_string(), to_value("illegalmethod").unwrap());
        let result = static_fn.call(&args);
        assert!(result.is_err());
        assert!(result
            .unwrap_err()
            .to_string()
            .contains("`load_data` method must either be POST or GET."));
    }

    #[test]
    fn can_load_remote_data_using_post_method() {
        let _mg = mock("GET", "/kr1zdgbm4y")
            .with_header("content-type", "text/plain")
            .with_body("GET response")
            .expect(0)
            .create();
        let _mp = mock("POST", "/kr1zdgbm4y")
            .with_header("content-type", "text/plain")
            .with_body("POST response")
            .create();

        let url = format!("{}{}", mockito::server_url(), "/kr1zdgbm4y");

        let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
        let mut args = HashMap::new();
        args.insert("url".to_string(), to_value(url).unwrap());
        args.insert("format".to_string(), to_value("plain").unwrap());
        args.insert("method".to_string(), to_value("post").unwrap());
        let result = static_fn.call(&args);
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), "POST response");
        _mg.assert();
        _mp.assert();
    }

    #[test]
    fn can_load_remote_data_using_post_method_with_content_type_header() {
        let _mjson = mock("POST", "/kr1zdgbm4yw")
            .match_header("content-type", "application/json")
            .with_header("content-type", "application/json")
            .with_body("{i_am:'json'}")
            .expect(0)
            .create();
        let _mtext = mock("POST", "/kr1zdgbm4yw")
            .match_header("content-type", "text/plain")
            .with_header("content-type", "text/plain")
            .with_body("POST response text")
            .create();

        let url = format!("{}{}", mockito::server_url(), "/kr1zdgbm4yw");

        let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
        let mut args = HashMap::new();
        args.insert("url".to_string(), to_value(url).unwrap());
        args.insert("format".to_string(), to_value("plain").unwrap());
        args.insert("method".to_string(), to_value("post").unwrap());
        args.insert("content_type".to_string(), to_value("text/plain").unwrap());
        let result = static_fn.call(&args);
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), "POST response text");
        _mjson.assert();
        _mtext.assert();
    }

    #[test]
    fn can_load_remote_data_using_post_method_with_body() {
        let _mjson = mock("POST", "/kr1zdgbm4y")
            .match_body("qwerty")
            .with_header("content-type", "application/json")
            .with_body("{i_am:'json'}")
            .expect(0)
            .create();
        let _mtext = mock("POST", "/kr1zdgbm4y")
            .match_body("this is a match")
            .with_header("content-type", "text/plain")
            .with_body("POST response text")
            .create();

        let url = format!("{}{}", mockito::server_url(), "/kr1zdgbm4y");

        let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
        let mut args = HashMap::new();
        args.insert("url".to_string(), to_value(url).unwrap());
        args.insert("format".to_string(), to_value("plain").unwrap());
        args.insert("method".to_string(), to_value("post").unwrap());
        args.insert("content_type".to_string(), to_value("text/plain").unwrap());
        args.insert("body".to_string(), to_value("this is a match").unwrap());
        let result = static_fn.call(&args);
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), "POST response text");
        _mjson.assert();
        _mtext.assert();
    }

    #[test]
    fn fails_when_missing_file() {
        let static_fn = LoadData::new(PathBuf::from("../utils"), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("../../../READMEE.md").unwrap());
        let result = static_fn.call(&args);
@ -393,56 +587,112 @@ mod tests {
    }

    #[test]
    fn doesnt_fail_when_missing_file_is_not_required() {
        let static_fn = LoadData::new(PathBuf::from("../utils"), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("../../../READMEE.md").unwrap());
        args.insert("required".to_string(), to_value(false).unwrap());
        let result = static_fn.call(&args);
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), tera::Value::Null);
    }

    #[test]
    fn can_handle_various_local_file_locations() {
        let dir = tempdir().unwrap();
        create_dir_all(dir.path().join("content").join("gallery")).unwrap();
        create_dir_all(dir.path().join("static")).unwrap();
        copy(get_test_file("test.css"), dir.path().join("content").join("test.css")).unwrap();
        copy(get_test_file("test.css"), dir.path().join("content").join("gallery").join("new.css"))
            .unwrap();
        copy(get_test_file("test.css"), dir.path().join("static").join("test.css")).unwrap();

        let static_fn = LoadData::new(dir.path().to_path_buf(), None);
        let mut args = HashMap::new();
        let val = if cfg!(windows) { ".hello {}\r\n" } else { ".hello {}\n" };

        // 1. relative path in `static`
        args.insert("path".to_string(), to_value("static/test.css").unwrap());
        let data = static_fn.call(&args).unwrap().as_str().unwrap().to_string();
        assert_eq!(data, val);

        // 2. relative path in `content`
        args.insert("path".to_string(), to_value("content/test.css").unwrap());
        let data = static_fn.call(&args).unwrap().as_str().unwrap().to_string();
        assert_eq!(data, val);

        // 3. absolute path is the same
        args.insert("path".to_string(), to_value("/content/test.css").unwrap());
        let data = static_fn.call(&args).unwrap().as_str().unwrap().to_string();
        assert_eq!(data, val);

        // 4. path starting with @/
        args.insert("path".to_string(), to_value("@/test.css").unwrap());
        let data = static_fn.call(&args).unwrap().as_str().unwrap().to_string();
        assert_eq!(data, val);
    }

    #[test]
    fn cannot_load_outside_base_dir() {
        let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("../../README.md").unwrap());
        args.insert("format".to_string(), to_value("plain").unwrap());
        let result = static_fn.call(&args);
        assert!(result.is_err());
        println!("{:?} {:?}", std::env::current_dir(), result);
        assert!(result.unwrap_err().to_string().contains("is not inside the base site directory"));
    }

    #[test]
    fn calculates_cache_key_for_path() {
        // We can't test against a fixed value, due to the fact the cache key is built from the absolute path
        let cache_key = DataSource::Path(get_test_file("test.toml")).get_cache_key(
            &OutputFormat::Toml,
            Method::Get,
            &None,
            &None,
        );
        let cache_key_2 = DataSource::Path(get_test_file("test.toml")).get_cache_key(
            &OutputFormat::Toml,
            Method::Get,
            &None,
            &None,
        );
        assert_eq!(cache_key, cache_key_2);
    }

    #[test]
    fn calculates_cache_key_for_url() {
        let _m = mock("GET", "/kr1zdgbm4y")
            .with_header("content-type", "text/plain")
            .with_body("Test")
            .create();

        let url = format!("{}{}", mockito::server_url(), "/kr1zdgbm4y");
        let cache_key = DataSource::Url(url.parse().unwrap()).get_cache_key(
            &OutputFormat::Plain,
            Method::Get,
            &None,
            &None,
        );
        assert_eq!(cache_key, 425638486551656875);
    }

    #[test]
    fn different_cache_key_per_filename() {
        let toml_cache_key = DataSource::Path(get_test_file("test.toml")).get_cache_key(
            &OutputFormat::Toml,
            Method::Get,
            &None,
            &None,
        );
        let json_cache_key = DataSource::Path(get_test_file("test.json")).get_cache_key(
            &OutputFormat::Toml,
            Method::Get,
            &None,
            &None,
        );
        assert_ne!(toml_cache_key, json_cache_key);
    }

    #[test]
    fn different_cache_key_per_format() {
        let toml_cache_key = DataSource::Path(get_test_file("test.toml")).get_cache_key(
            &OutputFormat::Toml,
            Method::Get,
            &None,
            &None,
        );
        let json_cache_key = DataSource::Path(get_test_file("test.toml")).get_cache_key(
            &OutputFormat::Json,
            Method::Get,
            &None,
            &None,
        );
        assert_ne!(toml_cache_key, json_cache_key);
    }
@ -461,7 +711,7 @@ mod tests {
            .create();

        let url = format!("{}{}", mockito::server_url(), "/zpydpkjj67");
        let static_fn = LoadData::new(PathBuf::new(), None);
        let mut args = HashMap::new();
        args.insert("url".to_string(), to_value(&url).unwrap());
        args.insert("format".to_string(), to_value("json").unwrap());
@ -478,7 +728,7 @@ mod tests {
            .create();

        let url = format!("{}{}", mockito::server_url(), "/aazeow0kog");
        let static_fn = LoadData::new(PathBuf::new(), None);
        let mut args = HashMap::new();
        args.insert("url".to_string(), to_value(&url).unwrap());
        args.insert("format".to_string(), to_value("json").unwrap());
@ -486,10 +736,29 @@ mod tests {
        assert!(result.is_err());
        assert_eq!(
            result.unwrap_err().to_string(),
            format!("`load_data`: Failed to request {}: 404 Not Found", url)
        );
    }

    #[test]
    fn doesnt_fail_when_request_404s_is_not_required() {
        let _m = mock("GET", "/aazeow0kog")
            .with_status(404)
            .with_header("content-type", "text/plain")
            .with_body("Not Found")
            .create();

        let url = format!("{}{}", mockito::server_url(), "/aazeow0kog");
        let static_fn = LoadData::new(PathBuf::new(), None);
        let mut args = HashMap::new();
        args.insert("url".to_string(), to_value(&url).unwrap());
        args.insert("format".to_string(), to_value("json").unwrap());
        args.insert("required".to_string(), to_value(false).unwrap());
        let result = static_fn.call(&args);
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), tera::Value::Null);
    }

    #[test]
    fn set_default_user_agent() {
        let user_agent = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"));
@ -507,7 +776,7 @@ mod tests {
            .create();

        let url = format!("{}{}", mockito::server_url(), "/chu8aizahBiy");
        let static_fn = LoadData::new(PathBuf::new(), None);
        let mut args = HashMap::new();
        args.insert("url".to_string(), to_value(&url).unwrap());
        args.insert("format".to_string(), to_value("json").unwrap());
@ -517,7 +786,7 @@ mod tests {

    #[test]
    fn can_load_toml() {
        let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("test.toml").unwrap());
        let result = static_fn.call(&args.clone()).unwrap();
@ -537,13 +806,14 @@ mod tests {

    #[test]
    fn unknown_extension_defaults_to_plain() {
        let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("test.css").unwrap());
        let result = static_fn.call(&args.clone()).unwrap();
        println!("{:?}", result);

        if cfg!(windows) {
            assert_eq!(result.as_str().unwrap().replace("\r\n", "\n"), ".hello {}\n",);
        } else {
            assert_eq!(result, ".hello {}\n",);
        };
@ -551,14 +821,17 @@ mod tests {

    #[test]
    fn can_override_known_extension_with_format() {
        let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("test.csv").unwrap());
        args.insert("format".to_string(), to_value("plain").unwrap());
        let result = static_fn.call(&args.clone()).unwrap();

        if cfg!(windows) {
            assert_eq!(
                result.as_str().unwrap().replace("\r\n", "\n"),
                "Number,Title\n1,Gutenberg\n2,Printing",
            );
        } else {
            assert_eq!(result, "Number,Title\n1,Gutenberg\n2,Printing",);
        };
@ -566,14 +839,14 @@ mod tests {

    #[test]
    fn will_use_format_on_unknown_extension() {
        let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("test.css").unwrap());
        args.insert("format".to_string(), to_value("plain").unwrap());
        let result = static_fn.call(&args.clone()).unwrap();

        if cfg!(windows) {
            assert_eq!(result.as_str().unwrap().replace("\r\n", "\n"), ".hello {}\n",);
        } else {
            assert_eq!(result, ".hello {}\n",);
        };
@ -581,7 +854,7 @@ mod tests {

    #[test]
    fn can_load_csv() {
        let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("test.csv").unwrap());
        let result = static_fn.call(&args.clone()).unwrap();
@ -601,7 +874,7 @@ mod tests {
    // Test points to bad csv file with uneven row lengths
    #[test]
    fn bad_csv_should_result_in_error() {
        let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("uneven_rows.csv").unwrap());
        let result = static_fn.call(&args.clone());
@ -619,9 +892,30 @@ mod tests {
        }
    }

    #[test]
    fn bad_csv_should_result_in_error_even_when_not_required() {
        let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("uneven_rows.csv").unwrap());
        args.insert("required".to_string(), to_value(false).unwrap());
        let result = static_fn.call(&args.clone());

        assert!(result.is_err());

        let error_kind = result.err().unwrap().kind;
        match error_kind {
            tera::ErrorKind::Msg(msg) => {
                if msg != String::from("Error encountered when parsing csv records") {
                    panic!("Error message is wrong. Perhaps wrong error is being returned?");
                }
            }
            _ => panic!("Error encountered was not expected CSV error"),
        }
    }

    #[test]
    fn can_load_json() {
        let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("test.json").unwrap());
        let result = static_fn.call(&args.clone()).unwrap();
@ -637,4 +931,67 @@ mod tests {
            })
        )
    }

    #[test]
    fn is_load_remote_data_using_post_method_with_different_body_not_cached() {
        let _mjson = mock("POST", "/kr1zdgbm4y3")
            .with_header("content-type", "application/json")
            .with_body("{i_am:'json'}")
            .expect(2)
            .create();
        let url = format!("{}{}", mockito::server_url(), "/kr1zdgbm4y3");

        let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
        let mut args = HashMap::new();
        args.insert("url".to_string(), to_value(&url).unwrap());
        args.insert("format".to_string(), to_value("plain").unwrap());
        args.insert("method".to_string(), to_value("post").unwrap());
        args.insert("content_type".to_string(), to_value("text/plain").unwrap());
        args.insert("body".to_string(), to_value("this is a match").unwrap());
        let result = static_fn.call(&args);
        assert!(result.is_ok());

        let mut args2 = HashMap::new();
        args2.insert("url".to_string(), to_value(&url).unwrap());
        args2.insert("format".to_string(), to_value("plain").unwrap());
        args2.insert("method".to_string(), to_value("post").unwrap());
        args2.insert("content_type".to_string(), to_value("text/plain").unwrap());
        args2.insert("body".to_string(), to_value("this is a match2").unwrap());
        let result2 = static_fn.call(&args2);
        assert!(result2.is_ok());

        _mjson.assert();
    }

    #[test]
    fn is_load_remote_data_using_post_method_with_same_body_cached() {
        let _mjson = mock("POST", "/kr1zdgbm4y2")
            .match_body("this is a match")
            .with_header("content-type", "application/json")
            .with_body("{i_am:'json'}")
            .expect(1)
            .create();
        let url = format!("{}{}", mockito::server_url(), "/kr1zdgbm4y2");

        let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
        let mut args = HashMap::new();
        args.insert("url".to_string(), to_value(&url).unwrap());
        args.insert("format".to_string(), to_value("plain").unwrap());
        args.insert("method".to_string(), to_value("post").unwrap());
        args.insert("content_type".to_string(), to_value("text/plain").unwrap());
        args.insert("body".to_string(), to_value("this is a match").unwrap());
        let result = static_fn.call(&args);
        assert!(result.is_ok());

        let mut args2 = HashMap::new();
        args2.insert("url".to_string(), to_value(&url).unwrap());
        args2.insert("format".to_string(), to_value("plain").unwrap());
        args2.insert("method".to_string(), to_value("post").unwrap());
        args2.insert("content_type".to_string(), to_value("text/plain").unwrap());
        args2.insert("body".to_string(), to_value("this is a match").unwrap());
        let result2 = static_fn.call(&args2);
        assert!(result2.is_ok());

        _mjson.assert();
    }
}
|
|
|
@ -1,852 +1,15 @@
|
|||
use std::collections::HashMap;
|
||||
use std::ffi::OsStr;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{Arc, Mutex, RwLock};
|
||||
use std::{fs, io, result};
|
||||
|
||||
use sha2::{Digest, Sha256, Sha384, Sha512};
|
||||
use svg_metadata as svg;
|
||||
use tera::{from_value, to_value, Error, Function as TeraFn, Result, Value};
|
||||
|
||||
use config::Config;
|
||||
use image::GenericImageView;
|
||||
use library::{Library, Taxonomy};
|
||||
use utils::site::resolve_internal_link;
|
||||
use utils::slugs::{slugify_paths, SlugifyStrategy};
|
||||
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
mod content;
|
||||
mod files;
|
||||
mod helpers;
|
||||
mod i18n;
|
||||
mod images;
|
||||
mod load_data;
|
||||
|
||||
pub use self::content::{GetPage, GetSection, GetTaxonomy, GetTaxonomyUrl};
|
||||
pub use self::files::{GetFileHash, GetUrl};
|
||||
pub use self::i18n::Trans;
|
||||
pub use self::images::{GetImageMetadata, ResizeImage};
|
||||
pub use self::load_data::LoadData;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Trans {
|
||||
config: Config,
|
||||
}
|
||||
impl Trans {
|
||||
pub fn new(config: Config) -> Self {
|
||||
Self { config }
|
||||
}
|
||||
}
|
||||
impl TeraFn for Trans {
|
||||
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
|
||||
let key = required_arg!(String, args.get("key"), "`trans` requires a `key` argument.");
|
||||
let lang = optional_arg!(String, args.get("lang"), "`trans`: `lang` must be a string.")
|
||||
.unwrap_or_else(|| self.config.default_language.clone());
|
||||
|
||||
let term = self
|
||||
.config
|
||||
.get_translation(lang, key)
|
||||
.map_err(|e| Error::chain("Failed to retrieve term translation", e))?;
|
||||
|
||||
Ok(to_value(term).unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct GetUrl {
|
||||
config: Config,
|
||||
permalinks: HashMap<String, String>,
|
||||
search_paths: Vec<PathBuf>,
|
||||
}
|
||||
impl GetUrl {
|
||||
pub fn new(
|
||||
config: Config,
|
||||
permalinks: HashMap<String, String>,
|
||||
search_paths: Vec<PathBuf>,
|
||||
) -> Self {
|
||||
Self { config, permalinks, search_paths }
|
||||
}
|
||||
}
|
||||
|
||||
fn make_path_with_lang(path: String, lang: &str, config: &Config) -> Result<String> {
|
||||
if lang == config.default_language {
|
||||
return Ok(path);
|
||||
}
|
||||
|
||||
if !config.languages.iter().any(|x| x.code == lang) {
|
||||
return Err(
|
||||
format!("`{}` is not an authorized language (check config.languages).", lang).into()
|
||||
);
|
||||
}
|
||||
|
||||
let mut splitted_path: Vec<String> = path.split('.').map(String::from).collect();
|
||||
let ilast = splitted_path.len() - 1;
|
||||
splitted_path[ilast] = format!("{}.{}", lang, splitted_path[ilast]);
|
||||
Ok(splitted_path.join("."))
|
||||
}
|
||||
|
||||
fn open_file(search_paths: &[PathBuf], url: &str) -> result::Result<fs::File, io::Error> {
|
||||
let cleaned_url = url.trim_start_matches("@/").trim_start_matches('/');
|
||||
for base_path in search_paths {
|
||||
match fs::File::open(base_path.join(cleaned_url)) {
|
||||
Ok(f) => return Ok(f),
|
||||
Err(_) => continue,
|
||||
};
|
||||
}
|
||||
Err(io::Error::from(io::ErrorKind::NotFound))
|
||||
}
|
||||
|
||||
fn compute_file_sha256(mut file: fs::File) -> result::Result<String, io::Error> {
|
||||
let mut hasher = Sha256::new();
|
||||
io::copy(&mut file, &mut hasher)?;
|
||||
Ok(format!("{:x}", hasher.finalize()))
|
||||
}
|
||||
|
||||
fn compute_file_sha384(mut file: fs::File) -> result::Result<String, io::Error> {
|
||||
let mut hasher = Sha384::new();
|
||||
io::copy(&mut file, &mut hasher)?;
|
||||
Ok(format!("{:x}", hasher.finalize()))
|
||||
}
|
||||
|
||||
fn compute_file_sha512(mut file: fs::File) -> result::Result<String, io::Error> {
|
||||
let mut hasher = Sha512::new();
|
||||
io::copy(&mut file, &mut hasher)?;
|
||||
Ok(format!("{:x}", hasher.finalize()))
|
||||
}
|
||||
|
||||
fn file_not_found_err(search_paths: &[PathBuf], url: &str) -> Result<Value> {
|
||||
Err(format!(
|
||||
"file `{}` not found; searched in{}",
|
||||
url,
|
||||
search_paths.iter().fold(String::new(), |acc, arg| acc + " " + arg.to_str().unwrap())
|
||||
)
|
||||
.into())
|
||||
}
|
||||
|
||||
impl TeraFn for GetUrl {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let cachebust =
            args.get("cachebust").map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));

        let trailing_slash = args
            .get("trailing_slash")
            .map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));

        let path = required_arg!(
            String,
            args.get("path"),
            "`get_url` requires a `path` argument with a string value"
        );

        let lang = optional_arg!(String, args.get("lang"), "`get_url`: `lang` must be a string.")
            .unwrap_or_else(|| self.config.default_language.clone());

        if path.starts_with("@/") {
            let path_with_lang = match make_path_with_lang(path, &lang, &self.config) {
                Ok(x) => x,
                Err(e) => return Err(e),
            };

            match resolve_internal_link(&path_with_lang, &self.permalinks) {
                Ok(resolved) => Ok(to_value(resolved.permalink).unwrap()),
                Err(_) => {
                    Err(format!("Could not resolve URL for link `{}`: not found.", path_with_lang)
                        .into())
                }
            }
        } else {
            // anything else
            let mut permalink = self.config.make_permalink(&path);
            if !trailing_slash && permalink.ends_with('/') {
                permalink.pop(); // Removes the slash
            }

            if cachebust {
                match open_file(&self.search_paths, &path).and_then(compute_file_sha256) {
                    Ok(hash) => {
                        permalink = format!("{}?h={}", permalink, hash);
                    }
                    Err(_) => return file_not_found_err(&self.search_paths, &path),
                };
            }
            Ok(to_value(permalink).unwrap())
        }
    }
}
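// Example template usage (a sketch; the paths are illustrative):
//   {{ get_url(path="@/blog/post.md", lang="en") }}
//   {{ get_url(path="app.css", cachebust=true, trailing_slash=false) }}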
#[derive(Debug)]
pub struct GetFileHash {
    search_paths: Vec<PathBuf>,
}
impl GetFileHash {
    pub fn new(search_paths: Vec<PathBuf>) -> Self {
        Self { search_paths }
    }
}

const DEFAULT_SHA_TYPE: u16 = 384;
impl TeraFn for GetFileHash {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let path = required_arg!(
            String,
            args.get("path"),
            "`get_file_hash` requires a `path` argument with a string value"
        );
        let sha_type = optional_arg!(
            u16,
            args.get("sha_type"),
            "`get_file_hash`: `sha_type` must be 256, 384 or 512"
        )
        .unwrap_or(DEFAULT_SHA_TYPE);

        let compute_hash_fn = match sha_type {
            256 => compute_file_sha256,
            384 => compute_file_sha384,
            512 => compute_file_sha512,
            _ => return Err("`get_file_hash`: `sha_type` must be 256, 384 or 512".into()),
        };

        let hash = open_file(&self.search_paths, &path).and_then(compute_hash_fn);

        match hash {
            Ok(digest) => Ok(to_value(digest).unwrap()),
            Err(_) => file_not_found_err(&self.search_paths, &path),
        }
    }
}
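// Example template usage (a sketch; SHA-384 is the default `sha_type`):
//   {{ get_file_hash(path="app.css", sha_type=256) }}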
#[derive(Debug)]
pub struct ResizeImage {
    imageproc: Arc<Mutex<imageproc::Processor>>,
}
impl ResizeImage {
    pub fn new(imageproc: Arc<Mutex<imageproc::Processor>>) -> Self {
        Self { imageproc }
    }
}

static DEFAULT_OP: &str = "fill";
static DEFAULT_FMT: &str = "auto";
const DEFAULT_Q: u8 = 75;

impl TeraFn for ResizeImage {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let path = required_arg!(
            String,
            args.get("path"),
            "`resize_image` requires a `path` argument with a string value"
        );
        let width = optional_arg!(
            u32,
            args.get("width"),
            "`resize_image`: `width` must be a non-negative integer"
        );
        let height = optional_arg!(
            u32,
            args.get("height"),
            "`resize_image`: `height` must be a non-negative integer"
        );
        let op = optional_arg!(String, args.get("op"), "`resize_image`: `op` must be a string")
            .unwrap_or_else(|| DEFAULT_OP.to_string());

        let format =
            optional_arg!(String, args.get("format"), "`resize_image`: `format` must be a string")
                .unwrap_or_else(|| DEFAULT_FMT.to_string());

        let quality =
            optional_arg!(u8, args.get("quality"), "`resize_image`: `quality` must be a number")
                .unwrap_or(DEFAULT_Q);
        if quality == 0 || quality > 100 {
            return Err("`resize_image`: `quality` must be in range 1-100".to_string().into());
        }

        let mut imageproc = self.imageproc.lock().unwrap();
        if !imageproc.source_exists(&path) {
            return Err(format!("`resize_image`: Cannot find path: {}", path).into());
        }

        let imageop = imageproc::ImageOp::from_args(path, &op, width, height, &format, quality)
            .map_err(|e| format!("`resize_image`: {}", e))?;
        let url = imageproc.insert(imageop);

        to_value(url).map_err(|err| err.into())
    }
}
#[derive(Debug)]
pub struct GetImageMeta {
    content_path: PathBuf,
}

impl GetImageMeta {
    pub fn new(content_path: PathBuf) -> Self {
        Self { content_path }
    }
}

impl TeraFn for GetImageMeta {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let path = required_arg!(
            String,
            args.get("path"),
            "`get_image_metadata` requires a `path` argument with a string value"
        );
        let src_path = self.content_path.join(&path);
        if !src_path.exists() {
            return Err(format!("`get_image_metadata`: Cannot find path: {}", path).into());
        }
        let (height, width) = image_dimensions(&src_path)?;
        let mut map = tera::Map::new();
        map.insert(String::from("height"), Value::Number(tera::Number::from(height)));
        map.insert(String::from("width"), Value::Number(tera::Number::from(width)));
        Ok(Value::Object(map))
    }
}

// Try to read the image dimensions, returned as (height, width), for a given image
fn image_dimensions(path: &PathBuf) -> Result<(u32, u32)> {
    if let Some("svg") = path.extension().and_then(OsStr::to_str) {
        let img = svg::Metadata::parse_file(&path)
            .map_err(|e| Error::chain(format!("Failed to process SVG: {}", path.display()), e))?;
        match (img.height(), img.width(), img.view_box()) {
            (Some(h), Some(w), _) => Ok((h as u32, w as u32)),
            (_, _, Some(view_box)) => Ok((view_box.height as u32, view_box.width as u32)),
            _ => Err("Invalid dimensions: SVG width/height and viewbox not set.".into()),
        }
    } else {
        let img = image::open(&path)
            .map_err(|e| Error::chain(format!("Failed to process image: {}", path.display()), e))?;
        Ok((img.height(), img.width()))
    }
}
#[derive(Debug)]
pub struct GetTaxonomyUrl {
    taxonomies: HashMap<String, HashMap<String, String>>,
    default_lang: String,
    slugify: SlugifyStrategy,
}

impl GetTaxonomyUrl {
    pub fn new(default_lang: &str, all_taxonomies: &[Taxonomy], slugify: SlugifyStrategy) -> Self {
        let mut taxonomies = HashMap::new();
        for taxo in all_taxonomies {
            let mut items = HashMap::new();
            for item in &taxo.items {
                items.insert(slugify_paths(&item.name, slugify), item.permalink.clone());
            }
            taxonomies.insert(format!("{}-{}", taxo.kind.name, taxo.kind.lang), items);
        }
        Self { taxonomies, default_lang: default_lang.to_string(), slugify }
    }
}
impl TeraFn for GetTaxonomyUrl {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let kind = required_arg!(
            String,
            args.get("kind"),
            "`get_taxonomy_url` requires a `kind` argument with a string value"
        );
        let name = required_arg!(
            String,
            args.get("name"),
            "`get_taxonomy_url` requires a `name` argument with a string value"
        );
        let lang =
            optional_arg!(String, args.get("lang"), "`get_taxonomy_url`: `lang` must be a string")
                .unwrap_or_else(|| self.default_lang.clone());

        let container = match self.taxonomies.get(&format!("{}-{}", kind, lang)) {
            Some(c) => c,
            None => {
                return Err(format!(
                    "`get_taxonomy_url` received an unknown taxonomy as kind: {}",
                    kind
                )
                .into());
            }
        };

        if let Some(permalink) = container.get(&slugify_paths(&name, self.slugify)) {
            return Ok(to_value(permalink).unwrap());
        }

        Err(format!("`get_taxonomy_url`: couldn't find `{}` in `{}` taxonomy", name, kind).into())
    }
}
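// Example template usage (a sketch; the `tags` taxonomy and term are illustrative):
//   {{ get_taxonomy_url(kind="tags", name="Programming") }}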
#[derive(Debug)]
pub struct GetPage {
    base_path: PathBuf,
    library: Arc<RwLock<Library>>,
}
impl GetPage {
    pub fn new(base_path: PathBuf, library: Arc<RwLock<Library>>) -> Self {
        Self { base_path: base_path.join("content"), library }
    }
}
impl TeraFn for GetPage {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let path = required_arg!(
            String,
            args.get("path"),
            "`get_page` requires a `path` argument with a string value"
        );
        let full_path = self.base_path.join(&path);
        let library = self.library.read().unwrap();
        match library.get_page(&full_path) {
            Some(p) => Ok(to_value(p.to_serialized(&library)).unwrap()),
            None => Err(format!("Page `{}` not found.", path).into()),
        }
    }
}
#[derive(Debug)]
pub struct GetSection {
    base_path: PathBuf,
    library: Arc<RwLock<Library>>,
}
impl GetSection {
    pub fn new(base_path: PathBuf, library: Arc<RwLock<Library>>) -> Self {
        Self { base_path: base_path.join("content"), library }
    }
}
impl TeraFn for GetSection {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let path = required_arg!(
            String,
            args.get("path"),
            "`get_section` requires a `path` argument with a string value"
        );

        let metadata_only = args
            .get("metadata_only")
            .map_or(false, |c| from_value::<bool>(c.clone()).unwrap_or(false));

        let full_path = self.base_path.join(&path);
        let library = self.library.read().unwrap();

        match library.get_section(&full_path) {
            Some(s) => {
                if metadata_only {
                    Ok(to_value(s.to_serialized_basic(&library)).unwrap())
                } else {
                    Ok(to_value(s.to_serialized(&library)).unwrap())
                }
            }
            None => Err(format!("Section `{}` not found.", path).into()),
        }
    }
}
#[derive(Debug)]
pub struct GetTaxonomy {
    library: Arc<RwLock<Library>>,
    taxonomies: HashMap<String, Taxonomy>,
    default_lang: String,
}
impl GetTaxonomy {
    pub fn new(
        default_lang: &str,
        all_taxonomies: Vec<Taxonomy>,
        library: Arc<RwLock<Library>>,
    ) -> Self {
        let mut taxonomies = HashMap::new();
        for taxo in all_taxonomies {
            taxonomies.insert(format!("{}-{}", taxo.kind.name, taxo.kind.lang), taxo);
        }
        Self { taxonomies, library, default_lang: default_lang.to_string() }
    }
}
impl TeraFn for GetTaxonomy {
    fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
        let kind = required_arg!(
            String,
            args.get("kind"),
            "`get_taxonomy` requires a `kind` argument with a string value"
        );

        let lang =
            optional_arg!(String, args.get("lang"), "`get_taxonomy`: `lang` must be a string")
                .unwrap_or_else(|| self.default_lang.clone());

        match self.taxonomies.get(&format!("{}-{}", kind, lang)) {
            Some(t) => Ok(to_value(t.to_serialized(&self.library.read().unwrap())).unwrap()),
            None => {
                Err(format!("`get_taxonomy` received an unknown taxonomy as kind: {}", kind).into())
            }
        }
    }
}
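// Example template usage (a sketch; binds the whole serialized taxonomy):
//   {% set tags = get_taxonomy(kind="tags", lang="fr") %}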
#[cfg(test)]
mod tests {
    use super::{GetFileHash, GetTaxonomy, GetTaxonomyUrl, GetUrl, Trans};

    use std::collections::HashMap;
    use std::env::temp_dir;
    use std::fs::remove_dir_all;
    use std::path::PathBuf;
    use std::sync::{Arc, RwLock};

    use lazy_static::lazy_static;

    use tera::{to_value, Function, Value};

    use config::{Config, Taxonomy as TaxonomyConfig};
    use library::{Library, Taxonomy, TaxonomyItem};
    use utils::fs::{create_directory, create_file};
    use utils::slugs::SlugifyStrategy;

    struct TestContext {
        static_path: PathBuf,
    }
    impl TestContext {
        fn setup() -> Self {
            let dir = temp_dir().join("static");
            create_directory(&dir).expect("Could not create test directory");
            create_file(&dir.join("app.css"), "// Hello world!")
                .expect("Could not create test content (app.css)");
            Self { static_path: dir }
        }
    }
    impl Drop for TestContext {
        fn drop(&mut self) {
            remove_dir_all(&self.static_path).expect("Could not free test directory");
        }
    }

    lazy_static! {
        static ref TEST_CONTEXT: TestContext = TestContext::setup();
    }

    #[test]
    fn can_add_cachebust_to_url() {
        let config = Config::default();
        let static_fn = GetUrl::new(config, HashMap::new(), vec![TEST_CONTEXT.static_path.clone()]);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        args.insert("cachebust".to_string(), to_value(true).unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css?h=572e691dc68c3fcd653ae463261bdb38f35dc6f01715d9ce68799319dd158840");
    }

    #[test]
    fn can_add_trailing_slashes() {
        let config = Config::default();
        let static_fn = GetUrl::new(config, HashMap::new(), vec![TEST_CONTEXT.static_path.clone()]);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        args.insert("trailing_slash".to_string(), to_value(true).unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css/");
    }

    #[test]
    fn can_add_slashes_and_cachebust() {
        let config = Config::default();
        let static_fn = GetUrl::new(config, HashMap::new(), vec![TEST_CONTEXT.static_path.clone()]);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        args.insert("trailing_slash".to_string(), to_value(true).unwrap());
        args.insert("cachebust".to_string(), to_value(true).unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css/?h=572e691dc68c3fcd653ae463261bdb38f35dc6f01715d9ce68799319dd158840");
    }

    #[test]
    fn can_link_to_some_static_file() {
        let config = Config::default();
        let static_fn = GetUrl::new(config, HashMap::new(), vec![TEST_CONTEXT.static_path.clone()]);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css");
    }

    #[test]
    fn can_get_taxonomy() {
        let mut config = Config::default();
        config.slugify.taxonomies = SlugifyStrategy::On;
        let taxo_config = TaxonomyConfig {
            name: "tags".to_string(),
            lang: config.default_language.clone(),
            ..TaxonomyConfig::default()
        };
        let taxo_config_fr = TaxonomyConfig {
            name: "tags".to_string(),
            lang: "fr".to_string(),
            ..TaxonomyConfig::default()
        };
        let library = Arc::new(RwLock::new(Library::new(0, 0, false)));
        let tag = TaxonomyItem::new(
            "Programming",
            &taxo_config,
            "tags",
            &config,
            vec![],
            &library.read().unwrap(),
        );
        let tag_fr = TaxonomyItem::new(
            "Programmation",
            &taxo_config_fr,
            "tags",
            &config,
            vec![],
            &library.read().unwrap(),
        );
        let tags = Taxonomy { kind: taxo_config, slug: "tags".to_string(), items: vec![tag] };
        let tags_fr =
            Taxonomy { kind: taxo_config_fr, slug: "tags".to_string(), items: vec![tag_fr] };

        let taxonomies = vec![tags.clone(), tags_fr.clone()];
        let static_fn =
            GetTaxonomy::new(&config.default_language, taxonomies.clone(), library.clone());
        // can find it correctly
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        let res = static_fn.call(&args).unwrap();
        let res_obj = res.as_object().unwrap();
        assert_eq!(res_obj["kind"], to_value(tags.kind).unwrap());
        assert_eq!(res_obj["items"].clone().as_array().unwrap().len(), 1);
        assert_eq!(
            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["name"],
            Value::String("Programming".to_string())
        );
        assert_eq!(
            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["slug"],
            Value::String("programming".to_string())
        );
        assert_eq!(
            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()
                ["permalink"],
            Value::String("http://a-website.com/tags/programming/".to_string())
        );
        assert_eq!(
            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["pages"],
            Value::Array(vec![])
        );
        // Works with other languages as well
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        args.insert("lang".to_string(), to_value("fr").unwrap());
        let res = static_fn.call(&args).unwrap();
        let res_obj = res.as_object().unwrap();
        assert_eq!(res_obj["kind"], to_value(tags_fr.kind).unwrap());
        assert_eq!(res_obj["items"].clone().as_array().unwrap().len(), 1);
        assert_eq!(
            res_obj["items"].clone().as_array().unwrap()[0].clone().as_object().unwrap()["name"],
            Value::String("Programmation".to_string())
        );

        // and errors if it can't find it
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("something-else").unwrap());
        assert!(static_fn.call(&args).is_err());
    }

    #[test]
    fn can_get_taxonomy_url() {
        let mut config = Config::default();
        config.slugify.taxonomies = SlugifyStrategy::On;
        let taxo_config = TaxonomyConfig {
            name: "tags".to_string(),
            lang: config.default_language.clone(),
            ..TaxonomyConfig::default()
        };
        let taxo_config_fr = TaxonomyConfig {
            name: "tags".to_string(),
            lang: "fr".to_string(),
            ..TaxonomyConfig::default()
        };
        let library = Library::new(0, 0, false);
        let tag = TaxonomyItem::new("Programming", &taxo_config, "tags", &config, vec![], &library);
        let tag_fr =
            TaxonomyItem::new("Programmation", &taxo_config_fr, "tags", &config, vec![], &library);
        let tags = Taxonomy { kind: taxo_config, slug: "tags".to_string(), items: vec![tag] };
        let tags_fr =
            Taxonomy { kind: taxo_config_fr, slug: "tags".to_string(), items: vec![tag_fr] };

        let taxonomies = vec![tags.clone(), tags_fr.clone()];
        let static_fn =
            GetTaxonomyUrl::new(&config.default_language, &taxonomies, config.slugify.taxonomies);

        // can find it correctly
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        args.insert("name".to_string(), to_value("Programming").unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            to_value("http://a-website.com/tags/programming/").unwrap()
        );

        // can find it correctly with inconsistent capitalisation
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        args.insert("name".to_string(), to_value("programming").unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            to_value("http://a-website.com/tags/programming/").unwrap()
        );

        // works with other languages
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        args.insert("name".to_string(), to_value("Programmation").unwrap());
        args.insert("lang".to_string(), to_value("fr").unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            to_value("http://a-website.com/fr/tags/programmation/").unwrap()
        );

        // and errors if it can't find it
        let mut args = HashMap::new();
        args.insert("kind".to_string(), to_value("tags").unwrap());
        args.insert("name".to_string(), to_value("random").unwrap());
        assert!(static_fn.call(&args).is_err());
    }

    const TRANS_CONFIG: &str = r#"
base_url = "https://remplace-par-ton-url.fr"
default_language = "fr"
languages = [
    { code = "en" },
]

[translations]
[translations.fr]
title = "Un titre"

[translations.en]
title = "A title"
"#;

    #[test]
    fn can_translate_a_string() {
        let config = Config::parse(TRANS_CONFIG).unwrap();
        let static_fn = Trans::new(config);
        let mut args = HashMap::new();

        args.insert("key".to_string(), to_value("title").unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "Un titre");

        args.insert("lang".to_string(), to_value("en").unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "A title");

        args.insert("lang".to_string(), to_value("fr").unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "Un titre");
    }

    #[test]
    fn error_on_absent_translation_lang() {
        let mut args = HashMap::new();
        args.insert("lang".to_string(), to_value("absent").unwrap());
        args.insert("key".to_string(), to_value("title").unwrap());

        let config = Config::parse(TRANS_CONFIG).unwrap();
        let error = Trans::new(config).call(&args).unwrap_err();
        assert_eq!("Failed to retrieve term translation", format!("{}", error));
    }

    #[test]
    fn error_on_absent_translation_key() {
        let mut args = HashMap::new();
        args.insert("lang".to_string(), to_value("en").unwrap());
        args.insert("key".to_string(), to_value("absent").unwrap());

        let config = Config::parse(TRANS_CONFIG).unwrap();
        let error = Trans::new(config).call(&args).unwrap_err();
        assert_eq!("Failed to retrieve term translation", format!("{}", error));
    }

    #[test]
    fn error_when_language_not_available() {
        let config = Config::parse(TRANS_CONFIG).unwrap();
        let static_fn = GetUrl::new(config, HashMap::new(), vec![TEST_CONTEXT.static_path.clone()]);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("@/a_section/a_page.md").unwrap());
        args.insert("lang".to_string(), to_value("it").unwrap());
        let err = static_fn.call(&args).unwrap_err();
        assert_eq!(
            "`it` is not an authorized language (check config.languages).",
            format!("{}", err)
        );
    }

    #[test]
    fn can_get_url_with_default_language() {
        let config = Config::parse(TRANS_CONFIG).unwrap();
        let mut permalinks = HashMap::new();
        permalinks.insert(
            "a_section/a_page.md".to_string(),
            "https://remplace-par-ton-url.fr/a_section/a_page/".to_string(),
        );
        permalinks.insert(
            "a_section/a_page.en.md".to_string(),
            "https://remplace-par-ton-url.fr/en/a_section/a_page/".to_string(),
        );
        let static_fn = GetUrl::new(config, permalinks, vec![TEST_CONTEXT.static_path.clone()]);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("@/a_section/a_page.md").unwrap());
        args.insert("lang".to_string(), to_value("fr").unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            "https://remplace-par-ton-url.fr/a_section/a_page/"
        );
    }

    #[test]
    fn can_get_url_with_other_language() {
        let config = Config::parse(TRANS_CONFIG).unwrap();
        let mut permalinks = HashMap::new();
        permalinks.insert(
            "a_section/a_page.md".to_string(),
            "https://remplace-par-ton-url.fr/a_section/a_page/".to_string(),
        );
        permalinks.insert(
            "a_section/a_page.en.md".to_string(),
            "https://remplace-par-ton-url.fr/en/a_section/a_page/".to_string(),
        );
        let static_fn = GetUrl::new(config, permalinks, vec![TEST_CONTEXT.static_path.clone()]);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("@/a_section/a_page.md").unwrap());
        args.insert("lang".to_string(), to_value("en").unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            "https://remplace-par-ton-url.fr/en/a_section/a_page/"
        );
    }

    #[test]
    fn can_get_file_hash_sha256() {
        let static_fn = GetFileHash::new(vec![TEST_CONTEXT.static_path.clone()]);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        args.insert("sha_type".to_string(), to_value(256).unwrap());
        assert_eq!(
            static_fn.call(&args).unwrap(),
            "572e691dc68c3fcd653ae463261bdb38f35dc6f01715d9ce68799319dd158840"
        );
    }

    #[test]
    fn can_get_file_hash_sha384() {
        let static_fn = GetFileHash::new(vec![TEST_CONTEXT.static_path.clone()]);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "141c09bd28899773b772bbe064d8b718fa1d6f2852b7eafd5ed6689d26b74883b79e2e814cd69d5b52ab476aa284c414");
    }

    #[test]
    fn can_get_file_hash_sha512() {
        let static_fn = GetFileHash::new(vec![TEST_CONTEXT.static_path.clone()]);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("app.css").unwrap());
        args.insert("sha_type".to_string(), to_value(512).unwrap());
        assert_eq!(static_fn.call(&args).unwrap(), "379dfab35123b9159d9e4e92dc90e2be44cf3c2f7f09b2e2df80a1b219b461de3556c93e1a9ceb3008e999e2d6a54b4f1d65ee9be9be63fa45ec88931623372f");
    }

    #[test]
    fn error_when_file_not_found_for_hash() {
        let static_fn = GetFileHash::new(vec![TEST_CONTEXT.static_path.clone()]);
        let mut args = HashMap::new();
        args.insert("path".to_string(), to_value("doesnt-exist").unwrap());
        assert_eq!(
            format!(
                "file `doesnt-exist` not found; searched in {}",
                TEST_CONTEXT.static_path.to_str().unwrap()
            ),
            format!("{}", static_fn.call(&args).unwrap_err())
        );
    }
}
@@ -1,10 +1,14 @@
pub mod filters;
pub mod global_fns;

use std::path::Path;

use config::Config;
use lazy_static::lazy_static;
use tera::{Context, Tera};

use errors::{Error, Result};
use errors::{bail, Error, Result};
use utils::templates::rewrite_theme_paths;

lazy_static! {
    pub static ref ZOLA_TERA: Tera = {

@@ -51,3 +55,43 @@ pub fn render_redirect_template(url: &str, tera: &Tera) -> Result<String> {
    tera.render("internal/alias.html", &context)
        .map_err(|e| Error::chain(format!("Failed to render alias for '{}'", url), e))
}

pub fn load_tera(path: &Path, config: &Config) -> Result<Tera> {
    let tpl_glob =
        format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "templates/**/*.{*ml,md}");

    // Only parsing as we might be extending templates from themes and that would error
    // as we haven't loaded them yet
    let mut tera =
        Tera::parse(&tpl_glob).map_err(|e| Error::chain("Error parsing templates", e))?;

    if let Some(ref theme) = config.theme {
        // Test that the templates folder exists for that theme
        let theme_path = path.join("themes").join(&theme);
        if !theme_path.join("templates").exists() {
            bail!("Theme `{}` is missing a templates folder", theme);
        }

        let theme_tpl_glob = format!(
            "{}/{}",
            path.to_string_lossy().replace("\\", "/"),
            format!("themes/{}/templates/**/*.{{*ml,md}}", theme)
        );
        let mut tera_theme = Tera::parse(&theme_tpl_glob)
            .map_err(|e| Error::chain("Error parsing templates from themes", e))?;
        rewrite_theme_paths(&mut tera_theme, &theme);

        if theme_path.join("templates").join("robots.txt").exists() {
            tera_theme.add_template_file(theme_path.join("templates").join("robots.txt"), None)?;
        }
        tera.extend(&tera_theme)?;
    }
    tera.extend(&ZOLA_TERA)?;
    tera.build_inheritance_chains()?;

    if path.join("templates").join("robots.txt").exists() {
        tera.add_template_file(path.join("templates").join("robots.txt"), Some("robots.txt"))?;
    }

    Ok(tera)
}
@@ -10,12 +10,11 @@ tera = "1"
unicode-segmentation = "1.2"
walkdir = "2"
toml = "0.5"
serde = "1"
serde_derive = "1"
serde = { version = "1.0", features = ["derive"] }
slug = "0.1"
percent-encoding = "2"
filetime = "0.2.12"
minify-html = "0.4"
minify-html = "0.4.2"

errors = { path = "../errors" }
@@ -1,5 +1,4 @@
use serde::{Deserialize, Deserializer};
use serde_derive::Deserialize;
use tera::{Map, Value};

/// Used as an attribute when we want to convert from TOML to a string date
@@ -1,5 +1,5 @@
use filetime::{set_file_mtime, FileTime};
use std::fs::{copy, create_dir_all, metadata, read_dir, File};
use std::fs::{copy, create_dir_all, metadata, File};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::time::SystemTime;

@@ -49,7 +49,7 @@ pub fn create_directory(path: &Path) -> Result<()> {
pub fn read_file(path: &Path) -> Result<String> {
    let mut content = String::new();
    File::open(path)
        .map_err(|e| Error::chain(format!("Failed to open '{:?}'", path.display()), e))?
        .map_err(|e| Error::chain(format!("Failed to open '{}'", path.display()), e))?
        .read_to_string(&mut content)?;

    // Remove utf-8 BOM if any.

@@ -60,40 +60,6 @@ pub fn read_file(path: &Path) -> Result<String> {
    Ok(content)
}

/// Return the content of a file, with error handling added.
/// The default error message is overwritten by the message given.
/// That means it is allocating 2 strings, oh well
pub fn read_file_with_error(path: &Path, message: &str) -> Result<String> {
    let res = read_file(&path);
    if res.is_ok() {
        return res;
    }
    let mut err = Error::msg(message);
    err.source = res.unwrap_err().source;
    Err(err)
}

/// Looks into the current folder for the path and see if there's anything that is not a .md
/// file. Those will be copied next to the rendered .html file
pub fn find_related_assets(path: &Path) -> Vec<PathBuf> {
    let mut assets = vec![];

    for entry in read_dir(path).unwrap().filter_map(std::result::Result::ok) {
        let entry_path = entry.path();
        if entry_path.is_file() {
            match entry_path.extension() {
                Some(e) => match e.to_str() {
                    Some("md") => continue,
                    _ => assets.push(entry_path.to_path_buf()),
                },
                None => continue,
            }
        }
    }

    assets
}

/// Copy a file but takes into account where to start the copy as
/// there might be folders we need to create on the way.
pub fn copy_file(src: &Path, dest: &PathBuf, base_path: &PathBuf, hard_link: bool) -> Result<()> {

@@ -217,25 +183,9 @@ mod tests {
    use std::path::PathBuf;
    use std::str::FromStr;

    use tempfile::{tempdir, tempdir_in};
    use tempfile::tempdir_in;

    use super::{copy_file, find_related_assets};

    #[test]
    fn can_find_related_assets() {
        let tmp_dir = tempdir().expect("create temp dir");
        File::create(tmp_dir.path().join("index.md")).unwrap();
        File::create(tmp_dir.path().join("example.js")).unwrap();
        File::create(tmp_dir.path().join("graph.jpg")).unwrap();
        File::create(tmp_dir.path().join("fail.png")).unwrap();

        let assets = find_related_assets(tmp_dir.path());
        assert_eq!(assets.len(), 3);
        assert_eq!(assets.iter().filter(|p| p.extension().unwrap() != "md").count(), 3);
        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "example.js").count(), 1);
        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "graph.jpg").count(), 1);
        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "fail.png").count(), 1);
    }
    use super::copy_file;

    #[test]
    fn test_copy_file_timestamp_preserved() {
@@ -6,8 +6,8 @@ pub fn html(html: String) -> Result<String> {
    let mut input_bytes = html.as_bytes().to_vec();

    match with_friendly_error(&mut input_bytes, cfg) {
        Ok(len) => match std::str::from_utf8(&input_bytes) {
            Ok(result) => Ok(result[..len].to_string()),
        Ok(len) => match std::str::from_utf8(&input_bytes[..len]) {
            Ok(result) => Ok(result.to_string()),
            Err(err) => bail!("Failed to convert bytes to string : {}", err),
        },
        Err(minify_error) => {

@@ -47,4 +47,47 @@ mod tests {
        let res = html(input.to_owned()).unwrap();
        assert_eq!(res, expected);
    }

    // https://github.com/getzola/zola/issues/1304
    #[test]
    fn can_minify_multibyte_characters() {
        let input = r#"
俺が好きなのはキツネの…ケツねw
ー丁寧なインタネット生活の人より
        "#;
        let expected = r#"俺が好きなのはキツネの…ケツねw ー丁寧なインタネット生活の人より"#;
        let res = html(input.to_owned()).unwrap();
        assert_eq!(res, expected);
    }

    // https://github.com/getzola/zola/issues/1300
    #[test]
    fn can_minify_and_preserve_whitespace_in_pre_elements() {
        let input = r#"
<!doctype html>
<html>
<head>
<meta charset="utf-8">
</head>
<body>
<pre><code>fn main() {
println!("Hello, world!");
<span>loop {
println!("Hello, world!");
}</span>
}
</code></pre>
</body>
</html>
        "#;
        let expected = r#"<!doctype html><html><head><meta charset=utf-8><body><pre><code>fn main() {
println!("Hello, world!");
<span>loop {
println!("Hello, world!");
}</span>
}
</code></pre>"#;
        let res = html(input.to_owned()).unwrap();
        assert_eq!(res, expected);
    }
}
@@ -3,7 +3,7 @@ use std::collections::HashMap;
use std::hash::BuildHasher;
use unicode_segmentation::UnicodeSegmentation;

use errors::{bail, Result};
use errors::Result;

/// Get word count and estimated reading time
pub fn get_reading_analytics(content: &str) -> (usize, usize) {

@@ -14,12 +14,15 @@ pub fn get_reading_analytics(content: &str) -> (usize, usize) {
    (word_count, ((word_count + 199) / 200))
}

/// Result of a successful resolution of an internal link.
#[derive(Debug, PartialEq, Clone)]
pub struct ResolvedInternalLink {
    /// Resolved link target, as absolute URL address.
    pub permalink: String,
    // The 2 fields below are only set when there is an anchor
    // as we will need that to check if it exists after the markdown rendering is done
    pub md_path: Option<String>,
    /// Internal path to the .md file, without the leading `@/`.
    pub md_path: String,
    /// Optional anchor target.
    /// We can check whether it exists only after all the markdown rendering is done.
    pub anchor: Option<String>,
}

@@ -36,20 +39,17 @@ pub fn resolve_internal_link<S: BuildHasher>(
    let parts = clean_link.split('#').collect::<Vec<_>>();
    // If we have slugification turned off, we might end up with some escaped characters so we need
    // to decode them first
    let decoded = &*percent_decode(parts[0].as_bytes()).decode_utf8_lossy();
    match permalinks.get(decoded) {
        Some(p) => {
            if parts.len() > 1 {
                Ok(ResolvedInternalLink {
                    permalink: format!("{}#{}", p, parts[1]),
                    md_path: Some(decoded.to_string()),
                    anchor: Some(parts[1].to_string()),
                })
            } else {
                Ok(ResolvedInternalLink { permalink: p.to_string(), md_path: None, anchor: None })
            }
        }
        None => bail!(format!("Relative link {} not found.", link)),
    let decoded = percent_decode(parts[0].as_bytes()).decode_utf8_lossy().to_string();
    let target =
        permalinks.get(&decoded).ok_or_else(|| format!("Relative link {} not found.", link))?;
    if parts.len() > 1 {
        Ok(ResolvedInternalLink {
            permalink: format!("{}#{}", target, parts[1]),
            md_path: decoded,
            anchor: Some(parts[1].to_string()),
        })
    } else {
        Ok(ResolvedInternalLink { permalink: target.to_string(), md_path: decoded, anchor: None })
    }
}

@@ -81,7 +81,7 @@ mod tests {
    permalinks.insert("pages/about.md".to_string(), "https://vincent.is/about".to_string());
    let res = resolve_internal_link("@/pages/about.md#hello", &permalinks).unwrap();
    assert_eq!(res.permalink, "https://vincent.is/about#hello");
    assert_eq!(res.md_path, Some("pages/about.md".to_string()));
    assert_eq!(res.md_path, "pages/about.md".to_string());
    assert_eq!(res.anchor, Some("hello".to_string()));
}

@@ -94,7 +94,7 @@ mod tests {
    );
    let res = resolve_internal_link("@/pages/about%20space.md#hello", &permalinks).unwrap();
    assert_eq!(res.permalink, "https://vincent.is/about%20space/#hello");
    assert_eq!(res.md_path, Some("pages/about space.md".to_string()));
    assert_eq!(res.md_path, "pages/about space.md".to_string());
    assert_eq!(res.anchor, Some("hello".to_string()));
}
@@ -1,4 +1,4 @@
use serde_derive::{Deserialize, Serialize};
use serde::{Deserialize, Serialize};

#[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
@@ -3,10 +3,17 @@ title = "Zola"
description = "Everything you need to make a static site engine in one binary."

compile_sass = true
highlight_code = true
insert_anchor_links = true
highlight_theme = "kronuz"
build_search_index = true

[markdown]
highlight_code = true
highlight_theme = "kronuz"
#highlight_theme = "css"
#highlight_themes_css = [
# { theme = "base16-ocean-dark", filename = "syntax-theme-dark.css" },
# { theme = "base16-ocean-light", filename = "syntax-theme-light.css" },
#]

[extra]
author = "Vincent Prouillet"
@@ -28,10 +28,11 @@ resize_image(path, width, height, op, format, quality)
  - `"auto"`
  - `"jpg"`
  - `"png"`
  - `"webp"`

  The default is `"auto"`; this means that the format is chosen based on the input image format:
  JPEG is chosen for JPEGs and other lossy formats, and PNG is chosen for PNGs and other lossless formats.
- `quality` (_optional_): JPEG quality of the resized image, in percent. Only used when encoding JPEGs; default value is `75`.
- `quality` (_optional_): JPEG or WebP quality of the resized image, in percent. Only used when encoding JPEGs or WebPs; for JPEG the default value is `75`, for WebP the default is lossless.

### Image processing and return value

@@ -43,10 +44,24 @@ static/processed_images/

The filename of each resized image is a hash of the function arguments,
which means that once an image is resized in a certain way, it will be stored in the above directory and will not
need to be resized again during subsequent builds (unless the image itself, the dimensions, or other arguments are changed).
Therefore, if you have a large number of images, they will only need to be resized once.
need to be resized again during subsequent builds (unless the image itself, the dimensions, or other arguments have changed).

The function returns a full URL to the resized image.
The function returns an object with the following schema:

```
/// The final URL for that asset
url: String,
/// The path to the static asset generated
static_path: String,
/// New image width
width: u32,
/// New image height
height: u32,
/// Original image width
orig_width: u32,
/// Original image height
orig_height: u32,
```
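
For example, a minimal sketch of using the returned object in a template (the path is illustrative):

```jinja2
{% set image = resize_image(path="images/photo.jpg", width=640, height=480) %}
<img src="{{ image.url }}" width="{{ image.width }}" height="{{ image.height }}" />
```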

## Resize operations

@@ -112,7 +127,8 @@ but it can be used in Markdown using [shortcodes](@/documentation/content/shortc
The examples above were generated using a shortcode file named `resize_image.html` with this content:

```jinja2
<img src="{{ resize_image(path=path, width=width, height=height, op=op) }}" />
{% set image = resize_image(path=path, width=width, height=height, op=op) %}
<img src="{{ image.url }}" />
```

## Creating picture galleries

@@ -128,14 +144,16 @@ This can be used in shortcodes. For example, we can create a very simple html-on
picture gallery with the following shortcode named `gallery.html`:

```jinja2
{% for asset in page.assets %}
{% if asset is matching("[.](jpg|png)$") %}
<a href="{{ get_url(path=asset) }}">
<img src="{{ resize_image(path=asset, width=240, height=180, op="fill") }}" />
<div>
{% for asset in page.assets -%}
{%- if asset is matching("[.](jpg|png)$") -%}
{% set image = resize_image(path=asset, width=240, height=180) %}
<a href="{{ get_url(path=asset) }}" target="_blank">
<img src="{{ image.url }}" />
</a>
&ensp;
{% endif %}
{% endfor %}
{%- endif %}
{%- endfor %}
</div>
```

As you may notice, we didn't specify an `op` argument, which means that it'll default to `"fill"`. Similarly,
@@ -21,9 +21,9 @@ If you want to use per-language taxonomies, ensure you set the `lang` field in t
configuration.

Note: By default, Chinese and Japanese search indexing is not included. You can include
the support by building `zola` using `cargo build --features search/indexing-ja search/indexing-zh`.
the support by building `zola` using `cargo build --features search/indexing-ja --features search/indexing-zh`.
Please also note that enabling Chinese indexing will increase the binary size by approximately
5 MB while enabling Japanese indexing will increase the binary size by approximately 70 MB
due to the incredibly large dictionaries.

## Content
@@ -66,12 +66,12 @@ When the article's output path is not specified in the frontmatter, it is extrac
- if the filename is `index.md`, its parent folder name (`bar`) is used as output path
- otherwise, the output path is extracted from `thing` (the filename without the `.md` extension)

If the path found starts with a datetime string (`YYYY-mm-dd` or [a RFC3339 datetime](https://www.ietf.org/rfc/rfc3339.txt)) followed by an underscore (`_`) or a dash (`-`), this date is removed from the output path and will be used as the page date (unless already set in the front-matter). Note that the full RFC3339 datetime contains colons, which is not a valid character in a filename on Windows.
If the path found starts with a datetime string (`YYYY-mm-dd` or [a RFC3339 datetime](https://www.ietf.org/rfc/rfc3339.txt)) followed by optional whitespace and then an underscore (`_`) or a dash (`-`), this date is removed from the output path and will be used as the page date (unless already set in the front-matter). Note that the full RFC3339 datetime contains colons, which is not a valid character in a filename on Windows.

The output path extracted from the file path is then slugified or not, depending on the `slugify.paths` config, as explained previously.

**Example:**
The file `content/blog/2018-10-10-hello-world.md` will yield a page at `[base_url]/blog/hello-world`.
The file `content/blog/2018-10-10-hello-world.md` will yield a page at `[base_url]/blog/hello-world`. With optional whitespace, the file `content/blog/2021-01-23 -hello new world.md` will yield a page at `[base_url]/blog/hello-new-world`.

## Front matter
@@ -19,7 +19,7 @@ Any non-Markdown file in a section directory is added to the `assets` collection
Markdown file using relative links.

## Drafting
Just like pages, sections can be drafted by setting the `draft` option in the front matter. By default this is not done. When a section is drafted, its descendants (pages, subsections and assets) will not be processed unless the `--drafts` flag is passed. Note that even pages that don't have a `draft` status will not be processed if one of their parent sections is drafted.

## Front matter
@@ -48,7 +48,7 @@ description = ""
# A draft section is only loaded if the `--drafts` flag is passed to `zola build`, `zola serve` or `zola check`.
draft = false

# Used to sort pages by "date", "weight" or "none". See below for more information.
# Used to sort pages by "date", "title", "weight", or "none". See below for more information.
sort_by = "none"

# Used by the parent section to order its subsections.
@@ -94,7 +94,7 @@ render = true
# Useful for the same reason as `render` but when you don't want a 404 when
# landing on the root section page.
# Example: redirect_to = "documentation/content/overview"
redirect_to =

# If set to "true", the section will pass its pages on to the parent section. Defaults to `false`.
# Useful when the section shouldn't split up the parent section, like
@@ -127,6 +127,7 @@ You can also change the pagination path (the word displayed while paginated in t
by setting the `paginate_path` variable, which defaults to `page`.

## Sorting

It is very common for Zola templates to iterate over pages or sections
to display all pages/sections in a given directory. Consider a very simple
example: a `blog` directory with three files: `blog/Post_1.md`,

@@ -142,7 +143,7 @@ create a list of links to the posts, a simple template might look like this:
This would iterate over the posts in the order specified
by the `sort_by` variable set in the `_index.md` page for the corresponding
section. The `sort_by` variable can be given one of four values: `date`,
`weight` or `none`. If `sort_by` is not set, the pages will be
`title`, `weight` or `none`. If `sort_by` is not set, the pages will be
sorted in the `none` order, which is not intended for sorted content.

Any page that is missing the data it needs to be sorted will be ignored and

@@ -162,6 +163,20 @@ top of the list) to the oldest (at the bottom of the list). Each page will
get `page.earlier` and `page.later` variables that contain the pages with
earlier and later dates, respectively.

### `title`
This will sort all pages by their `title` field in natural lexical order, as
defined by `natural_lexical_cmp` in the [lexical-sort] crate. Each page will
get `page.title_prev` and `page.title_next` variables that contain the pages
with previous and next titles, respectively.

For example, here is a natural lexical ordering: "bachata, BART, bolero,
μ-kernel, meter, Métro, Track-2, Track-3, Track-13, underground". Notice how
special characters and numbers are sorted reasonably. This is better than
the standard sorting: "BART, Métro, Track-13, Track-2, Track-3, bachata,
bolero, meter, underground, μ-kernel".

[lexical-sort]: https://docs.rs/lexical-sort
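A minimal sketch of navigating this ordering from a page template, assuming the section sets `sort_by = "title"`:

```jinja2
{% if page.title_prev %}
  <a href="{{ page.title_prev.permalink }}">&larr; {{ page.title_prev.title }}</a>
{% endif %}
{% if page.title_next %}
  <a href="{{ page.title_next.permalink }}">{{ page.title_next.title }} &rarr;</a>
{% endif %}
```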
### `weight`
This will sort all pages by their `weight` field, from lightest weight
(at the top of the list) to heaviest (at the bottom of the list). Each

@@ -175,9 +190,13 @@ pages sorted by weight will be sorted from lightest (at the top) to heaviest
(at the bottom); pages sorted by date will be sorted from oldest (at the top)
to newest (at the bottom).

`reverse` has no effect on `page.later`/`page.earlier` or `page.heavier`/`page.lighter`.
`reverse` has no effect on:

* `page.later` / `page.earlier`,
* `page.title_prev` / `page.title_next`, or
* `page.heavier` / `page.lighter`.

If the section is paginated, `paginate_reversed=true` should be set in the front matter of the relevant section instead of using the filter.
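As a sketch, reversing a sorted section in a template looks like this (the variable names are the usual section ones):

```jinja2
{% for page in section.pages | reverse %}
  <a href="{{ page.permalink }}">{{ page.title }}</a>
{% endfor %}
```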

## Sorting subsections
Sorting sections is a bit less flexible: sections can only be sorted by `weight`,
@@ -164,6 +164,7 @@ Embed a responsive player for a YouTube video.
The arguments are:

- `id`: the video id (mandatory)
- `playlist`: the playlist id (optional)
- `class`: a class to add to the `<div>` surrounding the iframe
- `autoplay`: when set to "true", the video autoplays on load

@@ -172,6 +173,8 @@ Usage example:
```md
{{/* youtube(id="dQw4w9WgXcQ") */}}

{{/* youtube(id="dQw4w9WgXcQ", playlist="RDdQw4w9WgXcQ") */}}

{{/* youtube(id="dQw4w9WgXcQ", autoplay=true) */}}

{{/* youtube(id="dQw4w9WgXcQ", autoplay=true, class="youtube") */}}
@@ -9,12 +9,10 @@ need to enable it in the [configuration](@/documentation/getting-started/configu
Once this is done, Zola will automatically highlight all code blocks
in your content. A code block in Markdown looks like the following:

````md
````
```rust
let highlight = true;
```
````

You can replace `rust` with another language or not put anything to get the text

@@ -35,6 +33,7 @@ Here is a full list of supported languages and their short names:
- C -> ["c", "h"]
- C# -> ["cs", "csx"]
- C++ -> ["C", "c++", "cc", "cp", "cpp", "cxx", "h", "h++", "hh", "hpp", "hxx", "inl", "ipp"]
- Clojure -> ["clj", "cljc", "cljs", "edn"]
- ClojureC -> ["boot", "clj", "cljc", "cljs", "cljx"]
- CMake -> ["CMakeLists.txt", "cmake"]
- CMake C Header -> ["h.in"]

@@ -112,6 +111,8 @@ Here is a full list of supported languages and their short names:
- PHP -> ["php", "php3", "php4", "php5", "php7", "phps", "phpt", "phtml"]
- Plain Text -> ["txt"]
- PowerShell -> ["ps1", "psd1", "psm1"]
- Protocol Buffer -> ["proto", "protodevel"]
- Protocol Buffer (TEXT) -> ["pb.txt", "pbtxt", "proto.text", "prototxt", "textpb"]
- PureScript -> ["purs"]
- Python -> ["SConscript", "SConstruct", "Sconstruct", "Snakefile", "bazel", "bzl", "cpy", "gyp", "gypi", "pxd", "pxd.in", "pxi", "pxi.in", "py", "py3", "pyi", "pyw", "pyx", "pyx.in", "rpy", "sconstruct", "vpy", "wscript"]
- R -> ["R", "Rprofile", "r"]

@@ -143,6 +144,7 @@ Here is a full list of supported languages and their short names:
- VimL -> ["vim"]
- XML -> ["dtml", "opml", "rng", "rss", "svg", "tld", "xml", "xsd", "xslt"]
- YAML -> ["sublime-syntax", "yaml", "yml"]
- Zig -> ["zig"]
```

Note: due to some issues with the JavaScript syntax, the TypeScript syntax will be used instead.
@@ -168,3 +170,180 @@ If your site source is laid out as follows:
```

you would set your `extra_syntaxes` to `["syntaxes", "syntaxes/Sublime-Language1"]` to load `lang1.sublime-syntax` and `lang2.sublime-syntax`.

## Inline VS classed highlighting

If you use a highlighting scheme like

```toml
highlight_theme = "base16-ocean-dark"
```

for a code block like

````md
```rs
let highlight = true;
```
````

you get the colors directly encoded in the html file.

```html
<pre class="language-rs" style="background-color:#2b303b;">
<code class="language-rs">
<span style="color:#b48ead;">let</span>
<span style="color:#c0c5ce;"> highlight = </span>
<span style="color:#d08770;">true</span>
<span style="color:#c0c5ce;">;
</span>
</code>
</pre>
```

This is nice, because your page will load faster if everything is in one file.
But if you would like to have the user choose a theme from a
list, or use different color schemes for dark/light color schemes, you need a
different solution.

If you use the special `css` color scheme

```toml
highlight_theme = "css"
```

you get CSS class definitions, instead.

```html
<pre class="language-rs">
<code class="language-rs">
<span class="z-source z-rust">
<span class="z-storage z-type z-rust">let</span> highlight
<span class="z-keyword z-operator z-assignment z-rust">=</span>
<span class="z-constant z-language z-rust">true</span>
<span class="z-punctuation z-terminator z-rust">;</span>
</span>
</code>
</pre>
```

Zola can output a css file for a theme in the `static` directory using the `highlight_themes_css` option.

```toml
highlight_themes_css = [
  { theme = "base16-ocean-dark", filename = "syntax-theme-dark.css" },
  { theme = "base16-ocean-light", filename = "syntax-theme-light.css" },
]
```

You can then support light and dark mode like so:

```css
@import url("syntax-theme-dark.css") (prefers-color-scheme: dark);
@import url("syntax-theme-light.css") (prefers-color-scheme: light);
```
## Annotations

You can use additional annotations to customize how code blocks are displayed:

- `linenos` to enable line numbering.

````
```rust,linenos
use highlighter::highlight;
let code = "...";
highlight(code);
```
````

- `linenostart` to specify the number for the first line (defaults to 1)

````
```rust,linenos,linenostart=20
use highlighter::highlight;
let code = "...";
highlight(code);
```
````

- `hl_lines` to highlight lines. You must specify a list of inclusive ranges of lines to highlight,
  separated by whitespace. Ranges are 1-indexed and `linenostart` doesn't influence the values; they always refer to the code block's own line numbers.

````
```rust,hl_lines=1 3-5 9
use highlighter::highlight;
let code = "...";
highlight(code);
```
````

- `hide_lines` to hide lines. You must specify a list of inclusive ranges of lines to hide,
  separated by whitespace. Ranges are 1-indexed.

````
```rust,hide_lines=1-2
use highlighter::highlight;
let code = "...";
highlight(code);
```
````
## Styling codeblocks

Depending on the annotations used, some code blocks will be hard to read without any CSS. We recommend using the following
snippet on your site:

```scss
pre {
  padding: 1rem;
  overflow: auto;
}
// The line numbers already provide some kind of left/right padding
pre[data-linenos] {
  padding: 1rem 0;
}
pre table td {
  padding: 0;
}
// The line number cells
pre table td:nth-of-type(1) {
  text-align: center;
  user-select: none;
}
pre mark {
  // If you want your highlights to take the full width.
  display: block;
  // The default background colour of a mark is bright yellow
  background-color: rgba(254, 252, 232, 0.9);
}
pre table {
  width: 100%;
  border-collapse: collapse;
}
```

This snippet makes the highlighting work on the full width and ensures that a user can copy the content without
selecting the line numbers. You will probably need to adjust it to fit your site's style.

Here's an example with all the options used: `scss, linenos, linenostart=10, hl_lines=3-4 8-9, hide_lines=2 7` with the
snippet above.

```scss, linenos, linenostart=10, hl_lines=3-4 8-9, hide_lines=2 7
pre mark {
  // If you want your highlights to take the full width.
  display: block;
  color: currentcolor;
}
pre table td:nth-of-type(1) {
  // Select a colour matching your theme
  color: #6b6b6b;
  font-style: italic;
}
```

Lines 2 and 7 are comments and are not shown in the final output.

When line numbers are active, the code block is turned into a table with one row and two cells. The first cell contains the line numbers and the second cell contains the code.
Highlights are done via the `<mark>` HTML tag. When a highlighted line has a line number, two `<mark>` tags are created: one around the line number(s) and one around the code.
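
As a rough sketch based on the description above (simplified; the exact attributes and inline styles depend on your theme configuration), a two-line block with line numbers where line 2 is highlighted would render roughly like:

```html
<pre data-linenos>
  <table><tbody><tr>
    <!-- first cell: the line numbers; the highlighted number is wrapped in <mark> -->
    <td>1
<mark>2</mark></td>
    <!-- second cell: the code; the highlighted line is wrapped in <mark> -->
    <td>use highlighter::highlight;
<mark>let code = "...";</mark></td>
  </tr></tbody></table>
</pre>
```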

@ -131,7 +131,7 @@ In a similar manner to how section and pages calculate their output path:

The taxonomy pages are then available at the following paths:

```plain
$BASE_URL/$NAME/ (taxonomy)
$BASE_URL/$NAME/$SLUG (taxonomy entry)
```

53 docs/content/documentation/deployment/sourcehut.md Normal file

@ -0,0 +1,53 @@

+++
title = "Sourcehut Pages"
weight = 15
+++

Deploying your static Zola website on [Sourcehut Pages][srht] is very simple.

You only need to create a manifest file named `.build.yml` in the root folder of your Zola project and push your changes to the Sourcehut git/hg repository. To create your `.build.yml` file, you can start from [a template][srht-tpl].

Example:

```yaml
image: alpine/edge
packages: [ zola ]
oauth: pages.sr.ht/PAGES:RW
environment:
  site: www.example.org
sources:
  - https://git.sr.ht/~your_username/my-website
tasks:
  - build: |
      cd my-website
      zola build
  - package: |
      cd my-website
      tar -C public -cvz . > ../site.tar.gz
  - upload: |
      acurl -f https://pages.sr.ht/publish/$site -Fcontent=@site.tar.gz
```

This manifest will check out your code from `sources`, build it and upload the generated static files to `site` using a wrapper script around `curl` (called `acurl`, already available in all Sourcehut builds).

From this template you need to customize the variable `site` with the domain that will host your website and `sources` to point to your Sourcehut git/hg public URL (in this example `my-website` is the name of the repository).

Then commit and push your changes:

```sh
$ git push
Enumerating objects: 5, done.
...
remote: Build started:
remote: https://builds.sr.ht/~your_username/job/430625 [.build.yml]
To git.sr.ht:~your_username/www
   fbe9afa..59ae556  master -> master
```

The build job will be triggered automatically. Notice that Sourcehut returns a direct link to the build page.

By default you can use a subdomain of Sourcehut Pages to host your static website (e.g. "your_username.srht.site"). If you want to use a custom domain (e.g. "blog.mydomain.org"), you will need to configure a DNS record to point to the Sourcehut server. Instructions for doing this are detailed on [Sourcehut][srht-custom-domain].

[srht]: https://srht.site
[srht-tpl]: https://git.sr.ht/~sircmpwn/pages.sr.ht-examples
[srht-custom-domain]: https://srht.site/custom-domains

@ -88,12 +88,11 @@ $ zola serve --open
```

The serve command will watch all your content and provide live reload without
a hard refresh if possible. If you are using WSL2 on Windows, make sure to store the website on the WSL file system.

Some changes cannot be handled automatically and thus live reload may not always work. If you
fail to see your change or get an error, try restarting `zola serve`.

You can also point to a config file other than `config.toml` like so (note that the position of the `config` option is important):

```bash
$ zola --config config.staging.toml serve
```

@ -151,6 +151,8 @@ anchors = "on"
include_title = true
# Whether to include the description of the page/section in the index
include_description = false
# Whether to include the path of the page/section in the index
include_path = false
# Whether to include the rendered content of the page/section in the index
include_content = true
# At which character to truncate the content to. Useful if you have a lot of pages and the index would

@ -193,6 +195,7 @@ Zola currently has the following highlight themes available:
- [ayu-dark](https://github.com/dempfi/ayu)
- [ayu-light](https://github.com/dempfi/ayu)
- [ayu-mirage](https://github.com/dempfi/ayu)
- [base16-aterlierdune-light](https://atelierbram.github.io/syntax-highlighting/atelier-schemes/dune/)
- [base16-ocean-dark](https://tmtheme-editor.herokuapp.com/#!/editor/theme/Base16%20Ocean%20Dark)
- [base16-ocean-light](https://tmtheme-editor.herokuapp.com/#!/editor/theme/Base16%20Ocean%20Light)
- [bbedit](https://tmtheme-editor.herokuapp.com/#!/editor/theme/BBEdit)

@ -124,8 +124,7 @@ $ choco install zola
Zola does not work in PowerShell ISE.

## From source

To build Zola from source, you will need to have Git, [Rust (at least 1.49) and Cargo](https://www.rust-lang.org/)
installed. You will also need additional dependencies to compile [libsass](https://github.com/sass/libsass):

- OSX, Linux and other Unix-like operating systems: `make` (`gmake` on BSDs), `g++`, `libssl-dev`

@ -64,8 +64,7 @@ Zola adds a few filters in addition to [those](https://tera.netlify.com/docs/#fi
in Tera.

### markdown
Converts the given variable to HTML using Markdown. Please note that shortcodes evaluated by this filter cannot access the current rendering context: `config` will be available, but accessing `section` or `page` (among others) from a shortcode called within the `markdown` filter will prevent your site from building. See [this discussion](https://github.com/getzola/zola/pull/1358).

By default, the filter will wrap all text in a paragraph. To disable this behaviour, you can
pass `true` to the `inline` argument.
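
A minimal sketch of that call (the `some_text` variable is illustrative):

```jinja2
{{ some_text | markdown(inline=true) }}
```
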
@ -82,22 +81,54 @@ Encode the variable to base64.
### base64_decode
Decode the variable from base64.

### num_format
Format a number into its string representation.

```jinja2
{{ 1000000 | num_format }}
<!-- 1,000,000 -->
```

By default this will format the number using the locale set by `config.default_language` in config.toml.

To format a number for a specific locale, you can use the `locale` argument and pass the name of the desired locale:

```jinja2
{{ 1000000 | num_format(locale="en-IN") }}
<!-- 10,00,000 -->
```

## Built-in functions

Zola adds a few Tera functions to [those built-in in Tera](https://tera.netlify.com/docs#built-in-functions)
to make it easier to develop complex sites.

### File searching logic
For functions that search for a file on disk (other than `get_page` and `get_section`), the following
logic applies.

1. The base directory is the Zola root directory, where the `config.toml` is
2. For the given path: if it starts with `@/`, replace that with `content/` instead and trim any leading `/`
3. Search in the following 3 or 4 locations in this order, returning the first where the file exists:
   a. $base_directory + $path
   b. $base_directory + "static/" + $path
   c. $base_directory + "content/" + $path
   d. $base_directory + "themes/" + $theme + "/static/" + $path (only if using a theme)

In practice this means that `@/some/image.jpg`, `/content/some/image.jpg` and `content/some/image.jpg` will point to the
same thing.

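As a sketch of that equivalence (the file is illustrative; `load_data` is documented further below and uses this search logic):

```jinja2
{% set a = load_data(path="@/some/data.toml") %}
{% set b = load_data(path="/content/some/data.toml") %}
{% set c = load_data(path="content/some/data.toml") %}
{# a, b and c all load the same file #}
```
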
It will error if the path is outside the Zola directory.

### `get_page`
Takes a path to an `.md` file and returns the associated page. The base path is the `content` directory.

```jinja2
{% set page = get_page(path="blog/page2.md") %}
```

### `get_section`
Takes a path to an `_index.md` file and returns the associated section. The base path is the `content` directory.

```jinja2
{% set section = get_section(path="blog/_index.md") %}
```

@ -109,70 +140,6 @@ If you only need the metadata of the section, you can pass `metadata_only=true`
{% set section = get_section(path="blog/_index.md", metadata_only=true) %}
```

### `get_taxonomy_url`
Gets the permalink for the taxonomy item found.

@ -199,23 +166,111 @@ kind: TaxonomyConfig;
items: Array<TaxonomyTerm>;
```

`lang` (optional): defaults to `config.default_language` in config.toml.

See the [Taxonomies documentation](@/documentation/templates/taxonomies.md) for full documentation of those types.

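As a hedged sketch (assuming the `lang` argument above belongs to `get_taxonomy`, whose return shape is shown, and that a `tags` taxonomy exists for an `fr` language section in `config.toml`):

```jinja2
{% set tags_fr = get_taxonomy(kind="tags", lang="fr") %}
```
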
### `get_url`
Gets the permalink for the given path.
If the path starts with `@/`, it will be treated as an internal link like the ones used in Markdown,
starting from the root `content` directory, and will be validated.

```jinja2
{% set url = get_url(path="@/blog/_index.md") %}
```

It accepts an optional parameter `lang` in order to compute a *language-aware URL* in multilingual websites. Assuming `config.base_url` is `"http://example.com"`, the following snippet will:

- return `"http://example.com/blog/"` if `config.default_language` is `"en"`
- return `"http://example.com/en/blog/"` if `config.default_language` is **not** `"en"` and `"en"` appears in `config.languages`
- fail otherwise, with the error message `"'en' is not an authorized language (check config.languages)."`

```jinja2
{% set url = get_url(path="@/blog/_index.md", lang="en") %}
```

This can also be used to get the permalinks for static assets, for example if
we want to link to the file that is located at `static/css/app.css`:

```jinja2
{{/* get_url(path="static/css/app.css") */}}
```

By default, assets will not have a trailing slash. You can force one by passing `trailing_slash=true` to the `get_url` function.
An example is:

```jinja2
{{/* get_url(path="static/css/app.css", trailing_slash=true) */}}
```

In the case of non-internal links, you can also add a cachebust of the format `?h=<sha256>` at the end of a URL
by passing `cachebust=true` to the `get_url` function. In this case, the path will need to resolve to an actual file.
See [File Searching Logic](@/documentation/templates/overview.md#file-searching-logic) for details.

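A brief sketch (the asset path is illustrative; the actual `h` value appended by Zola is the file's SHA-256):

```jinja2
{{/* get_url(path="static/css/app.css", cachebust=true) */}}
<!-- e.g. http://example.com/css/app.css?h=abc123... -->
```
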
### `get_file_hash`

Returns the hash digest (SHA-256, SHA-384 or SHA-512) of a file.

It can take the following arguments:

- `path`: mandatory, see [File Searching Logic](@/documentation/templates/overview.md#file-searching-logic) for details
- `sha_type`: optional, one of `256`, `384` or `512`, defaults to `384`
- `base64`: optional, `true` or `false`, defaults to `true`. Whether to encode the hash as base64

```jinja2
{{/* get_file_hash(path="static/js/app.js", sha_type=256) */}}
```

When `base64` is set to `true` (the default), the hash is base64-encoded, which is the format used to implement [subresource
integrity](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).

```jinja2
<script src="{{/* get_url(path="static/js/app.js") */}}"
        integrity="sha384-{{/* get_file_hash(path="static/js/app.js", sha_type=384, base64=true) | safe */}}"></script>
```

Do note that subresource integrity is typically used with external scripts, which `get_file_hash` does not support.

### `get_image_metadata`

Gets metadata for an image. This supports common formats like JPEG, PNG, WebP, BMP and GIF, as well as SVG.

It can take the following arguments:

- `path`: mandatory, see [File Searching Logic](@/documentation/templates/overview.md#file-searching-logic) for details
- `allow_missing`: optional, `true` or `false`, defaults to `false`. Whether a missing file should raise an error or not.

The method returns a map containing `width`, `height` and `format` (the lowercased format name as a string).

```jinja2
{% set meta = get_image_metadata(path="...") %}
Our image (.{{ meta.format }}) is {{ meta.width }}x{{ meta.height }}
```

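With `allow_missing=true`, a missing file yields a null value instead of an error, so you can guard on it (the path here is purely illustrative):

```jinja2
{% set meta = get_image_metadata(path="images/optional-cover.png", allow_missing=true) %}
{% if meta %}<img width="{{ meta.width }}" height="{{ meta.height }}" src="...">{% endif %}
```
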
### `load_data`
Loads data from a file or URL. Supported file types include *toml*, *json*, *csv* and *bibtex*; only UTF-8 encoding is supported.
Any other file type will be loaded as plain text.

The `path` argument specifies the path to the data file, according to the [File Searching Logic](@/documentation/templates/overview.md#file-searching-logic).

```jinja2
{% set data = load_data(path="content/blog/story/data.toml") %}
```

The optional `required` boolean argument can be set to `false` so that missing data (an HTTP error or a local file not found) does not produce an error but returns a null value instead. However, permission issues with a local file, or invalid data that could not be parsed to the requested format, will still produce an error even with `required=false`.

The snippet below outputs the HTML from a Wikipedia page, or "No data found" if the page was not reachable or did not return a successful HTTP code:

```jinja2
{% set data = load_data(path="https://en.wikipedia.org/wiki/Commune_of_Paris", required=false) %}
{% if data %}{{ data | safe }}{% else %}No data found{% endif %}
```

The optional `format` argument allows you to specify and override which data type is contained
within the file specified in the `path` argument. Valid entries are `toml`, `json`, `csv`, `bibtex`
or `plain`. If the `format` argument isn't specified, then the path extension is used.

```jinja2
{% set data = load_data(path="content/blog/story/data.txt", format="json") %}
```

@ -322,6 +377,28 @@ as below.
{{ response }}
```

When no other parameters are specified, the URL will always be retrieved using an HTTP GET request.
Using the `method` parameter (since version 0.14.0), you can also choose to retrieve the URL using a POST request.

When using `method="POST"` you can also use the parameters `body` and `content_type`.
The `body` parameter is the actual content sent in the POST request.
The `content_type` parameter should be the mimetype of the body.

This example will make a POST request to the kroki service to generate an SVG:

```jinja2
{% set postdata = load_data(url="https://kroki.io/blockdiag/svg", format="plain", method="POST", content_type="text/plain", body="blockdiag {
  'Doing POST' -> 'using load_data'
  'using load_data' -> 'can generate' -> 'block diagrams';
  'using load_data' -> is -> 'very easy!';

  'Doing POST' [color = 'greenyellow'];
  'block diagrams' [color = 'pink'];
  'very easy!' [color = 'orange'];
}") %}
{{ postdata | safe }}
```

#### Data caching

Data file loading and remote requests are cached in memory during the build, so multiple requests aren't made

@ -57,7 +57,7 @@ ancestors: Array<String>;
relative_path: String;
// The language for the page if there is one. Defaults to the config `default_language`
lang: String;
// Information about all the available languages for that content, including the current page
translations: Array<TranslatedContent>;
```

@ -13,6 +13,7 @@ First, `TaxonomyTerm` has the following fields:
```ts
name: String;
slug: String;
path: String;
permalink: String;
pages: Array<Page>;
```

@ -25,6 +26,7 @@ paginate_by: Number?;
paginate_path: String?;
feed: Bool;
lang: String;
permalink: String;
```

@ -2,5 +2,6 @@
+++
template = "themes.html"
sort_by = "date"
in_search_index = false
+++

@ -87,6 +87,26 @@ pre {
  padding: 1rem;
  overflow: auto;
}
pre[data-linenos] {
  padding: 1rem 0;
}
pre mark {
  // If you want your highlights to take the full width.
  display: block;
  // The default background colour of a mark is bright yellow
  background-color: rgba(254, 252, 232, 0.9);
}
pre table {
  width: 100%;
  border-collapse: collapse;
}
pre table td {
  padding: 0;
}
pre table td:nth-of-type(1) {
  text-align: center;
  user-select: none;
}

p code, li code {
  background-color: #f5f5f5;

@ -17,3 +17,6 @@ $link-color: #007CBC;
@import "docs";
@import "themes";
@import "search";

//@import url("syntax-theme-dark.css") (prefers-color-scheme: dark);
//@import url("syntax-theme-light.css") (prefers-color-scheme: light);

BIN docs/static/processed_images/0b751f5aa0aeb49e00.png vendored Normal file
Binary file not shown. After Width: | Height: | Size: 29 KiB