commit 789a6408fb

.gitignore (vendored): 2 changes
@@ -1,6 +1,6 @@
 target
 .idea/
-components/site/test_site/public
+test_site/public
 docs/public
 
 small-blog
CHANGELOG.md: 16 changes
@@ -1,5 +1,21 @@
 # Changelog
 
+## 0.3.3 (2018-03-29)
+
+- Fixed config flag in CLI
+- Sitemap entries are now sorted by permalinks to avoid random ordering
+- Preserve directory structure from sass folder when copying compiled css files
+to the public directory
+- Do not require themes to have a static folder
+- Now supports indented Sass syntax
+- Add search index building
+- Update Tera: now has `break` and `continue` in loops
+- Gutenberg now creates an anchor link at the position of the `<!-- more -->` tag if you
+want to link directly to it
+- Fix many shortcode parsing issues
+- Correctly copy themes shortcodes so they are useable in content
+- Fix internal links not working for markdown files directly in `content` directory
+
 ## 0.3.2 (2018-03-05)
 
 - Fix `serve` command trying to read all files as markdown
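Several of the 0.3.3 entries above (the config flag fix, Sass compilation, the new search index) surface as plain keys in a site's `config.toml`; the `Config` changes later in this commit show the exact field names. Below is a minimal usage sketch in the style of the config component's own tests. `Config::parse` and the key names come from this commit; the site values themselves are invented for illustration.

```rust
// Sketch only: parse a config that exercises options touched by 0.3.3.
// The concrete values are illustrative, not taken from the commit.
extern crate config;

use config::Config;

fn main() {
    let toml = r#"
base_url = "https://example.com"
highlight_code = true
compile_sass = true
build_search_index = true
ignored_content = ["*.xlsx"]
"#;

    // With the new `#[serde(default)]` Config, missing keys fall back to defaults
    // and the parsed fields are plain bools rather than Options.
    let conf = Config::parse(toml).expect("valid config");
    assert!(conf.compile_sass);
    assert!(conf.build_search_index);
}
```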
Cargo.lock (generated): 618 changes, diff suppressed because it is too large.
Cargo.toml
@@ -1,10 +1,10 @@
 [package]
 name = "gutenberg"
-version = "0.3.2"
+version = "0.3.3"
 authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
 license = "MIT"
 readme = "README.md"
-description = "A static site generator with everything built-in"
+description = "A fast static site generator with everything built-in"
 homepage = "https://github.com/Keats/gutenberg"
 repository = "https://github.com/Keats/gutenberg"
 keywords = ["static", "site", "generator", "blog"]
@@ -52,4 +52,5 @@ members = [
     "components/taxonomies",
     "components/templates",
     "components/utils",
+    "components/search",
 ]
zsh completion script (_gutenberg)
@@ -15,8 +15,8 @@ _gutenberg() {
 
     local context curcontext="$curcontext" state line
     _arguments "${_arguments_options[@]}" \
-'-c[Path to a config file other than config.toml]' \
-'--config[Path to a config file other than config.toml]' \
+'-c+[Path to a config file other than config.toml]' \
+'--config=[Path to a config file other than config.toml]' \
 '-h[Prints help information]' \
 '--help[Prints help information]' \
 '-V[Prints version information]' \
PowerShell completion script (rewritten)
@@ -1,74 +1,79 @@
-@('gutenberg', './gutenberg') | %{
-    Register-ArgumentCompleter -Native -CommandName $_ -ScriptBlock {
-        param($wordToComplete, $commandAst, $cursorPosition)
-
-        $command = '_gutenberg'
-        $commandAst.CommandElements |
-            Select-Object -Skip 1 |
-            %{
-                switch ($_.ToString()) {
-
-                    'gutenberg' {
-                        $command += '_gutenberg'
-                        break
-                    }
-
-                    'init' {
-                        $command += '_init'
-                        break
-                    }
-
-                    'build' {
-                        $command += '_build'
-                        break
-                    }
-
-                    'serve' {
-                        $command += '_serve'
-                        break
-                    }
-
-                    'help' {
-                        $command += '_help'
-                        break
-                    }
-
-                    default {
-                        break
-                    }
-                }
-            }
-
-        $completions = @()
-
-        switch ($command) {
-
-            '_gutenberg' {
-                $completions = @('init', 'build', 'serve', 'help', '-c', '-h', '-V', '--config', '--help', '--version')
-            }
-
-            '_gutenberg_init' {
-                $completions = @('-h', '-V', '--help', '--version')
-            }
-
-            '_gutenberg_build' {
-                $completions = @('-h', '-V', '-u', '-o', '--help', '--version', '--base-url', '--output-dir')
-            }
-
-            '_gutenberg_serve' {
-                $completions = @('-h', '-V', '-i', '-p', '-o', '-u', '--help', '--version', '--interface', '--port', '--output-dir', '--base-url')
-            }
-
-            '_gutenberg_help' {
-                $completions = @('-h', '-V', '--help', '--version')
-            }
-        }
-
-        $completions |
-            ?{ $_ -like "$wordToComplete*" } |
-            Sort-Object |
-            %{ New-Object System.Management.Automation.CompletionResult $_, $_, 'ParameterValue', $_ }
-    }
+using namespace System.Management.Automation
+using namespace System.Management.Automation.Language
+
+Register-ArgumentCompleter -Native -CommandName 'gutenberg' -ScriptBlock {
+    param($wordToComplete, $commandAst, $cursorPosition)
+
+    $commandElements = $commandAst.CommandElements
+    $command = @(
+        'gutenberg'
+        for ($i = 1; $i -lt $commandElements.Count; $i++) {
+            $element = $commandElements[$i]
+            if ($element -isnot [StringConstantExpressionAst] -or
+                $element.StringConstantType -ne [StringConstantType]::BareWord -or
+                $element.Value.StartsWith('-')) {
+                break
+            }
+            $element.Value
+        }) -join ';'
+
+    $completions = @(switch ($command) {
+        'gutenberg' {
+            [CompletionResult]::new('-c', 'c', [CompletionResultType]::ParameterName, 'Path to a config file other than config.toml')
+            [CompletionResult]::new('--config', 'config', [CompletionResultType]::ParameterName, 'Path to a config file other than config.toml')
+            [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Prints help information')
+            [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Prints help information')
+            [CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Prints version information')
+            [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Prints version information')
+            [CompletionResult]::new('init', 'init', [CompletionResultType]::ParameterValue, 'Create a new Gutenberg project')
+            [CompletionResult]::new('build', 'build', [CompletionResultType]::ParameterValue, 'Builds the site')
+            [CompletionResult]::new('serve', 'serve', [CompletionResultType]::ParameterValue, 'Serve the site. Rebuild and reload on change automatically')
+            [CompletionResult]::new('help', 'help', [CompletionResultType]::ParameterValue, 'Prints this message or the help of the given subcommand(s)')
+            break
+        }
+        'gutenberg;init' {
+            [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Prints help information')
+            [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Prints help information')
+            [CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Prints version information')
+            [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Prints version information')
+            break
+        }
+        'gutenberg;build' {
+            [CompletionResult]::new('-u', 'u', [CompletionResultType]::ParameterName, 'Force the base URL to be that value (default to the one in config.toml)')
+            [CompletionResult]::new('--base-url', 'base-url', [CompletionResultType]::ParameterName, 'Force the base URL to be that value (default to the one in config.toml)')
+            [CompletionResult]::new('-o', 'o', [CompletionResultType]::ParameterName, 'Outputs the generated site in the given path')
+            [CompletionResult]::new('--output-dir', 'output-dir', [CompletionResultType]::ParameterName, 'Outputs the generated site in the given path')
+            [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Prints help information')
+            [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Prints help information')
+            [CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Prints version information')
+            [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Prints version information')
+            break
+        }
+        'gutenberg;serve' {
+            [CompletionResult]::new('-i', 'i', [CompletionResultType]::ParameterName, 'Interface to bind on')
+            [CompletionResult]::new('--interface', 'interface', [CompletionResultType]::ParameterName, 'Interface to bind on')
+            [CompletionResult]::new('-p', 'p', [CompletionResultType]::ParameterName, 'Which port to use')
+            [CompletionResult]::new('--port', 'port', [CompletionResultType]::ParameterName, 'Which port to use')
+            [CompletionResult]::new('-o', 'o', [CompletionResultType]::ParameterName, 'Outputs the generated site in the given path')
+            [CompletionResult]::new('--output-dir', 'output-dir', [CompletionResultType]::ParameterName, 'Outputs the generated site in the given path')
+            [CompletionResult]::new('-u', 'u', [CompletionResultType]::ParameterName, 'Changes the base_url')
+            [CompletionResult]::new('--base-url', 'base-url', [CompletionResultType]::ParameterName, 'Changes the base_url')
+            [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Prints help information')
+            [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Prints help information')
+            [CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Prints version information')
+            [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Prints version information')
+            break
+        }
+        'gutenberg;help' {
+            [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Prints help information')
+            [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Prints help information')
+            [CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Prints version information')
+            [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Prints version information')
+            break
+        }
+    })
+
+    $completions.Where{ $_.CompletionText -like "$wordToComplete*" } |
+        Sort-Object -Property ListItemText
 }
bash completion script (_gutenberg)
@@ -32,13 +32,21 @@ _gutenberg() {
 
     case "${cmd}" in
         gutenberg)
-            opts=" -c -h -V --config --help --version init build serve help"
+            opts=" -h -V -c --help --version --config init build serve help"
             if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
                 COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
                 return 0
             fi
             case "${prev}" in
 
+                --config)
+                    COMPREPLY=($(compgen -f ${cur}))
+                    return 0
+                    ;;
+                -c)
+                    COMPREPLY=($(compgen -f ${cur}))
+                    return 0
+                    ;;
                 *)
                     COMPREPLY=()
                     ;;
@@ -56,19 +64,19 @@ _gutenberg() {
             case "${prev}" in
 
                 --base-url)
-                    COMPREPLY=("<base_url>")
+                    COMPREPLY=($(compgen -f ${cur}))
                     return 0
                     ;;
                 -u)
-                    COMPREPLY=("<base_url>")
+                    COMPREPLY=($(compgen -f ${cur}))
                     return 0
                     ;;
                 --output-dir)
-                    COMPREPLY=("<output_dir>")
+                    COMPREPLY=($(compgen -f ${cur}))
                     return 0
                     ;;
                 -o)
-                    COMPREPLY=("<output_dir>")
+                    COMPREPLY=($(compgen -f ${cur}))
                     return 0
                     ;;
                 *)
@@ -117,35 +125,35 @@ _gutenberg() {
             case "${prev}" in
 
                 --interface)
-                    COMPREPLY=("<interface>")
+                    COMPREPLY=($(compgen -f ${cur}))
                     return 0
                     ;;
                 -i)
-                    COMPREPLY=("<interface>")
+                    COMPREPLY=($(compgen -f ${cur}))
                     return 0
                     ;;
                 --port)
-                    COMPREPLY=("<port>")
+                    COMPREPLY=($(compgen -f ${cur}))
                    return 0
                    ;;
                 -p)
-                    COMPREPLY=("<port>")
+                    COMPREPLY=($(compgen -f ${cur}))
                    return 0
                    ;;
                 --output-dir)
-                    COMPREPLY=("<output_dir>")
+                    COMPREPLY=($(compgen -f ${cur}))
                    return 0
                    ;;
                 -o)
-                    COMPREPLY=("<output_dir>")
+                    COMPREPLY=($(compgen -f ${cur}))
                    return 0
                    ;;
                 --base-url)
-                    COMPREPLY=("<base_url>")
+                    COMPREPLY=($(compgen -f ${cur}))
                    return 0
                    ;;
                 -u)
-                    COMPREPLY=("<base_url>")
+                    COMPREPLY=($(compgen -f ${cur}))
                    return 0
                    ;;
                 *)
Cargo.toml (config component)
@@ -1,7 +1,7 @@
 [package]
 name = "config"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
 
 [dependencies]
 toml = "0.4"
@ -24,7 +24,12 @@ mod theme;
|
||||||
|
|
||||||
use theme::Theme;
|
use theme::Theme;
|
||||||
|
|
||||||
|
// We want a default base url for tests
|
||||||
|
static DEFAULT_BASE_URL: &'static str = "http://a-website.com";
|
||||||
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||||
|
#[serde(default)]
|
||||||
pub struct Config {
|
pub struct Config {
|
||||||
/// Base URL of the site, the only required config argument
|
/// Base URL of the site, the only required config argument
|
||||||
pub base_url: String,
|
pub base_url: String,
|
||||||
|
@ -33,48 +38,47 @@ pub struct Config {
|
||||||
pub theme: Option<String>,
|
pub theme: Option<String>,
|
||||||
/// Title of the site. Defaults to None
|
/// Title of the site. Defaults to None
|
||||||
pub title: Option<String>,
|
pub title: Option<String>,
|
||||||
/// Whether to highlight all code blocks found in markdown files. Defaults to false
|
|
||||||
pub highlight_code: Option<bool>,
|
|
||||||
/// Which themes to use for code highlighting. See Readme for supported themes
|
|
||||||
pub highlight_theme: Option<String>,
|
|
||||||
/// Description of the site
|
/// Description of the site
|
||||||
pub description: Option<String>,
|
pub description: Option<String>,
|
||||||
|
|
||||||
/// The language used in the site. Defaults to "en"
|
/// The language used in the site. Defaults to "en"
|
||||||
pub default_language: Option<String>,
|
pub default_language: String,
|
||||||
|
/// Languages list and translated strings
|
||||||
|
pub translations: HashMap<String, Toml>,
|
||||||
|
|
||||||
|
/// Whether to highlight all code blocks found in markdown files. Defaults to false
|
||||||
|
pub highlight_code: bool,
|
||||||
|
/// Which themes to use for code highlighting. See Readme for supported themes
|
||||||
|
/// Defaults to "base16-ocean-dark"
|
||||||
|
pub highlight_theme: String,
|
||||||
|
|
||||||
/// Whether to generate RSS. Defaults to false
|
/// Whether to generate RSS. Defaults to false
|
||||||
pub generate_rss: Option<bool>,
|
pub generate_rss: bool,
|
||||||
/// The number of articles to include in the RSS feed. Defaults to unlimited
|
/// The number of articles to include in the RSS feed. Defaults to 10_000
|
||||||
pub rss_limit: Option<usize>,
|
pub rss_limit: usize,
|
||||||
/// Whether to generate tags and individual tag pages if some pages have them. Defaults to true
|
/// Whether to generate tags and individual tag pages if some pages have them. Defaults to true
|
||||||
pub generate_tags_pages: Option<bool>,
|
pub generate_tags_pages: bool,
|
||||||
/// Whether to generate categories and individual tag categories if some pages have them. Defaults to true
|
/// Whether to generate categories and individual tag categories if some pages have them. Defaults to true
|
||||||
pub generate_categories_pages: Option<bool>,
|
pub generate_categories_pages: bool,
|
||||||
|
|
||||||
/// Whether to compile the `sass` directory and output the css files into the static folder
|
/// Whether to compile the `sass` directory and output the css files into the static folder
|
||||||
pub compile_sass: Option<bool>,
|
pub compile_sass: bool,
|
||||||
|
/// Whether to build the search index for the content
|
||||||
|
pub build_search_index: bool,
|
||||||
/// A list of file glob patterns to ignore when processing the content folder. Defaults to none.
|
/// A list of file glob patterns to ignore when processing the content folder. Defaults to none.
|
||||||
/// Had to remove the PartialEq derive because GlobSet does not implement it. No impact
|
/// Had to remove the PartialEq derive because GlobSet does not implement it. No impact
|
||||||
/// because it's unused anyway (who wants to sort Configs?).
|
/// because it's unused anyway (who wants to sort Configs?).
|
||||||
pub ignored_content: Option<Vec<String>>,
|
pub ignored_content: Vec<String>,
|
||||||
#[serde(skip_serializing, skip_deserializing)] // not a typo, 2 are needed
|
#[serde(skip_serializing, skip_deserializing)] // not a typo, 2 are needed
|
||||||
pub ignored_content_globber: Option<GlobSet>,
|
pub ignored_content_globset: Option<GlobSet>,
|
||||||
|
|
||||||
/// Languages list and translated strings
|
|
||||||
pub translations: Option<HashMap<String, Toml>>,
|
|
||||||
|
|
||||||
/// All user params set in [extra] in the config
|
/// All user params set in [extra] in the config
|
||||||
pub extra: Option<HashMap<String, Toml>>,
|
pub extra: HashMap<String, Toml>,
|
||||||
|
|
||||||
/// Set automatically when instantiating the config. Used for cachebusting
|
/// Set automatically when instantiating the config. Used for cachebusting
|
||||||
pub build_timestamp: Option<i64>,
|
pub build_timestamp: Option<i64>,
|
||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! set_default {
|
|
||||||
($key: expr, $default: expr) => {
|
|
||||||
if $key.is_none() {
|
|
||||||
$key = Some($default);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Config {
|
impl Config {
|
||||||
/// Parses a string containing TOML to our Config struct
|
/// Parses a string containing TOML to our Config struct
|
||||||
|
@ -85,45 +89,33 @@ impl Config {
|
||||||
Err(e) => bail!(e)
|
Err(e) => bail!(e)
|
||||||
};
|
};
|
||||||
|
|
||||||
set_default!(config.default_language, "en".to_string());
|
if config.base_url.is_empty() || config.base_url == DEFAULT_BASE_URL {
|
||||||
set_default!(config.highlight_code, false);
|
bail!("A base URL is required in config.toml with key `base_url`");
|
||||||
set_default!(config.generate_rss, false);
|
}
|
||||||
set_default!(config.rss_limit, 20);
|
|
||||||
set_default!(config.generate_tags_pages, false);
|
|
||||||
set_default!(config.generate_categories_pages, false);
|
|
||||||
set_default!(config.compile_sass, false);
|
|
||||||
set_default!(config.ignored_content, Vec::new());
|
|
||||||
set_default!(config.translations, HashMap::new());
|
|
||||||
set_default!(config.extra, HashMap::new());
|
|
||||||
|
|
||||||
match config.highlight_theme {
|
if !THEME_SET.themes.contains_key(&config.highlight_theme) {
|
||||||
Some(ref t) => {
|
bail!("Highlight theme {} not available", config.highlight_theme)
|
||||||
if !THEME_SET.themes.contains_key(t) {
|
}
|
||||||
bail!("Theme {} not available", t)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None => config.highlight_theme = Some("base16-ocean-dark".to_string())
|
|
||||||
};
|
|
||||||
|
|
||||||
config.build_timestamp = Some(Utc::now().timestamp());
|
config.build_timestamp = Some(Utc::now().timestamp());
|
||||||
|
|
||||||
// Convert the file glob strings into a compiled glob set matcher. We want to do this once,
|
|
||||||
// at program initialization, rather than for every page, for example. We arrange for the
|
|
||||||
// globset matcher to always exist (even though it has to be an inside an Option at the
|
|
||||||
// moment because of the TOML serializer); if the glob set is empty the `is_match` function
|
|
||||||
// of the globber always returns false.
|
|
||||||
let mut glob_set_builder = GlobSetBuilder::new();
|
|
||||||
|
|
||||||
if let Some(ref v) = config.ignored_content {
|
if !config.ignored_content.is_empty() {
|
||||||
for pat in v {
|
// Convert the file glob strings into a compiled glob set matcher. We want to do this once,
|
||||||
|
// at program initialization, rather than for every page, for example. We arrange for the
|
||||||
|
// globset matcher to always exist (even though it has to be an inside an Option at the
|
||||||
|
// moment because of the TOML serializer); if the glob set is empty the `is_match` function
|
||||||
|
// of the globber always returns false.
|
||||||
|
let mut glob_set_builder = GlobSetBuilder::new();
|
||||||
|
for pat in &config.ignored_content {
|
||||||
let glob = match Glob::new(pat) {
|
let glob = match Glob::new(pat) {
|
||||||
Ok(g) => g,
|
Ok(g) => g,
|
||||||
Err(e) => bail!("Invalid ignored_content glob pattern: {}, error = {}", pat, e)
|
Err(e) => bail!("Invalid ignored_content glob pattern: {}, error = {}", pat, e)
|
||||||
};
|
};
|
||||||
glob_set_builder.add(glob);
|
glob_set_builder.add(glob);
|
||||||
}
|
}
|
||||||
|
config.ignored_content_globset = Some(glob_set_builder.build().expect("Bad ignored_content in config file."));
|
||||||
}
|
}
|
||||||
config.ignored_content_globber = Some(glob_set_builder.build().expect("Bad ignored_content in config file."));
|
|
||||||
|
|
||||||
Ok(config)
|
Ok(config)
|
||||||
}
|
}
|
||||||
|
@ -131,8 +123,10 @@ impl Config {
|
||||||
/// Parses a config file from the given path
|
/// Parses a config file from the given path
|
||||||
pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Config> {
|
pub fn from_file<P: AsRef<Path>>(path: P) -> Result<Config> {
|
||||||
let mut content = String::new();
|
let mut content = String::new();
|
||||||
|
let path = path.as_ref();
|
||||||
|
let file_name = path.file_name().unwrap();
|
||||||
File::open(path)
|
File::open(path)
|
||||||
.chain_err(|| "No `config.toml` file found. Are you in the right directory?")?
|
.chain_err(|| format!("No `{:?}` file found. Are you in the right directory?", file_name))?
|
||||||
.read_to_string(&mut content)?;
|
.read_to_string(&mut content)?;
|
||||||
|
|
||||||
Config::parse(&content)
|
Config::parse(&content)
|
||||||
|
@ -161,19 +155,17 @@ impl Config {
|
||||||
|
|
||||||
/// Merges the extra data from the theme with the config extra data
|
/// Merges the extra data from the theme with the config extra data
|
||||||
fn add_theme_extra(&mut self, theme: &Theme) -> Result<()> {
|
fn add_theme_extra(&mut self, theme: &Theme) -> Result<()> {
|
||||||
if let Some(ref mut config_extra) = self.extra {
|
// 3 pass merging
|
||||||
// 3 pass merging
|
// 1. save config to preserve user
|
||||||
// 1. save config to preserve user
|
let original = self.extra.clone();
|
||||||
let original = config_extra.clone();
|
// 2. inject theme extra values
|
||||||
// 2. inject theme extra values
|
for (key, val) in &theme.extra {
|
||||||
for (key, val) in &theme.extra {
|
self.extra.entry(key.to_string()).or_insert_with(|| val.clone());
|
||||||
config_extra.entry(key.to_string()).or_insert_with(|| val.clone());
|
}
|
||||||
}
|
|
||||||
|
|
||||||
// 3. overwrite with original config
|
// 3. overwrite with original config
|
||||||
for (key, val) in &original {
|
for (key, val) in &original {
|
||||||
config_extra.entry(key.to_string()).or_insert_with(|| val.clone());
|
self.extra.entry(key.to_string()).or_insert_with(|| val.clone());
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -187,27 +179,26 @@ impl Config {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Exists only for testing purposes
|
|
||||||
#[doc(hidden)]
|
|
||||||
impl Default for Config {
|
impl Default for Config {
|
||||||
fn default() -> Config {
|
fn default() -> Config {
|
||||||
Config {
|
Config {
|
||||||
title: Some("".to_string()),
|
base_url: DEFAULT_BASE_URL.to_string(),
|
||||||
theme: None,
|
title: None,
|
||||||
base_url: "http://a-website.com/".to_string(),
|
|
||||||
highlight_code: Some(true),
|
|
||||||
highlight_theme: Some("base16-ocean-dark".to_string()),
|
|
||||||
description: None,
|
description: None,
|
||||||
default_language: Some("en".to_string()),
|
theme: None,
|
||||||
generate_rss: Some(false),
|
highlight_code: true,
|
||||||
rss_limit: Some(10_000),
|
highlight_theme: "base16-ocean-dark".to_string(),
|
||||||
generate_tags_pages: Some(true),
|
default_language: "en".to_string(),
|
||||||
generate_categories_pages: Some(true),
|
generate_rss: false,
|
||||||
compile_sass: Some(false),
|
rss_limit: 10_000,
|
||||||
ignored_content: Some(Vec::new()),
|
generate_tags_pages: true,
|
||||||
ignored_content_globber: Some(GlobSetBuilder::new().build().unwrap()),
|
generate_categories_pages: true,
|
||||||
translations: None,
|
compile_sass: false,
|
||||||
extra: None,
|
build_search_index: false,
|
||||||
|
ignored_content: Vec::new(),
|
||||||
|
ignored_content_globset: None,
|
||||||
|
translations: HashMap::new(),
|
||||||
|
extra: HashMap::new(),
|
||||||
build_timestamp: Some(1),
|
build_timestamp: Some(1),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -277,7 +268,7 @@ hello = "world"
 
         let config = Config::parse(config);
         assert!(config.is_ok());
-        assert_eq!(config.unwrap().extra.unwrap().get("hello").unwrap().as_str().unwrap(), "world");
+        assert_eq!(config.unwrap().extra.get("hello").unwrap().as_str().unwrap(), "world");
     }
 
     #[test]
@@ -333,7 +324,7 @@ a_value = 10
 "#;
         let theme = Theme::parse(theme_str).unwrap();
         assert!(config.add_theme_extra(&theme).is_ok());
-        let extra = config.extra.unwrap();
+        let extra = config.extra;
         assert_eq!(extra["hello"].as_str().unwrap(), "world".to_string());
         assert_eq!(extra["a_value"].as_integer().unwrap(), 10);
     }
@@ -355,26 +346,26 @@ title = "A title"
 
         let config = Config::parse(config);
         assert!(config.is_ok());
-        let translations = config.unwrap().translations.unwrap();
+        let translations = config.unwrap().translations;
         assert_eq!(translations["fr"]["title"].as_str().unwrap(), "Un titre");
         assert_eq!(translations["en"]["title"].as_str().unwrap(), "A title");
     }
 
     #[test]
-    fn missing_ignored_content_results_in_empty_vector_and_empty_globber() {
+    fn missing_ignored_content_results_in_empty_vector_and_empty_globset() {
         let config_str = r#"
title = "My site"
base_url = "example.com"
"#;
 
         let config = Config::parse(config_str).unwrap();
-        let v = config.ignored_content.unwrap();
+        let v = config.ignored_content;
         assert_eq!(v.len(), 0);
-        assert!(config.ignored_content_globber.unwrap().is_empty());
+        assert!(config.ignored_content_globset.is_none());
     }
 
     #[test]
-    fn empty_ignored_content_results_in_empty_vector_and_empty_globber() {
+    fn empty_ignored_content_results_in_empty_vector_and_empty_globset() {
         let config_str = r#"
title = "My site"
base_url = "example.com"
@@ -382,12 +373,12 @@ ignored_content = []
"#;
 
         let config = Config::parse(config_str).unwrap();
-        assert_eq!(config.ignored_content.unwrap().len(), 0);
-        assert!(config.ignored_content_globber.unwrap().is_empty());
+        assert_eq!(config.ignored_content.len(), 0);
+        assert!(config.ignored_content_globset.is_none());
     }
 
     #[test]
-    fn non_empty_ignored_content_results_in_vector_of_patterns_and_configured_globber() {
+    fn non_empty_ignored_content_results_in_vector_of_patterns_and_configured_globset() {
         let config_str = r#"
title = "My site"
base_url = "example.com"
@@ -395,10 +386,10 @@ ignored_content = ["*.{graphml,iso}", "*.py?"]
"#;
 
         let config = Config::parse(config_str).unwrap();
-        let v = config.ignored_content.unwrap();
+        let v = config.ignored_content;
         assert_eq!(v, vec!["*.{graphml,iso}", "*.py?"]);
 
-        let g = config.ignored_content_globber.unwrap();
+        let g = config.ignored_content_globset.unwrap();
         assert_eq!(g.len(), 2);
         assert!(g.is_match("foo.graphml"));
         assert!(g.is_match("foo.iso"));
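The shape of this refactor, dropping the `set_default!` macro and the `Option`-typed settings in favour of `#[serde(default)]` plus an explicit `Default` impl, is a standard serde pattern. A small self-contained sketch of the same technique follows; the `Settings` struct and its fields are invented for illustration and are not code from this commit.

```rust
// Minimal sketch of the `#[serde(default)]` + `impl Default` pattern used above.
extern crate serde;
extern crate toml;
#[macro_use]
extern crate serde_derive;

#[derive(Debug, Deserialize)]
#[serde(default)]
struct Settings {
    base_url: String,
    highlight_code: bool,
    rss_limit: usize,
}

impl Default for Settings {
    fn default() -> Settings {
        Settings {
            base_url: String::new(),
            highlight_code: false,
            rss_limit: 10_000,
        }
    }
}

fn main() {
    // Keys missing from the TOML fall back to the values from `Default`, so the
    // rest of the code can use plain `bool`/`usize` fields instead of unwrapping Options.
    let s: Settings = toml::from_str("highlight_code = true").unwrap();
    assert_eq!(s.highlight_code, true);
    assert_eq!(s.rss_limit, 10_000);
}
```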
Cargo.toml (content component)
@@ -1,7 +1,7 @@
 [package]
 name = "content"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
 
 [dependencies]
 tera = "0.11"
content component: FileInfo
@@ -48,7 +48,11 @@ impl FileInfo {
         let mut parent = file_path.parent().unwrap().to_path_buf();
         let name = path.file_stem().unwrap().to_string_lossy().to_string();
         let mut components = find_content_components(&file_path);
-        let relative = format!("{}/{}.md", components.join("/"), name);
+        let relative = if !components.is_empty() {
+            format!("{}/{}.md", components.join("/"), name)
+        } else {
+            format!("{}.md", name)
+        };
 
         // If we have a folder with an asset, don't consider it as a component
         if !components.is_empty() && name == "index" {
content component: Page
@@ -75,7 +75,7 @@ impl Page {
     }
 
     pub fn is_draft(&self) -> bool {
-        self.meta.draft.unwrap_or(false)
+        self.meta.draft
     }
 
     /// Parse a page given the content of the .md file
@@ -130,23 +130,27 @@ impl Page {
         let mut page = Page::parse(path, &content, config)?;
 
         if page.file.name == "index" {
-            // `find_related_assets` only scans the immediate directory (it is not recursive) so our
-            // filtering only needs to work against the file_name component, not the full suffix. If
-            // `find_related_assets` was changed to also return files in subdirectories, we could
-            // use `PathBuf.strip_prefix` to remove the parent directory and then glob-filter
-            // against the remaining path. Note that the current behaviour effectively means that
-            // the `ignored_content` setting in the config file is limited to single-file glob
-            // patterns (no "**" patterns).
-            let globber = config.ignored_content_globber.as_ref().unwrap();
             let parent_dir = path.parent().unwrap();
-            page.assets = find_related_assets(parent_dir).into_iter()
-                .filter(|path|
-                    match path.file_name() {
-                        None => true,
-                        Some(file) => !globber.is_match(file)
-                    }
-                ).collect();
+            let assets = find_related_assets(parent_dir);
 
+            if let Some(ref globset) = config.ignored_content_globset {
+                // `find_related_assets` only scans the immediate directory (it is not recursive) so our
+                // filtering only needs to work against the file_name component, not the full suffix. If
+                // `find_related_assets` was changed to also return files in subdirectories, we could
+                // use `PathBuf.strip_prefix` to remove the parent directory and then glob-filter
+                // against the remaining path. Note that the current behaviour effectively means that
+                // the `ignored_content` setting in the config file is limited to single-file glob
+                // patterns (no "**" patterns).
+                page.assets = assets.into_iter()
+                    .filter(|path|
+                        match path.file_name() {
+                            None => true,
+                            Some(file) => !globset.is_match(file)
+                        }
+                    ).collect();
+            } else {
+                page.assets = assets;
+            }
         } else {
             page.assets = vec![];
         }
@@ -160,13 +164,13 @@ impl Page {
     pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config, anchor_insert: InsertAnchor) -> Result<()> {
         let context = Context::new(
             tera,
-            config.highlight_code.unwrap(),
-            config.highlight_theme.clone().unwrap(),
+            config.highlight_code,
+            config.highlight_theme.clone(),
             &self.permalink,
             permalinks,
             anchor_insert
         );
-        let res = markdown_to_html(&self.raw_content, &context)?;
+        let res = markdown_to_html(&self.raw_content.replacen("<!-- more -->", "<a name=\"continue-reading\"></a>", 1), &context)?;
         self.content = res.0;
         self.toc = res.1;
         if self.raw_content.contains("<!-- more -->") {
@@ -192,7 +196,7 @@ impl Page {
         context.add("current_url", &self.permalink);
         context.add("current_path", &self.path);
 
-        render_template(&tpl_name, tera, &context, config.theme.clone())
+        render_template(&tpl_name, tera, &context, &config.theme)
             .chain_err(|| format!("Failed to render page '{}'", self.file.path.display()))
     }
 }
@@ -450,7 +454,7 @@ Hello world
         let mut gsb = GlobSetBuilder::new();
         gsb.add(Glob::new("*.{js,png}").unwrap());
         let mut config = Config::default();
-        config.ignored_content_globber = Some(gsb.build().unwrap());
+        config.ignored_content_globset = Some(gsb.build().unwrap());
 
         let res = Page::from_file(
             nested_path.join("index.md").as_path(),
content component: Section
@@ -100,11 +100,11 @@ impl Section {
     pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config) -> Result<()> {
         let context = Context::new(
             tera,
-            config.highlight_code.unwrap(),
-            config.highlight_theme.clone().unwrap(),
+            config.highlight_code,
+            config.highlight_theme.clone(),
             &self.permalink,
             permalinks,
-            self.meta.insert_anchor_links.unwrap()
+            self.meta.insert_anchor_links,
         );
         let res = markdown_to_html(&self.raw_content, &context)?;
         self.content = res.0;
@@ -122,7 +122,7 @@ impl Section {
         context.add("current_url", &self.permalink);
         context.add("current_path", &self.path);
 
-        render_template(&tpl_name, tera, &context, config.theme.clone())
+        render_template(&tpl_name, tera, &context, &config.theme)
             .chain_err(|| format!("Failed to render section '{}'", self.file.path.display()))
     }
 
content component: page sorting
@@ -1,3 +1,5 @@
+use std::cmp::Ordering;
+
 use rayon::prelude::*;
 
 use page::Page;
@@ -24,9 +26,36 @@ pub fn sort_pages(pages: Vec<Page>, sort_by: SortBy) -> (Vec<Page>, Vec<Page>) {
     });
 
     match sort_by {
-        SortBy::Date => can_be_sorted.par_sort_unstable_by(|a, b| b.meta.date().unwrap().cmp(&a.meta.date().unwrap())),
-        SortBy::Order => can_be_sorted.par_sort_unstable_by(|a, b| b.meta.order().cmp(&a.meta.order())),
-        SortBy::Weight => can_be_sorted.par_sort_unstable_by(|a, b| a.meta.weight().cmp(&b.meta.weight())),
+        SortBy::Date => {
+            can_be_sorted.par_sort_unstable_by(|a, b| {
+                let ord = b.meta.date().unwrap().cmp(&a.meta.date().unwrap());
+                if ord == Ordering::Equal {
+                    a.permalink.cmp(&b.permalink)
+                } else {
+                    ord
+                }
+            })
+        },
+        SortBy::Order => {
+            can_be_sorted.par_sort_unstable_by(|a, b| {
+                let ord = b.meta.order().cmp(&a.meta.order());
+                if ord == Ordering::Equal {
+                    a.permalink.cmp(&b.permalink)
+                } else {
+                    ord
+                }
+            })
+        },
+        SortBy::Weight => {
+            can_be_sorted.par_sort_unstable_by(|a, b| {
+                let ord = a.meta.weight().cmp(&b.meta.weight());
+                if ord == Ordering::Equal {
+                    a.permalink.cmp(&b.permalink)
+                } else {
+                    ord
+                }
+            })
+        },
         _ => unreachable!()
     };
 
@@ -108,16 +137,19 @@ mod tests {
         Page::new("content/hello.md", front_matter)
     }
 
-    fn create_page_with_order(order: usize) -> Page {
+    fn create_page_with_order(order: usize, filename: &str) -> Page {
         let mut front_matter = PageFrontMatter::default();
         front_matter.order = Some(order);
-        Page::new("content/hello.md", front_matter)
+        let mut p = Page::new("content/".to_string() + filename, front_matter);
+        // Faking a permalink to test sorting with equal order
+        p.permalink = filename.to_string();
+        p
     }
 
     fn create_draft_page_with_order(order: usize) -> Page {
         let mut front_matter = PageFrontMatter::default();
         front_matter.order = Some(order);
-        front_matter.draft = Some(true);
+        front_matter.draft = true;
         Page::new("content/hello.md", front_matter)
     }
 
@@ -144,17 +176,34 @@ mod tests {
     #[test]
     fn can_sort_by_order() {
         let input = vec![
-            create_page_with_order(2),
-            create_page_with_order(3),
-            create_page_with_order(1),
+            create_page_with_order(2, "hello.md"),
+            create_page_with_order(3, "hello2.md"),
+            create_page_with_order(1, "hello3.md"),
         ];
         let (pages, _) = sort_pages(input, SortBy::Order);
-        // Should be sorted by date
+        // Should be sorted by order
         assert_eq!(pages[0].clone().meta.order.unwrap(), 3);
         assert_eq!(pages[1].clone().meta.order.unwrap(), 2);
         assert_eq!(pages[2].clone().meta.order.unwrap(), 1);
     }
 
+    #[test]
+    fn can_sort_by_order_uses_permalink_to_break_ties() {
+        let input = vec![
+            create_page_with_order(3, "b.md"),
+            create_page_with_order(3, "a.md"),
+            create_page_with_order(3, "c.md"),
+        ];
+        let (pages, _) = sort_pages(input, SortBy::Order);
+        // Should be sorted by order
+        assert_eq!(pages[0].clone().meta.order.unwrap(), 3);
+        assert_eq!(pages[0].clone().permalink, "a.md");
+        assert_eq!(pages[1].clone().meta.order.unwrap(), 3);
+        assert_eq!(pages[1].clone().permalink, "b.md");
+        assert_eq!(pages[2].clone().meta.order.unwrap(), 3);
+        assert_eq!(pages[2].clone().permalink, "c.md");
+    }
+
     #[test]
     fn can_sort_by_weight() {
         let input = vec![
@@ -163,7 +212,7 @@ mod tests {
             create_page_with_weight(1),
         ];
         let (pages, _) = sort_pages(input, SortBy::Weight);
-        // Should be sorted by date
+        // Should be sorted by weight
         assert_eq!(pages[0].clone().meta.weight.unwrap(), 1);
         assert_eq!(pages[1].clone().meta.weight.unwrap(), 2);
         assert_eq!(pages[2].clone().meta.weight.unwrap(), 3);
@@ -172,9 +221,9 @@ mod tests {
     #[test]
     fn can_sort_by_none() {
         let input = vec![
-            create_page_with_order(2),
-            create_page_with_order(3),
-            create_page_with_order(1),
+            create_page_with_order(2, "a.md"),
+            create_page_with_order(3, "a.md"),
+            create_page_with_order(1, "a.md"),
         ];
         let (pages, _) = sort_pages(input, SortBy::None);
         // Should be sorted by date
@@ -186,8 +235,8 @@ mod tests {
     #[test]
     fn ignore_page_with_missing_field() {
         let input = vec![
-            create_page_with_order(2),
-            create_page_with_order(3),
+            create_page_with_order(2, "a.md"),
+            create_page_with_order(3, "a.md"),
             create_page_with_date("2019-01-01"),
         ];
         let (pages, unsorted) = sort_pages(input, SortBy::Order);
@@ -198,9 +247,9 @@ mod tests {
     #[test]
     fn can_populate_previous_and_next_pages() {
         let input = vec![
-            create_page_with_order(1),
-            create_page_with_order(2),
-            create_page_with_order(3),
+            create_page_with_order(1, "a.md"),
+            create_page_with_order(2, "b.md"),
+            create_page_with_order(3, "a.md"),
         ];
         let pages = populate_previous_and_next_pages(&input);
 
@@ -222,9 +271,9 @@ mod tests {
     fn can_populate_previous_and_next_pages_skip_drafts() {
         let input = vec![
             create_draft_page_with_order(0),
-            create_page_with_order(1),
-            create_page_with_order(2),
-            create_page_with_order(3),
+            create_page_with_order(1, "a.md"),
+            create_page_with_order(2, "b.md"),
+            create_page_with_order(3, "c.md"),
             create_draft_page_with_order(4),
         ];
         let pages = populate_previous_and_next_pages(&input);
 
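The three new match arms above repeat the same "compare, then fall back to permalink" tie-break. The standard library's `Ordering::then_with` expresses the same idea more compactly; the sketch below is an alternative formulation of that comparator, not the code this commit uses, and `PageStub` is an invented stand-in for `Page` carrying only the fields the comparison needs.

```rust
use std::cmp::Ordering;

// Illustrative stand-in for a page: only the fields the comparator needs.
struct PageStub {
    order: usize,
    permalink: String,
}

// Same tie-break as the SortBy::Order arm above: higher `order` first,
// ties broken by comparing permalinks.
fn by_order_then_permalink(a: &PageStub, b: &PageStub) -> Ordering {
    b.order.cmp(&a.order).then_with(|| a.permalink.cmp(&b.permalink))
}

fn main() {
    let mut pages = vec![
        PageStub { order: 3, permalink: "b.md".to_string() },
        PageStub { order: 3, permalink: "a.md".to_string() },
        PageStub { order: 1, permalink: "c.md".to_string() },
    ];
    pages.sort_by(by_order_then_permalink);
    assert_eq!(pages[0].permalink, "a.md"); // order 3, tie broken by permalink
    assert_eq!(pages[2].permalink, "c.md"); // lowest order last
}
```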
Cargo.toml (errors component)
@@ -1,7 +1,7 @@
 [package]
 name = "errors"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
 
 [dependencies]
 error-chain = "0.11"
Cargo.toml (front_matter component)
@@ -1,7 +1,7 @@
 [package]
 name = "front_matter"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
 
 [dependencies]
 tera = "0.11"
front_matter component: PageFrontMatter
@@ -62,6 +62,7 @@ fn fix_toml_dates(table: Map<String, Value>) -> Value {
 
 /// The front matter of every page
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[serde(default)]
 pub struct PageFrontMatter {
     /// <title> of the page
     pub title: Option<String>,
@@ -71,7 +72,7 @@ pub struct PageFrontMatter {
     #[serde(default, deserialize_with = "from_toml_datetime")]
     pub date: Option<String>,
     /// Whether this page is a draft and should be ignored for pagination etc
-    pub draft: Option<bool>,
+    pub draft: bool,
     /// The page slug. Will be used instead of the filename if present
     /// Can't be an empty string if present
     pub slug: Option<String>,
@@ -90,12 +91,15 @@ pub struct PageFrontMatter {
     /// All aliases for that page. Gutenberg will create HTML templates that will
     /// redirect to this
     #[serde(skip_serializing)]
-    pub aliases: Option<Vec<String>>,
+    pub aliases: Vec<String>,
     /// Specify a template different from `page.html` to use for that page
     #[serde(skip_serializing)]
     pub template: Option<String>,
+    /// Whether the page is included in the search index
+    /// Defaults to `true` but is only used if search if explicitly enabled in the config.
+    #[serde(skip_serializing)]
+    pub in_search_index: bool,
     /// Any extra parameter present in the front matter
-    #[serde(default)]
     pub extra: Map<String, Value>,
 }
 
@@ -166,14 +170,15 @@ impl Default for PageFrontMatter {
             title: None,
             description: None,
             date: None,
-            draft: None,
+            draft: false,
             slug: None,
             path: None,
             tags: None,
             category: None,
             order: None,
             weight: None,
-            aliases: None,
+            aliases: Vec::new(),
+            in_search_index: true,
             template: None,
             extra: Map::new(),
         }
front_matter component: SectionFrontMatter
@@ -12,6 +12,7 @@ static DEFAULT_PAGINATE_PATH: &'static str = "page";
 
 /// The front matter of every section
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[serde(default)]
 pub struct SectionFrontMatter {
     /// <title> of the page
     pub title: Option<String>,
@@ -19,11 +20,11 @@ pub struct SectionFrontMatter {
     pub description: Option<String>,
     /// Whether to sort by "date", "order", "weight" or "none". Defaults to `none`.
     #[serde(skip_serializing)]
-    pub sort_by: Option<SortBy>,
+    pub sort_by: SortBy,
     /// Used by the parent section to order its subsections.
-    /// Higher values means it will be at the end.
+    /// Higher values means it will be at the end. Defaults to `0`
     #[serde(skip_serializing)]
-    pub weight: Option<usize>,
+    pub weight: usize,
     /// Optional template, if we want to specify which template to render for that section
     #[serde(skip_serializing)]
     pub template: Option<String>,
@@ -32,59 +33,38 @@ pub struct SectionFrontMatter {
     pub paginate_by: Option<usize>,
     /// Path to be used by pagination: the page number will be appended after it. Defaults to `page`.
     #[serde(skip_serializing)]
-    pub paginate_path: Option<String>,
+    pub paginate_path: String,
     /// Whether to insert a link for each header like the ones you can see in this site if you hover one
     /// The default template can be overridden by creating a `anchor-link.html` in the `templates` directory
-    pub insert_anchor_links: Option<InsertAnchor>,
+    pub insert_anchor_links: InsertAnchor,
     /// Whether to render that section or not. Defaults to `true`.
     /// Useful when the section is only there to organize things but is not meant
     /// to be used directly, like a posts section in a personal site
     #[serde(skip_serializing)]
-    pub render: Option<bool>,
+    pub render: bool,
     /// Whether to redirect when landing on that section. Defaults to `None`.
     /// Useful for the same reason as `render` but when you don't want a 404 when
     /// landing on the root section page
     #[serde(skip_serializing)]
     pub redirect_to: Option<String>,
+    /// Whether the section content and its pages/subsections are included in the index.
+    /// Defaults to `true` but is only used if search if explicitly enabled in the config.
+    #[serde(skip_serializing)]
+    pub in_search_index: bool,
     /// Any extra parameter present in the front matter
-    pub extra: Option<HashMap<String, Value>>,
+    pub extra: HashMap<String, Value>,
 }
 
 impl SectionFrontMatter {
     pub fn parse(toml: &str) -> Result<SectionFrontMatter> {
-        let mut f: SectionFrontMatter = match toml::from_str(toml) {
+        let f: SectionFrontMatter = match toml::from_str(toml) {
             Ok(d) => d,
             Err(e) => bail!(e),
         };
 
-        if f.paginate_path.is_none() {
-            f.paginate_path = Some(DEFAULT_PAGINATE_PATH.to_string());
-        }
-
-        if f.render.is_none() {
-            f.render = Some(true);
-        }
-
-        if f.sort_by.is_none() {
-            f.sort_by = Some(SortBy::None);
-        }
-
-        if f.insert_anchor_links.is_none() {
-            f.insert_anchor_links = Some(InsertAnchor::None);
-        }
-
-        if f.weight.is_none() {
-            f.weight = Some(0);
-        }
-
         Ok(f)
     }
 
-    /// Returns the current sorting method, defaults to `None` (== no sorting)
-    pub fn sort_by(&self) -> SortBy {
-        self.sort_by.unwrap()
-    }
-
     /// Only applies to section, whether it is paginated or not.
     pub fn is_paginated(&self) -> bool {
         match self.paginate_by {
@@ -92,10 +72,6 @@ impl SectionFrontMatter {
             None => false
         }
     }
-
-    pub fn should_render(&self) -> bool {
-        self.render.unwrap()
-    }
 }
 
 impl Default for SectionFrontMatter {
@@ -103,15 +79,16 @@ impl Default for SectionFrontMatter {
         SectionFrontMatter {
             title: None,
             description: None,
-            sort_by: Some(SortBy::None),
-            weight: Some(0),
+            sort_by: SortBy::None,
+            weight: 0,
             template: None,
             paginate_by: None,
-            paginate_path: Some(DEFAULT_PAGINATE_PATH.to_string()),
-            render: Some(true),
+            paginate_path: DEFAULT_PAGINATE_PATH.to_string(),
+            render: true,
             redirect_to: None,
-            insert_anchor_links: Some(InsertAnchor::None),
-            extra: None,
+            insert_anchor_links: InsertAnchor::None,
+            in_search_index: true,
+            extra: HashMap::new(),
         }
     }
 }
@@ -1,7 +1,7 @@
 [package]
 name = "highlighting"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

 [dependencies]
 lazy_static = "1"

@@ -1,7 +1,7 @@
 [package]
 name = "pagination"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

 [dependencies]
 tera = "0.11"

@@ -72,13 +72,9 @@ impl<'a> Paginator<'a> {
     /// It will always at least create one pager (the first) even if there are no pages to paginate
     pub fn new(all_pages: &'a [Page], section: &'a Section) -> Paginator<'a> {
         let paginate_by = section.meta.paginate_by.unwrap();
-        let paginate_path = match section.meta.paginate_path {
-            Some(ref p) => p,
-            None => unreachable!(),
-        };
-
         let mut pages = vec![];
         let mut current_page = vec![];

         for page in all_pages {
             current_page.push(page);
@@ -99,7 +95,7 @@ impl<'a> Paginator<'a> {
                 continue;
             }

-            let page_path = format!("{}/{}/", paginate_path, index + 1);
+            let page_path = format!("{}/{}/", section.meta.paginate_path, index + 1);
             let permalink = format!("{}{}", section.permalink, page_path);
             let pager_path = if section.is_index() {
                 page_path
@@ -172,7 +168,7 @@ impl<'a> Paginator<'a> {
         context.add("current_path", &pager.path);
         context.add("paginator", &self.build_paginator_context(pager));

-        render_template(&self.section.get_template_name(), tera, &context, config.theme.clone())
+        render_template(&self.section.get_template_name(), tera, &context, &config.theme)
             .chain_err(|| format!("Failed to render pager {} of section '{}'", pager.index, self.section.file.path.display()))
     }
 }
@@ -189,7 +185,7 @@ mod tests {
     fn create_section(is_index: bool) -> Section {
         let mut f = SectionFrontMatter::default();
         f.paginate_by = Some(2);
-        f.paginate_path = Some("page".to_string());
+        f.paginate_path = "page".to_string();
         let mut s = Section::new("content/_index.md", f);
         if !is_index {
             s.path = "posts/".to_string();

@@ -1,7 +1,7 @@
 [package]
 name = "rebuild"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

 [dependencies]
 errors = { path = "../errors" }

@@ -60,7 +60,7 @@ fn find_section_front_matter_changes(current: &SectionFrontMatter, new: &SectionFrontMatter)
     // We want to hide the section
     // TODO: what to do on redirect_path change?
-    if current.should_render() && !new.should_render() {
+    if current.render && !new.render {
         changes_needed.push(SectionChangesNeeded::Delete);
         // Nothing else we can do
         return changes_needed;
@@ -383,14 +383,14 @@ mod tests {

     #[test]
     fn can_find_sort_changes_in_section_frontmatter() {
-        let new = SectionFrontMatter { sort_by: Some(SortBy::Date), ..SectionFrontMatter::default() };
+        let new = SectionFrontMatter { sort_by: SortBy::Date, ..SectionFrontMatter::default() };
         let changes = find_section_front_matter_changes(&SectionFrontMatter::default(), &new);
         assert_eq!(changes, vec![SectionChangesNeeded::Sort, SectionChangesNeeded::Render]);
     }

     #[test]
     fn can_find_render_changes_in_section_frontmatter() {
-        let new = SectionFrontMatter { render: Some(false), ..SectionFrontMatter::default() };
+        let new = SectionFrontMatter { render: false, ..SectionFrontMatter::default() };
         let changes = find_section_front_matter_changes(&SectionFrontMatter::default(), &new);
         assert_eq!(changes, vec![SectionChangesNeeded::Delete]);
     }

@@ -1,7 +1,7 @@
 [package]
 name = "rendering"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

 [dependencies]
 tera = "0.11"

@@ -6,7 +6,15 @@ use tera::{Tera, Context, Value, to_value};
 use errors::{Result, ResultExt};

 lazy_static!{
-    pub static ref SHORTCODE_RE: Regex = Regex::new(r#"\{(?:%|\{)\s+([[:word:]]+?)\(([[:word:]]+?="?.+?"?)?\)\s+(?:%|\})\}"#).unwrap();
+    // Does this look like a shortcode?
+    pub static ref SHORTCODE_RE: Regex = Regex::new(
+        r#"\{(?:%|\{)\s+(\w+?)\((\w+?="?(?:.|\n)+?"?)?\)\s+(?:%|\})\}"#
+    ).unwrap();
+
+    // Parse the shortcode args with capture groups named after their type
+    pub static ref SHORTCODE_ARGS_RE: Regex = Regex::new(
+        r#"(?P<name>\w+)=\s*((?P<str>".*?")|(?P<float>[-+]?[0-9]+\.[0-9]+)|(?P<int>[-+]?[0-9]+)|(?P<bool>true|false))"#
+    ).unwrap();
 }

 /// A shortcode that has a body
@@ -52,41 +60,28 @@ pub fn parse_shortcode(input: &str) -> (String, HashMap<String, Value>) {
     let name = &caps[1];

     if let Some(arg_list) = caps.get(2) {
-        for arg in arg_list.as_str().split(',') {
-            let bits = arg.split('=').collect::<Vec<_>>();
-            let arg_name = bits[0].trim().to_string();
-            let arg_val = bits[1].replace("\"", "");
-
-            // Regex captures will be str so we need to figure out if they are
-            // actually str or bool/number
-            if input.contains(&format!("{}=\"{}\"", arg_name, arg_val)) {
-                // that's a str, just add it
-                args.insert(arg_name, to_value(arg_val).unwrap());
+        for arg_cap in SHORTCODE_ARGS_RE.captures_iter(arg_list.as_str()) {
+            let arg_name = arg_cap["name"].trim().to_string();
+
+            if let Some(arg_val) = arg_cap.name("str") {
+                args.insert(arg_name, to_value(arg_val.as_str().replace("\"", "")).unwrap());
                 continue;
             }

-            if input.contains(&format!("{}=true", arg_name)) {
-                args.insert(arg_name, to_value(true).unwrap());
+            if let Some(arg_val) = arg_cap.name("int") {
+                args.insert(arg_name, to_value(arg_val.as_str().parse::<i64>().unwrap()).unwrap());
                 continue;
             }

-            if input.contains(&format!("{}=false", arg_name)) {
-                args.insert(arg_name, to_value(false).unwrap());
+            if let Some(arg_val) = arg_cap.name("float") {
+                args.insert(arg_name, to_value(arg_val.as_str().parse::<f64>().unwrap()).unwrap());
                 continue;
             }

-            // Not a string or a bool, a number then?
-            if arg_val.contains('.') {
-                if let Ok(float) = arg_val.parse::<f64>() {
-                    args.insert(arg_name, to_value(float).unwrap());
-                }
+            if let Some(arg_val) = arg_cap.name("bool") {
+                args.insert(arg_name, to_value(arg_val.as_str() == "true").unwrap());
                 continue;
             }
-
-            // must be an integer
-            if let Ok(int) = arg_val.parse::<i64>() {
-                args.insert(arg_name, to_value(int).unwrap());
-            }
         }
     }
@@ -122,6 +117,10 @@ mod tests {
         "{% basic() %}",
         "{% quo_te(author=\"Bob\") %}",
         "{{ quo_te(author=\"Bob\") }}",
+        // https://github.com/Keats/gutenberg/issues/229
+        r#"{{ youtube(id="dQw4w9WgXcQ",
+
+        autoplay=true) }}"#,
     ];

     for i in inputs {
@@ -130,6 +129,15 @@ mod tests {
         }
     }

+    // https://github.com/Keats/gutenberg/issues/228
+    #[test]
+    fn doesnt_panic_on_invalid_shortcode() {
+        let (name, args) = parse_shortcode(r#"{{ youtube(id="dQw4w9WgXcQ", autoplay) }}"#);
+        assert_eq!(name, "youtube");
+        assert_eq!(args["id"], "dQw4w9WgXcQ");
+        assert!(args.get("autoplay").is_none());
+    }
+
     #[test]
     fn can_parse_simple_shortcode_no_arg() {
         let (name, args) = parse_shortcode(r#"{{ basic() }}"#);
@@ -162,10 +170,21 @@ mod tests {

     #[test]
     fn can_parse_shortcode_number() {
-        let (name, args) = parse_shortcode(r#"{% test(int=42, float=42.0, autoplay=true) %}"#);
+        let (name, args) = parse_shortcode(r#"{% test(int=42, float=42.0, autoplay=false) %}"#);
         assert_eq!(name, "test");
         assert_eq!(args["int"], 42);
         assert_eq!(args["float"], 42.0);
-        assert_eq!(args["autoplay"], true);
+        assert_eq!(args["autoplay"], false);
+    }
+
+    // https://github.com/Keats/gutenberg/issues/249
+    #[test]
+    fn can_parse_shortcode_with_comma_in_it() {
+        let (name, args) = parse_shortcode(
+            r#"{% quote(author="C++ Standard Core Language Defect Reports and Accepted Issues, Revision 82, delete and user-written deallocation function", href="http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_defects.html#348") %}"#
+        );
+        assert_eq!(name, "quote");
+        assert_eq!(args["author"], "C++ Standard Core Language Defect Reports and Accepted Issues, Revision 82, delete and user-written deallocation function");
+        assert_eq!(args["href"], "http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_defects.html#348");
     }
 }

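As an aside, not part of the commit itself: a minimal, standalone sketch of the typed-capture-group approach the new `SHORTCODE_ARGS_RE` relies on. The regex string is copied from the diff above; the `main` wrapper and the sample argument list are made up for illustration and assume only the `regex` crate.

```rust
use regex::Regex;

fn main() {
    // Same pattern as SHORTCODE_ARGS_RE above: one named group per argument type.
    let args_re = Regex::new(
        r#"(?P<name>\w+)=\s*((?P<str>".*?")|(?P<float>[-+]?[0-9]+\.[0-9]+)|(?P<int>[-+]?[0-9]+)|(?P<bool>true|false))"#
    ).unwrap();

    // Hypothetical shortcode argument list, including a comma inside a string (issue #249).
    let input = r#"author="Core Language, Revision 82", id=42, ratio=1.5, autoplay=true"#;

    for cap in args_re.captures_iter(input) {
        let name = &cap["name"];
        // Only one of the typed groups can match per argument, so these checks
        // simply report which group captured the value.
        if let Some(val) = cap.name("str") {
            println!("{} -> string {}", name, val.as_str().trim_matches('"'));
        } else if let Some(val) = cap.name("float") {
            println!("{} -> float {}", name, val.as_str());
        } else if let Some(val) = cap.name("int") {
            println!("{} -> int {}", name, val.as_str());
        } else if let Some(val) = cap.name("bool") {
            println!("{} -> bool {}", name, val.as_str());
        }
    }
}
```
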
@@ -241,7 +241,7 @@ fn doesnt_render_shortcode_in_code_block() {
 fn can_render_shortcode_with_body() {
     let mut tera = Tera::default();
     tera.extend(&GUTENBERG_TERA).unwrap();
-    tera.add_raw_template("shortcodes/quote.html", "<blockquote>{{ body }} - {{ author}}</blockquote>").unwrap();
+    tera.add_raw_template("shortcodes/quote.html", "<blockquote>{{ body }} - {{ author }}</blockquote>").unwrap();
     let permalinks_ctx = HashMap::new();
     let context = Context::new(&tera, true, "base16-ocean-dark".to_string(), "", &permalinks_ctx, InsertAnchor::None);

components/search/Cargo.toml (new file, 12 lines)
@@ -0,0 +1,12 @@
[package]
name = "search"
version = "0.1.0"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

[dependencies]
elasticlunr-rs = "2"
ammonia = "1"
lazy_static = "1"

errors = { path = "../errors" }
content = { path = "../content" }

components/search/src/elasticlunr.min.js (new vendored file, 10 lines)
File diff suppressed because one or more lines are too long

components/search/src/lib.rs (new file, 80 lines)
@@ -0,0 +1,80 @@
extern crate elasticlunr;
#[macro_use]
extern crate lazy_static;
extern crate ammonia;
#[macro_use]
extern crate errors;
extern crate content;

use std::collections::{HashMap, HashSet};
use std::path::PathBuf;

use elasticlunr::{Index, Language};

use content::Section;
use errors::Result;


pub const ELASTICLUNR_JS: &'static str = include_str!("elasticlunr.min.js");

lazy_static! {
    static ref AMMONIA: ammonia::Builder<'static> = {
        let mut clean_content = HashSet::new();
        clean_content.insert("script");
        clean_content.insert("style");
        let mut builder = ammonia::Builder::new();
        builder
            .tags(HashSet::new())
            .tag_attributes(HashMap::new())
            .generic_attributes(HashSet::new())
            .link_rel(None)
            .allowed_classes(HashMap::new())
            .clean_content_tags(clean_content);
        builder
    };
}


/// Returns the generated JSON index with all the documents of the site added using
/// the language given
/// Errors if the language given is not available in Elasticlunr
/// TODO: is making `in_search_index` apply to subsections of a `false` section useful?
pub fn build_index(sections: &HashMap<PathBuf, Section>, lang: &str) -> Result<String> {
    let language = match Language::from_code(lang) {
        Some(l) => l,
        None => { bail!("Tried to build search index for language {} which is not supported", lang); }
    };

    let mut index = Index::with_language(language, &["title", "body"]);

    for section in sections.values() {
        add_section_to_index(&mut index, section);
    }

    Ok(index.to_json())
}

fn add_section_to_index(index: &mut Index, section: &Section) {
    if !section.meta.in_search_index {
        return;
    }

    // Don't index redirecting sections
    if section.meta.redirect_to.is_none() {
        index.add_doc(
            &section.permalink,
            &[&section.meta.title.clone().unwrap_or(String::new()), &AMMONIA.clean(&section.content).to_string()],
        );
    }

    for page in &section.pages {
        if !page.meta.in_search_index || page.meta.draft {
            continue;
        }

        index.add_doc(
            &page.permalink,
            &[&page.meta.title.clone().unwrap_or(String::new()), &AMMONIA.clean(&page.content).to_string()],
        );
    }
}

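A side note, not part of the commit: a minimal standalone sketch of the `elasticlunr-rs` calls used in the new crate above (`Language::from_code`, `Index::with_language`, `add_doc`, `to_json`). The documents here are invented purely for illustration; in Gutenberg the document reference is the permalink and the fields are the cleaned title and content of each section and page.

```rust
extern crate elasticlunr;

use elasticlunr::{Index, Language};

fn main() {
    // "en" mirrors the default_language the site build passes to build_index.
    let language = Language::from_code("en").expect("unsupported language code");
    let mut index = Index::with_language(language, &["title", "body"]);

    // Made-up documents standing in for real sections/pages.
    index.add_doc(
        "https://example.com/posts/hello/",
        &["Hello world", "Some plain-text body to index"],
    );
    index.add_doc(
        "https://example.com/about/",
        &["About", "Another document"],
    );

    // This JSON is what the site build wraps in `window.searchIndex = ...;`
    // and writes out as `search_index.en.js`.
    println!("{}", index.to_json());
}
```
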
@@ -1,12 +1,11 @@
 [package]
 name = "site"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

 [dependencies]
 tera = "0.11"
 glob = "0.2"
-walkdir = "2"
 rayon = "1"
 serde = "1"
 serde_derive = "1"
@@ -20,6 +19,7 @@ front_matter = { path = "../front_matter" }
 pagination = { path = "../pagination" }
 taxonomies = { path = "../taxonomies" }
 content = { path = "../content" }
+search = { path = "../search" }

 [dev-dependencies]
 tempdir = "0.3"

@@ -1,7 +1,6 @@
 extern crate tera;
 extern crate rayon;
 extern crate glob;
-extern crate walkdir;
 extern crate serde;
 #[macro_use]
 extern crate serde_derive;
@@ -16,23 +15,23 @@ extern crate templates;
 extern crate pagination;
 extern crate taxonomies;
 extern crate content;
+extern crate search;

 #[cfg(test)]
 extern crate tempdir;

 use std::collections::HashMap;
-use std::fs::{remove_dir_all, copy, create_dir_all};
+use std::fs::{create_dir_all, remove_dir_all, copy};
 use std::mem;
 use std::path::{Path, PathBuf};

 use glob::glob;
 use tera::{Tera, Context};
-use walkdir::WalkDir;
-use sass_rs::{Options, OutputStyle, compile_file};
+use sass_rs::{Options as SassOptions, OutputStyle, compile_file};

 use errors::{Result, ResultExt};
 use config::{Config, get_config};
-use utils::fs::{create_file, create_directory, ensure_directory_exists};
+use utils::fs::{create_file, copy_directory, create_directory, ensure_directory_exists};
 use utils::templates::{render_template, rewrite_theme_paths};
 use content::{Page, Section, populate_previous_and_next_pages, sort_pages};
 use templates::{GUTENBERG_TERA, global_fns, render_redirect_template};
@@ -67,7 +66,7 @@ pub struct Site {
     pub sections: HashMap<PathBuf, Section>,
     pub tera: Tera,
     live_reload: bool,
-    output_path: PathBuf,
+    pub output_path: PathBuf,
     pub static_path: PathBuf,
     pub tags: Option<Taxonomy>,
     pub categories: Option<Taxonomy>,
@@ -92,14 +91,11 @@ impl Site {
         // Grab data from the extra section of the theme
         config.merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"))?;

-        // Test that the {templates,static} folder exist for that theme
+        // Test that the templates folder exist for that theme
         let theme_path = path.join("themes").join(&theme);
         if !theme_path.join("templates").exists() {
             bail!("Theme `{}` is missing a templates folder", theme);
         }
-        if !theme_path.join("static").exists() {
-            bail!("Theme `{}` is missing a static folder", theme);
-        }

         let theme_tpl_glob = format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "themes/**/*.html");
         let mut tera_theme = Tera::parse(&theme_tpl_glob).chain_err(|| "Error parsing templates from themes")?;
@@ -113,10 +109,10 @@ impl Site {
         let site = Site {
             base_path: path.to_path_buf(),
-            config: config,
+            config,
+            tera,
             pages: HashMap::new(),
             sections: HashMap::new(),
-            tera: tera,
             live_reload: false,
             output_path: path.join("public"),
             static_path: path.join("static"),
@@ -128,6 +124,11 @@ impl Site {
         Ok(site)
     }

+    /// The index section is ALWAYS at that path
+    pub fn index_section_path(&self) -> PathBuf {
+        self.base_path.join("content").join("_index.md")
+    }
+
     /// What the function name says
     pub fn enable_live_reload(&mut self) {
         self.live_reload = true;
@@ -201,7 +202,17 @@ impl Site {
         // Insert a default index section if necessary so we don't need to create
         // a _index.md to render the index page
-        let index_path = self.base_path.join("content").join("_index.md");
+        let index_path = self.index_section_path();
+        if let Some(ref index_section) = self.sections.get(&index_path) {
+            if self.config.build_search_index && !index_section.meta.in_search_index {
+                bail!(
+                    "You have enabled search in the config but disabled it in the index section: \
+                    either turn off the search in the config or remote `in_search_index = true` from the \
+                    section front-matter."
+                )
+            }
+        }
+        // Not in else because of borrow checker
         if !self.sections.contains_key(&index_path) {
             let mut index_section = Section::default();
             index_section.permalink = self.config.make_permalink("");
@@ -311,7 +322,7 @@ impl Site {
     /// Defaults to `AnchorInsert::None` if no parent section found
     pub fn find_parent_section_insert_anchor(&self, parent_path: &PathBuf) -> InsertAnchor {
         match self.sections.get(&parent_path.join("_index.md")) {
-            Some(s) => s.meta.insert_anchor_links.unwrap(),
+            Some(s) => s.meta.insert_anchor_links,
             None => InsertAnchor::None
         }
     }
@@ -353,7 +364,7 @@ impl Site {
                 .map(|p| sections[p].clone())
                 .collect::<Vec<_>>();
             section.subsections
-                .sort_by(|a, b| a.meta.weight.unwrap().cmp(&b.meta.weight.unwrap()));
+                .sort_by(|a, b| a.meta.weight.cmp(&b.meta.weight));
         }
     }
 }
@@ -368,7 +379,7 @@ impl Site {
             }
         }
         let pages = mem::replace(&mut section.pages, vec![]);
-        let (sorted_pages, cannot_be_sorted_pages) = sort_pages(pages, section.meta.sort_by());
+        let (sorted_pages, cannot_be_sorted_pages) = sort_pages(pages, section.meta.sort_by);
         section.pages = populate_previous_and_next_pages(&sorted_pages);
         section.ignored_pages = cannot_be_sorted_pages;
     }
@@ -376,8 +387,8 @@ impl Site {

     /// Find all the tags and categories if it's asked in the config
     pub fn populate_tags_and_categories(&mut self) {
-        let generate_tags_pages = self.config.generate_tags_pages.unwrap();
-        let generate_categories_pages = self.config.generate_categories_pages.unwrap();
+        let generate_tags_pages = self.config.generate_tags_pages;
+        let generate_categories_pages = self.config.generate_categories_pages;
         if !generate_tags_pages && !generate_categories_pages {
             return;
         }
@@ -412,45 +423,18 @@ impl Site {
         html
     }

-    /// Copy the file at the given path into the public folder
-    pub fn copy_static_file<P: AsRef<Path>>(&self, path: P, base_path: &PathBuf) -> Result<()> {
-        let relative_path = path.as_ref().strip_prefix(base_path).unwrap();
-        let target_path = self.output_path.join(relative_path);
-        if let Some(parent_directory) = target_path.parent() {
-            create_dir_all(parent_directory)?;
-        }
-        copy(path.as_ref(), &target_path)?;
-        Ok(())
-    }
-
-    /// Copy the content of the given folder into the `public` folder
-    fn copy_static_directory(&self, path: &PathBuf) -> Result<()> {
-        for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {
-            let relative_path = entry.path().strip_prefix(path).unwrap();
-            let target_path = self.output_path.join(relative_path);
-            if entry.path().is_dir() {
-                if !target_path.exists() {
-                    create_directory(&target_path)?;
-                }
-            } else {
-                let entry_fullpath = self.base_path.join(entry.path());
-                self.copy_static_file(entry_fullpath, path)?;
-            }
-        }
-        Ok(())
-    }
-
     /// Copy the main `static` folder and the theme `static` folder if a theme is used
     pub fn copy_static_directories(&self) -> Result<()> {
         // The user files will overwrite the theme files
         if let Some(ref theme) = self.config.theme {
-            self.copy_static_directory(
-                &self.base_path.join("themes").join(theme).join("static")
+            copy_directory(
+                &self.base_path.join("themes").join(theme).join("static"),
+                &self.output_path
             )?;
         }
         // We're fine with missing static folders
         if self.static_path.exists() {
-            self.copy_static_directory(&self.static_path)?;
+            copy_directory(&self.static_path, &self.output_path)?;
         }

         Ok(())
@@ -505,7 +489,7 @@ impl Site {
         self.render_sections()?;
         self.render_orphan_pages()?;
         self.render_sitemap()?;
-        if self.config.generate_rss.unwrap() {
+        if self.config.generate_rss {
             self.render_rss_feed()?;
         }
         self.render_robots()?;
@@ -521,53 +505,108 @@ impl Site {
             }
         }

-        if self.config.compile_sass.unwrap() {
+        if self.config.compile_sass {
             self.compile_sass(&self.base_path)?;
         }

-        self.copy_static_directories()
-    }
-
-    pub fn compile_sass(&self, base_path: &PathBuf) -> Result<()> {
-        ensure_directory_exists(&self.output_path)?;
-
-        let base_path = base_path.to_string_lossy().replace("\\", "/");
-        let sass_glob = format!("{}/{}", base_path, "sass/**/*.scss");
-        let files = glob(&sass_glob)
-            .unwrap()
-            .filter_map(|e| e.ok())
-            .filter(|entry| !entry.as_path().file_name().unwrap().to_string_lossy().starts_with('_'))
-            .collect::<Vec<_>>();
-
-        let mut sass_options = Options::default();
-        sass_options.output_style = OutputStyle::Compressed;
-        for file in files {
-            let name = file.as_path().file_stem().unwrap().to_string_lossy();
-            let css = match compile_file(file.as_path(), sass_options.clone()) {
-                Ok(c) => c,
-                Err(e) => bail!(e)
-            };
-
-            create_file(&self.output_path.join(format!("{}.css", name)), &css)?;
+        self.copy_static_directories()?;
+
+        if self.config.build_search_index {
+            self.build_search_index()?;
         }

         Ok(())
     }

+    pub fn build_search_index(&self) -> Result<()> {
+        // index first
+        create_file(
+            &self.output_path.join(&format!("search_index.{}.js", self.config.default_language)),
+            &format!(
+                "window.searchIndex = {};",
+                search::build_index(&self.sections, &self.config.default_language)?
+            ),
+        )?;
+
+        // then elasticlunr.min.js
+        create_file(
+            &self.output_path.join("elasticlunr.min.js"),
+            search::ELASTICLUNR_JS,
+        )?;
+
+        Ok(())
+    }
+
+    pub fn compile_sass(&self, base_path: &Path) -> Result<()> {
+        ensure_directory_exists(&self.output_path)?;
+
+        let sass_path = {
+            let mut sass_path = PathBuf::from(base_path);
+            sass_path.push("sass");
+            sass_path
+        };
+
+        let mut options = SassOptions::default();
+        options.output_style = OutputStyle::Compressed;
+        let mut compiled_paths = self.compile_sass_glob(&sass_path, "scss", options.clone())?;
+
+        options.indented_syntax = true;
+        compiled_paths.extend(self.compile_sass_glob(&sass_path, "sass", options)?);
+
+        compiled_paths.sort();
+        for window in compiled_paths.windows(2) {
+            if window[0].1 == window[1].1 {
+                bail!(
+                    "SASS path conflict: \"{}\" and \"{}\" both compile to \"{}\"",
+                    window[0].0.display(),
+                    window[1].0.display(),
+                    window[0].1.display(),
+                );
+            }
+        }
+
+        Ok(())
+    }
+
+    fn compile_sass_glob(&self, sass_path: &Path, extension: &str, options: SassOptions) -> Result<Vec<(PathBuf, PathBuf)>> {
+        let glob_string = format!("{}/**/*.{}", sass_path.display(), extension);
+        let files = glob(&glob_string)
+            .unwrap()
+            .filter_map(|e| e.ok())
+            .filter(|entry| !entry.as_path().file_name().unwrap().to_string_lossy().starts_with('_'))
+            .collect::<Vec<_>>();
+
+        let mut compiled_paths = Vec::new();
+        for file in files {
+            let css = compile_file(&file, options.clone())?;
+
+            let path_inside_sass = file.strip_prefix(&sass_path).unwrap();
+            let parent_inside_sass = path_inside_sass.parent();
+            let css_output_path = self.output_path.join(path_inside_sass).with_extension("css");
+
+            if parent_inside_sass.is_some() {
+                create_dir_all(&css_output_path.parent().unwrap())?;
+            }
+
+            create_file(&css_output_path, &css)?;
+            compiled_paths.push((path_inside_sass.to_owned(), css_output_path));
+        }
+
+        Ok(compiled_paths)
+    }
+
     pub fn render_aliases(&self) -> Result<()> {
         for page in self.pages.values() {
-            if let Some(ref aliases) = page.meta.aliases {
-                for alias in aliases {
-                    let mut output_path = self.output_path.to_path_buf();
-                    for component in alias.split('/') {
-                        output_path.push(&component);
-
-                        if !output_path.exists() {
-                            create_directory(&output_path)?;
-                        }
-                    }
-                    create_file(&output_path.join("index.html"), &render_redirect_template(&page.permalink, &self.tera)?)?;
-                }
+            for alias in &page.meta.aliases {
+                let mut output_path = self.output_path.to_path_buf();
+                for component in alias.split('/') {
+                    output_path.push(&component);
+
+                    if !output_path.exists() {
+                        create_directory(&output_path)?;
+                    }
+                }
+                create_file(&output_path.join("index.html"), &render_redirect_template(&page.permalink, &self.tera)?)?;
             }
         }
         Ok(())
@@ -578,7 +617,7 @@ impl Site {
         ensure_directory_exists(&self.output_path)?;
         create_file(
             &self.output_path.join("robots.txt"),
-            &render_template("robots.txt", &self.tera, &Context::new(), self.config.theme.clone())?
+            &render_template("robots.txt", &self.tera, &Context::new(), &self.config.theme)?
         )
     }
@@ -632,27 +671,26 @@ impl Site {

         let mut context = Context::new();

-        context.add(
-            "pages",
-            &self.pages
-                .values()
-                .filter(|p| !p.is_draft())
-                .map(|p| {
-                    let date = match p.meta.date {
-                        Some(ref d) => Some(d.to_string()),
-                        None => None,
-                    };
-                    SitemapEntry::new(p.permalink.clone(), date)
-                })
-                .collect::<Vec<_>>()
-        );
-        context.add(
-            "sections",
-            &self.sections
-                .values()
-                .map(|s| SitemapEntry::new(s.permalink.clone(), None))
-                .collect::<Vec<_>>()
-        );
+        let mut pages = self.pages
+            .values()
+            .filter(|p| !p.is_draft())
+            .map(|p| {
+                let date = match p.meta.date {
+                    Some(ref d) => Some(d.to_string()),
+                    None => None,
+                };
+                SitemapEntry::new(p.permalink.clone(), date)
+            })
+            .collect::<Vec<_>>();
+        pages.sort_by(|a, b| a.permalink.cmp(&b.permalink));
+        context.add("pages", &pages);
+
+        let mut sections = self.sections
+            .values()
+            .map(|s| SitemapEntry::new(s.permalink.clone(), None))
+            .collect::<Vec<_>>();
+        sections.sort_by(|a, b| a.permalink.cmp(&b.permalink));
+        context.add("sections", &sections);

         let mut categories = vec![];
         if let Some(ref c) = self.categories {
@@ -664,6 +702,7 @@ impl Site {
                 );
             }
         }
+        categories.sort_by(|a, b| a.permalink.cmp(&b.permalink));
         context.add("categories", &categories);

         let mut tags = vec![];
@@ -676,10 +715,11 @@ impl Site {
                 );
             }
         }
+        tags.sort_by(|a, b| a.permalink.cmp(&b.permalink));
         context.add("tags", &tags);
         context.add("config", &self.config);

-        let sitemap = &render_template("sitemap.xml", &self.tera, &context, self.config.theme.clone())?;
+        let sitemap = &render_template("sitemap.xml", &self.tera, &context, &self.config.theme)?;

         create_file(&self.output_path.join("sitemap.xml"), sitemap)?;
@@ -703,7 +743,7 @@ impl Site {
         let (sorted_pages, _) = sort_pages(pages, SortBy::Date);
         context.add("last_build_date", &sorted_pages[0].meta.date.clone().map(|d| d.to_string()));
         // limit to the last n elements)
-        context.add("pages", &sorted_pages.iter().take(self.config.rss_limit.unwrap()).collect::<Vec<_>>());
+        context.add("pages", &sorted_pages.iter().take(self.config.rss_limit).collect::<Vec<_>>());
         context.add("config", &self.config);

         let rss_feed_url = if self.config.base_url.ends_with('/') {
@@ -713,7 +753,7 @@ impl Site {
         };
         context.add("feed_url", &rss_feed_url);

-        let feed = &render_template("rss.xml", &self.tera, &context, self.config.theme.clone())?;
+        let feed = &render_template("rss.xml", &self.tera, &context, &self.config.theme)?;

         create_file(&self.output_path.join("rss.xml"), feed)?;
@@ -743,7 +783,7 @@ impl Site {
                 .reduce(|| Ok(()), Result::and)?;
         }

-        if !section.meta.should_render() {
+        if !section.meta.render {
             return Ok(());
         }
@@ -797,13 +837,8 @@ impl Site {
     pub fn render_paginated(&self, output_path: &Path, section: &Section) -> Result<()> {
         ensure_directory_exists(&self.output_path)?;

-        let paginate_path = match section.meta.paginate_path {
-            Some(ref s) => s.clone(),
-            None => unreachable!()
-        };
-
         let paginator = Paginator::new(&section.pages, section);
-        let folder_path = output_path.join(&paginate_path);
+        let folder_path = output_path.join(&section.meta.paginate_path);
         create_directory(&folder_path)?;

         paginator

@@ -100,7 +100,7 @@ fn can_build_site_without_live_reload() {
     site.set_output_path(&public);
     site.build().unwrap();

-    assert!(Path::new(&public).exists());
+    assert!(&public.exists());
     assert!(file_exists!(public, "index.html"));
     assert!(file_exists!(public, "sitemap.xml"));
     assert!(file_exists!(public, "robots.txt"));
@@ -140,6 +140,18 @@ fn can_build_site_without_live_reload() {
     assert!(file_exists!(public, "sample.css"));
     assert!(file_exists!(public, "some.js"));

+    // SASS and SCSS files compile correctly
+    assert!(file_exists!(public, "blog.css"));
+    assert!(file_contains!(public, "blog.css", "red"));
+    assert!(file_contains!(public, "blog.css", "blue"));
+    assert!(!file_contains!(public, "blog.css", "@import \"included\""));
+    assert!(file_contains!(public, "blog.css", "2rem")); // check include
+    assert!(!file_exists!(public, "_included.css"));
+    assert!(file_exists!(public, "scss.css"));
+    assert!(file_exists!(public, "sass.css"));
+    assert!(file_exists!(public, "nested_sass/sass.css"));
+    assert!(file_exists!(public, "nested_sass/scss.css"));
+
     // no live reload code
     assert_eq!(file_contains!(public, "index.html", "/livereload.js?port=1112&mindelay=10"), false);
@@ -186,6 +198,10 @@ fn can_build_site_with_live_reload() {
     // no live reload code
     assert!(file_contains!(public, "index.html", "/livereload.js?port=1112&mindelay=10"));

+    // the summary anchor link has been created
+    assert!(file_contains!(public, "posts/python/index.html", r#"<a name="continue-reading"></a>"#));
+    assert!(file_contains!(public, "posts/draft/index.html", r#"THEME_SHORTCODE"#));
 }

 #[test]
@@ -193,7 +209,7 @@ fn can_build_site_with_categories() {
     let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();
     path.push("test_site");
     let mut site = Site::new(&path, "config.toml").unwrap();
-    site.config.generate_categories_pages = Some(true);
+    site.config.generate_categories_pages = true;
     site.load().unwrap();

     for (i, page) in site.pages.values_mut().enumerate() {
@@ -247,7 +263,7 @@ fn can_build_site_with_tags() {
     let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();
     path.push("test_site");
     let mut site = Site::new(&path, "config.toml").unwrap();
-    site.config.generate_tags_pages = Some(true);
+    site.config.generate_tags_pages = true;
     site.load().unwrap();

     for (i, page) in site.pages.values_mut().enumerate() {
@@ -433,3 +449,21 @@ fn can_build_rss_feed() {
     // Next is posts/python.md
     assert!(file_contains!(public, "rss.xml", "Python in posts"));
 }
+
+
+#[test]
+fn can_build_search_index() {
+    let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();
+    path.push("test_site");
+    let mut site = Site::new(&path, "config.toml").unwrap();
+    site.load().unwrap();
+    site.config.build_search_index = true;
+    let tmp_dir = TempDir::new("example").expect("create temp dir");
+    let public = &tmp_dir.path().join("public");
+    site.set_output_path(&public);
+    site.build().unwrap();
+
+    assert!(Path::new(&public).exists());
+    assert!(file_exists!(public, "elasticlunr.min.js"));
+    assert!(file_exists!(public, "search_index.en.js"));
+}

@@ -1,7 +1,7 @@
 [package]
 name = "taxonomies"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

 [dependencies]
 tera = "0.11"

@@ -144,7 +144,7 @@ impl Taxonomy {
         context.add("current_url", &config.make_permalink(&format!("{}/{}", name, item.slug)));
         context.add("current_path", &format!("/{}/{}", name, item.slug));

-        render_template(&format!("{}.html", name), tera, &context, config.theme.clone())
+        render_template(&format!("{}.html", name), tera, &context, &config.theme)
             .chain_err(|| format!("Failed to render {} page.", name))
     }
@@ -156,7 +156,7 @@ impl Taxonomy {
         context.add("current_url", &config.make_permalink(&name));
         context.add("current_path", &name);

-        render_template(&format!("{}.html", name), tera, &context, config.theme.clone())
+        render_template(&format!("{}.html", name), tera, &context, &config.theme)
             .chain_err(|| format!("Failed to render {} page.", name))
     }
 }

@@ -1,7 +1,7 @@
 [package]
 name = "templates"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

 [dependencies]
 tera = "0.11"

@@ -23,8 +23,8 @@ macro_rules! required_string_arg {


 pub fn make_trans(config: Config) -> GlobalFn {
-    let translations_config = config.translations.unwrap();
-    let default_lang = to_value(config.default_language.unwrap()).unwrap();
+    let translations_config = config.translations;
+    let default_lang = to_value(config.default_language).unwrap();

     Box::new(move |args| -> Result<Value> {
         let key = required_string_arg!(args.get("key"), "`trans` requires a `key` argument.");

@@ -1,11 +1,12 @@
 [package]
 name = "utils"
 version = "0.1.0"
-authors = ["Vincent Prouillet <vincent@wearewizards.io>"]
+authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]

 [dependencies]
 errors = { path = "../errors" }
 tera = "0.11"
+walkdir = "2"


 [dev-dependencies]

@@ -1,9 +1,12 @@
 use std::io::prelude::*;
-use std::fs::{File, create_dir_all, read_dir};
+use std::fs::{File, create_dir_all, read_dir, copy};
 use std::path::{Path, PathBuf};

+use walkdir::WalkDir;
+
 use errors::{Result, ResultExt};


 /// Create a file with the content given
 pub fn create_file(path: &Path, content: &str) -> Result<()> {
     let mut file = File::create(&path)?;
@@ -60,6 +63,36 @@ pub fn find_related_assets(path: &Path) -> Vec<PathBuf> {
     assets
 }

+/// Copy a file but takes into account where to start the copy as
+/// there might be folders we need to create on the way
+pub fn copy_file(src: &Path, dest: &PathBuf, base_path: &PathBuf) -> Result<()> {
+    let relative_path = src.strip_prefix(base_path).unwrap();
+    let target_path = dest.join(relative_path);
+
+    if let Some(parent_directory) = target_path.parent() {
+        create_dir_all(parent_directory)?;
+    }
+
+    copy(src, target_path)?;
+    Ok(())
+}
+
+pub fn copy_directory(src: &PathBuf, dest: &PathBuf) -> Result<()> {
+    for entry in WalkDir::new(src).into_iter().filter_map(|e| e.ok()) {
+        let relative_path = entry.path().strip_prefix(src).unwrap();
+        let target_path = dest.join(relative_path);
+
+        if entry.path().is_dir() {
+            if !target_path.exists() {
+                create_directory(&target_path)?;
+            }
+        } else {
+            copy_file(entry.path(), dest, src)?;
+        }
+    }
+    Ok(())
+}
+
 #[cfg(test)]
 mod tests {
     use std::fs::File;

@@ -4,6 +4,7 @@ extern crate errors;
 #[cfg(test)]
 extern crate tempdir;
 extern crate tera;
+extern crate walkdir;

 pub mod fs;
 pub mod site;

@@ -46,6 +46,14 @@ mod tests {
         assert_eq!(res, "https://vincent.is/about");
     }

+    #[test]
+    fn can_resolve_valid_root_internal_link() {
+        let mut permalinks = HashMap::new();
+        permalinks.insert("about.md".to_string(), "https://vincent.is/about".to_string());
+        let res = resolve_internal_link("./about.md", &permalinks).unwrap();
+        assert_eq!(res, "https://vincent.is/about");
+    }
+
     #[test]
     fn can_resolve_internal_links_with_anchors() {
         let mut permalinks = HashMap::new();

@@ -20,14 +20,14 @@ macro_rules! render_default_tpl {
 /// is not found, it will look up for the equivalent template for the current theme if there is one.
 /// Lastly, if it's a default template (index, section or page), it will just return an empty string
 /// to avoid an error if there isn't a template with that name
-pub fn render_template(name: &str, tera: &Tera, context: &Context, theme: Option<String>) -> Result<String> {
+pub fn render_template(name: &str, tera: &Tera, context: &Context, theme: &Option<String>) -> Result<String> {
     if tera.templates.contains_key(name) {
         return tera
             .render(name, context)
             .map_err(|e| e.into());
     }

-    if let Some(ref t) = theme {
+    if let &Some(ref t) = theme {
         return tera
             .render(&format!("{}/templates/{}", t, name), context)
             .map_err(|e| e.into());
@@ -53,7 +53,11 @@ pub fn render_template(name: &str, tera: &Tera, context: &Context, theme: &Option<String>) -> Result<String> {
 /// that they will point to the right place (theme/templates/...)
 /// Include is NOT supported as it would be a pain to add and using blocks
 /// or macros is always better anyway for themes
+/// This will also rename the shortcodes to NOT have the themes in the path
+/// so themes shortcodes can be used.
 pub fn rewrite_theme_paths(tera: &mut Tera, theme: &str) {
+    let mut shortcodes_to_move = vec![];
+
     // We want to match the paths in the templates to the new names
     for tpl in tera.templates.values_mut() {
         // First the parent if there is none
@@ -67,13 +71,25 @@ pub fn rewrite_theme_paths(tera: &mut Tera, theme: &str) {
             updated.push((format!("{}/templates/{}", theme, filename), namespace.to_string()));
         }
         tpl.imported_macro_files = updated;
+
+        if tpl.name.starts_with(&format!("{}/templates/shortcodes", theme)) {
+            let new_name = tpl.name.replace(&format!("{}/templates/", theme), "");
+            shortcodes_to_move.push((tpl.name.clone(), new_name.clone()));
+            tpl.name = new_name;
+        }
+    }
+
+    // and then replace shortcodes in the Tera instance using the new names
+    for (old_name, new_name) in shortcodes_to_move {
+        let tpl = tera.templates.remove(&old_name).unwrap();
+        tera.templates.insert(new_name, tpl);
     }
 }

 #[cfg(test)]
 mod tests {
     use tera::Tera;
-    use super::{rewrite_theme_paths};
+    use super::rewrite_theme_paths;

     #[test]
     fn can_rewrite_all_paths_of_theme() {

@@ -0,0 +1 @@
+Hello

@@ -6,6 +6,7 @@ compile_sass = true
highlight_code = true
insert_anchor_links = true
highlight_theme = "kronuz"
+build_search_index = true

[extra]
author = "Vincent Prouillet"

@@ -17,6 +17,7 @@ Content
- Internal links & deep linking
- Table of contents
- Syntax highlighting
+- Sass

Templates
- Intro

@@ -54,6 +54,11 @@ weight = 0
# current one. This takes an array of path, not URLs.
aliases = []

+# Whether the page should be in the search index. This is only used if
+# `build_search_index` is set to true in the config and the parent section
+# hasn't set `in_search_index` to false in its front-matter
+in_search_index = true
+
# Template to use to render this page
template = "page.html"

@@ -72,3 +77,7 @@ paragraph of each page in a list for example.
To do so, add `<!-- more -->` in your content at the point where you want the
summary to end and the content up to that point will be also available separately
in the [template](./documentation/templates/pages-sections.md#page-variables).
+
+An anchor link to this position named `continue-reading` is created so you can link
+directly to it if needed, for example:
+`<a href="{{ page.permalink }}#continue-reading">Continue Reading</a>`

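As a rough, illustrative sketch of the behaviour described above (not Gutenberg's actual rendering code), the `<!-- more -->` marker can be thought of as splitting off the summary and leaving a named `continue-reading` anchor in the rendered page:

```rust
fn main() {
    // Hypothetical page content, for illustration only.
    let content = "Intro paragraph.\n<!-- more -->\nThe rest of the article.";

    // Everything before the marker becomes the summary...
    let summary = content.split("<!-- more -->").next().unwrap().trim();
    // ...and the marker position gets a named anchor in the rendered output.
    let rendered = content.replace("<!-- more -->", r#"<a name="continue-reading"></a>"#);

    assert_eq!(summary, "Intro paragraph.");
    assert!(rendered.contains(r#"name="continue-reading""#));
}
```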
42  docs/content/documentation/content/sass.md  (new file)
@@ -0,0 +1,42 @@
+++
title = "Sass"
weight = 110
+++

Sass is a popular CSS extension language that approaches some of the harder
parts of maintaining large sets of CSS rules. If you're curious about what Sass
is and why it might be useful for styling your static site, the following links
may be of interest:

* The [official Sass website](http://sass-lang.com/)
* [Why Sass?](https://alistapart.com/article/why-sass), by Dan Cederholm

## Using Sass in Gutenberg

Gutenberg processes any files with the `sass` or `scss` extensions in the `sass`
folder, and places the processed output into a `css` file with the same folder
structure and base name into the `public` folder:

```bash
.
└── sass
    ├── style.scss // -> ./public/style.css
    ├── indented_style.sass // -> ./public/indented_style.css
    ├── _include.scss # This file won't get put into the `public` folder, but other files can @import it.
    ├── assets
    │   ├── fancy.scss // -> ./public/assets/fancy.css
    │   ├── same_name.scss // -> ./public/assets/same_name.css
    │   ├── same_name.sass # CONFLICT! This has the same base name as the file above, so Gutenberg will return an error.
    │   └── _common_mixins.scss # This file won't get put into the `public` folder, but other files can @import it.
    └── secret-side-project
        └── style.scss // -> ./public/secret-side-project/fancy.css
```

Files with a leading underscore in the name are not placed into the `public`
folder, but can still be used as `@import` dependencies. For more information, see the "Partials" section of
[Sass Basics](https://sass-lang.com/guide#partials).

Files with the `scss` extension use ["Sassy CSS" syntax](http://sass-lang.com/documentation/#Formatting),
while files with the `sass` extension use the ["indented" syntax](http://sass-lang.com/documentation/file.INDENTED_SYNTAX.html).
Gutenberg will return an error if a `scss` and `sass` file exist with the same
base name in the same folder to avoid confusion -- see the example above.
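The folder-structure mapping described in the new page can be sketched with plain path handling. This is an illustration of the rule only, not Gutenberg's actual Sass code; `css_output_path` is a made-up helper:

```rust
use std::path::{Path, PathBuf};

/// Rough sketch of the mapping above: `sass/assets/fancy.scss` -> `public/assets/fancy.css`,
/// preserving the folder structure and swapping the extension.
fn css_output_path(sass_dir: &Path, public_dir: &Path, file: &Path) -> Option<PathBuf> {
    let name = file.file_stem()?.to_str()?;
    // Partials (leading underscore) are only for @import and are never written out.
    if name.starts_with('_') {
        return None;
    }
    let relative = file.strip_prefix(sass_dir).ok()?;
    Some(public_dir.join(relative).with_extension("css"))
}

fn main() {
    let out = css_output_path(
        Path::new("sass"),
        Path::new("public"),
        Path::new("sass/assets/fancy.scss"),
    );
    assert_eq!(out, Some(PathBuf::from("public/assets/fancy.css")));
    assert_eq!(
        css_output_path(Path::new("sass"), Path::new("public"), Path::new("sass/_include.scss")),
        None
    );
}
```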
22  docs/content/documentation/content/search.md  (new file)
@@ -0,0 +1,22 @@
+++
title = "Search"
weight = 100
+++

Gutenberg can build a search index from the sections and pages content to
be used by a JavaScript library: [elasticlunr](http://elasticlunr.com/).

To enable it, you only need to set `build_search_index = true` in your `config.toml` and Gutenberg will
generate an index for the `default_language` set for all pages not excluded from the search index.

It is very important to set the `default_language` in your `config.toml` if you are writing a site not in
English: the index building pipelines are very different depending on the language.

After `gutenberg build` or `gutenberg serve`, you should see two files in your static directory:

- `search_index.${default_language}.js`: so `search_index.en.js` for a default setup
- `elasticlunr.min.js`

As each site will be different, Gutenberg makes no assumptions about how your search will work and doesn't provide
the JavaScript/CSS code to do an actual search and display results. You can however look at how this very site
is implementing it to have an idea: [search.js](https://github.com/Keats/gutenberg/tree/master/docs/static/search.js).
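The opt-in/opt-out rules spelled out here and in the page/section front-matter docs combine roughly as below; the struct and field names are illustrative, not Gutenberg's internals:

```rust
/// Illustration of the indexing rules described above, not the actual implementation.
struct SearchSettings {
    build_search_index: bool,      // config.toml
    section_in_search_index: bool, // parent section front-matter
    page_in_search_index: bool,    // page front-matter
}

fn page_is_indexed(s: &SearchSettings) -> bool {
    s.build_search_index && s.section_in_search_index && s.page_in_search_index
}

fn main() {
    let settings = SearchSettings {
        build_search_index: true,
        section_in_search_index: true,
        page_in_search_index: false,
    };
    assert!(!page_is_indexed(&settings));

    // For a default English setup the generated file is named search_index.en.js.
    let default_language = "en";
    assert_eq!(format!("search_index.{}.js", default_language), "search_index.en.js");
}
```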
@@ -52,6 +52,10 @@ paginate_path = "page"
# Options are "left", "right" and "none"
insert_anchor_links = "none"

+# Whether the section pages should be in the search index. This is only used if
+# `build_search_index` is set to true in the config
+in_search_index = true
+
# Whether to render that section homepage or not.
# Useful when the section is only there to organize things but is not meant
# to be used directly

@@ -87,6 +91,9 @@ is enabled by setting the `sort_by` front-matter variable.
Any page that cannot be sorted, for example if missing the date variable while sorting by `date`, will be ignored and
won't be rendered. The terminal will warn you if this is happening.

+If several pages have the same date/weight/order, their permalink will be used to break the tie following
+an alphabetical order.
+
### `date`
This will sort all pages by their `date` field, from the most recent to the oldest.

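A small sketch of the tie-breaking rule added above: pages that compare equal on the sort key fall back to alphabetical order of their permalink. The tuples below are illustrative, not Gutenberg's page type:

```rust
fn main() {
    // (date, permalink)
    let mut pages = vec![
        ("2018-03-29", "https://example.com/zebra/"),
        ("2018-03-29", "https://example.com/apple/"),
        ("2018-01-01", "https://example.com/older/"),
    ];

    // Most recent first; identical dates are ordered by permalink.
    pages.sort_by(|a, b| b.0.cmp(a.0).then(a.1.cmp(b.1)));

    assert_eq!(pages[0].1, "https://example.com/apple/");
    assert_eq!(pages[1].1, "https://example.com/zebra/");
    assert_eq!(pages[2].1, "https://example.com/older/");
}
```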
@@ -13,7 +13,7 @@ the command help by running `gutenberg <cmd> --help`.
Creates the directory structure used by Gutenberg at the given directory.

```bash
-$ gutenberg init <my_site>
+$ gutenberg init my_site
```

will create a new folder named `my_site` and the files/folders needed by

@@ -37,10 +37,17 @@ This is useful for example when you want to deploy previews of a site to a dynam
deploy previews.

+You can override the default output directory 'public' by passing another value to the `output-dir` flag.
+
```bash
$ gutenberg build --output-dir $DOCUMENT_ROOT
```

+You can also point to another config file than `config.toml` like so - the position of the `config` option is important:
+
+```bash
+$ gutenberg --config config.staging.toml build
+```
+
## serve

This will build and serve the site using a local server. You can also specify

@@ -63,3 +70,10 @@ hard refresh if possible.

Gutenberg does a best-effort to live reload but some changes cannot be handled automatically. If you
fail to see your change, you will need to restart `gutenberg serve`.
+
+You can also point to another config file than `config.toml` like so - the position of the `config` option is important:
+
+```bash
+$ gutenberg --config config.staging.toml serve
+```

@@ -51,6 +51,10 @@ generate_categories_pages = false
# Whether to compile the Sass files found in the `sass` directory
compile_sass = false

+# Whether to build a search index out of the pages and section
+# content for the `default_language`
+build_search_index = false
+
# A list of glob patterns specifying asset files to ignore when
# processing the content directory.
# Defaults to none, which means all asset files are copied over to the public folder.

@@ -33,6 +33,8 @@ To learn more, read [the content overview](./documentation/content/overview.md).

## `sass`
Contains the [Sass](http://sass-lang.com) files to be compiled. Non-Sass files will be ignored.
+The directory structure of the `sass` folder will be preserved when copying over the compiled files: a file at
+`sass/something/site.scss` will be compiled to `public/something/site.css`.

## `static`
Contains any kind of files. All the files/folders in the `static` folder will be copied as-is in the output directory.

@@ -21,3 +21,5 @@ all the variables above are arrays of `SitemapEntry` with the following type:
permalink: String;
date: String?;
```

+All `SitemapEntry` are sorted in each variable by their permalink.

47  docs/sass/_search.scss  (new file)
@@ -0,0 +1,47 @@
.search-container {
  display: inline-block;
  position: relative;
  width: 300px;

  input {
    width: 100%;
    padding: 0.5rem;
  }
}

.search-results {
  display: none;
  position: absolute;
  background: white;
  color: black;
  padding: 1rem;
  box-shadow: 2px 2px 2px 0 rgba(0, 0, 0, 0.5);
  max-height: 500px;
  overflow: auto;
  width: 150%;
  right: 0;

  &__items {
    list-style: none;
  }

  li {
    margin-top: 1rem;
    border-bottom: 1px solid #ccc;
    font-size: 0.9rem;

    &:first-of-type {
      margin-top: 0;
    }
  }

  &__item {
    margin-bottom: 1rem;

    a {
      font-size: 1.2rem;
      display: inline-block;
      margin-bottom: 0.5rem;
    }
  }
}

@@ -16,3 +16,4 @@ $link-color: #007CBC;
@import "index";
@import "docs";
@import "themes";
+@import "search";

180  docs/static/search.js  (vendored, new file)
@@ -0,0 +1,180 @@
function debounce(func, wait) {
  var timeout;

  return function () {
    var context = this;
    var args = arguments;
    clearTimeout(timeout);

    timeout = setTimeout(function () {
      timeout = null;
      func.apply(context, args);
    }, wait);
  };
}

// Taken from mdbook
// The strategy is as follows:
// First, assign a value to each word in the document:
//  Words that correspond to search terms (stemmer aware): 40
//  Normal words: 2
//  First word in a sentence: 8
// Then use a sliding window with a constant number of words and count the
// sum of the values of the words within the window. Then use the window that got the
// maximum sum. If there are multiple maximas, then get the last one.
// Enclose the terms in <b>.
function makeTeaser(body, terms) {
  var TERM_WEIGHT = 40;
  var NORMAL_WORD_WEIGHT = 2;
  var FIRST_WORD_WEIGHT = 8;
  var TEASER_MAX_WORDS = 30;

  var stemmedTerms = terms.map(function (w) {
    return elasticlunr.stemmer(w.toLowerCase());
  });
  var termFound = false;
  var index = 0;
  var weighted = []; // contains elements of ["word", weight, index_in_document]

  // split in sentences, then words
  var sentences = body.toLowerCase().split(". ");

  for (var i in sentences) {
    var words = sentences[i].split(" ");
    var value = FIRST_WORD_WEIGHT;

    for (var j in words) {
      var word = words[j];

      if (word.length > 0) {
        for (var k in stemmedTerms) {
          if (elasticlunr.stemmer(word).startsWith(stemmedTerms[k])) {
            value = TERM_WEIGHT;
            termFound = true;
          }
        }
        weighted.push([word, value, index]);
        value = NORMAL_WORD_WEIGHT;
      }

      index += word.length;
      index += 1; // ' ' or '.' if last word in sentence
    }

    index += 1; // because we split at a two-char boundary '. '
  }

  if (weighted.length === 0) {
    return body;
  }

  var windowWeights = [];
  var windowSize = Math.min(weighted.length, TEASER_MAX_WORDS);
  // We add a window with all the weights first
  var curSum = 0;
  for (var i = 0; i < windowSize; i++) {
    curSum += weighted[i][1];
  }
  windowWeights.push(curSum);

  for (var i = 0; i < weighted.length - windowSize; i++) {
    curSum -= weighted[i][1];
    curSum += weighted[i + windowSize][1];
    windowWeights.push(curSum);
  }

  // If we didn't find the term, just pick the first window
  var maxSumIndex = 0;
  if (termFound) {
    var maxFound = 0;
    // backwards
    for (var i = windowWeights.length - 1; i >= 0; i--) {
      if (windowWeights[i] > maxFound) {
        maxFound = windowWeights[i];
        maxSumIndex = i;
      }
    }
  }

  var teaser = [];
  var startIndex = weighted[maxSumIndex][2];
  for (var i = maxSumIndex; i < maxSumIndex + windowSize; i++) {
    var word = weighted[i];
    if (startIndex < word[2]) {
      // missing text from index to start of `word`
      teaser.push(body.substring(startIndex, word[2]));
      startIndex = word[2];
    }

    // add <em/> around search terms
    if (word[1] === TERM_WEIGHT) {
      teaser.push("<b>");
    }
    startIndex = word[2] + word[0].length;
    teaser.push(body.substring(word[2], startIndex));

    if (word[1] === TERM_WEIGHT) {
      teaser.push("</b>");
    }
  }
  teaser.push("…");
  return teaser.join("");
}

function formatSearchResultItem(item, terms) {
  return '<div class="search-results__item">'
    + `<a href="${item.ref}">${item.doc.title}</a>`
    + `<div>${makeTeaser(item.doc.body, terms)}</div>`
    + '</div>';
}

function initSearch() {
  var $searchInput = document.getElementById("search");
  var $searchResults = document.querySelector(".search-results");
  var $searchResultsItems = document.querySelector(".search-results__items");
  var MAX_ITEMS = 10;

  var options = {
    bool: "AND",
    fields: {
      title: {boost: 2},
      body: {boost: 1},
    }
  };
  var currentTerm = "";
  var index = elasticlunr.Index.load(window.searchIndex);

  $searchInput.addEventListener("keyup", debounce(function() {
    var term = $searchInput.value.trim();
    if (term === currentTerm || !index) {
      return;
    }
    $searchResults.style.display = term === "" ? "none" : "block";
    $searchResultsItems.innerHTML = "";
    if (term === "") {
      return;
    }

    var results = index.search(term, options);
    if (results.length === 0) {
      $searchResults.style.display = "none";
      return;
    }

    currentTerm = term;
    for (var i = 0; i < Math.min(results.length, MAX_ITEMS); i++) {
      var item = document.createElement("li");
      item.innerHTML = formatSearchResultItem(results[i], term.split(" "));
      $searchResultsItems.appendChild(item);
    }
  }, 150));
}


if (document.readyState === "complete" ||
    (document.readyState !== "loading" && !document.documentElement.doScroll)
) {
  initSearch();
} else {
  document.addEventListener("DOMContentLoaded", initSearch);
}

12  docs/templates/index.html  (vendored)
@@ -18,6 +18,14 @@
<a class="white" href="{{ get_url(path="./documentation/_index.md") }}" class="nav-link">Docs</a>
<a class="white" href="{{ get_url(path="./themes/_index.md") }}" class="nav-link">Themes</a>
<a class="white" href="https://github.com/Keats/gutenberg" class="nav-link">GitHub</a>
+
+<div class="search-container">
+  <input id="search" type="search" placeholder="🔎 Search the docs">
+
+  <div class="search-results">
+    <div class="search-results__items"></div>
+  </div>
+</div>
</nav>
</header>

@@ -93,5 +101,9 @@
<footer>
©2017-2018 — <a class="white" href="https://vincent.is">Vincent Prouillet</a> and <a class="white" href="https://github.com/Keats/gutenberg/graphs/contributors">contributors</a>
</footer>
+
+<script type="text/javascript" src="{{ get_url(path="elasticlunr.min.js", trailing_slash=false) }}"></script>
+<script type="text/javascript" src="{{ get_url(path="search_index.en.js", trailing_slash=false) }}"></script>
+<script type="text/javascript" src="{{ get_url(path="search.js", trailing_slash=false) }}"></script>
</body>
</html>

@@ -10,6 +10,8 @@ pub fn build_cli() -> App<'static, 'static> {
        Arg::with_name("config")
            .short("c")
            .long("config")
+            .default_value("config.toml")
+            .takes_value(true)
            .help("Path to a config file other than config.toml")
    )
    .subcommands(vec![

@@ -19,6 +19,9 @@ compile_sass = %COMPILE_SASS%
# Theme can be customised by setting the `highlight_theme` variable to a theme supported by Gutenberg
highlight_code = %HIGHLIGHT%

+# Whether to build a search index to be used later on by a JavaScript library
+build_search_index = %SEARCH%
+
[extra]
# Put all your custom variables here
"#;

@@ -37,11 +40,13 @@ pub fn create_new_project(name: &str) -> Result<()> {
    let base_url = ask_url("> What is the URL of your site?", "https://example.com")?;
    let compile_sass = ask_bool("> Do you want to enable Sass compilation?", true)?;
    let highlight = ask_bool("> Do you want to enable syntax highlighting?", false)?;
+    let search = ask_bool("> Do you want to build a search index of the content?", false)?;

    let config = CONFIG
        .trim_left()
        .replace("%BASE_URL%", &base_url)
        .replace("%COMPILE_SASS%", &format!("{}", compile_sass))
+        .replace("%SEARCH%", &format!("{}", search))
        .replace("%HIGHLIGHT%", &format!("{}", highlight));

    create_file(&path.join("config.toml"), &config)?;

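For illustration, the placeholder substitution above produces a config such as the following; this uses a cut-down template, not the full `CONFIG` string from the file:

```rust
fn main() {
    // Cut-down version of the CONFIG template above, for illustration only.
    let template = "highlight_code = %HIGHLIGHT%\nbuild_search_index = %SEARCH%\n";
    let highlight = true;
    let search = false;

    let config = template
        .replace("%SEARCH%", &format!("{}", search))
        .replace("%HIGHLIGHT%", &format!("{}", highlight));

    assert_eq!(config, "highlight_code = true\nbuild_search_index = false\n");
}
```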
@@ -38,6 +38,7 @@ use ctrlc;

use site::Site;
use errors::{Result, ResultExt};
+use utils::fs::copy_file;

use console;
use rebuild;

@@ -163,7 +164,7 @@ pub fn serve(interface: &str, port: &str, output_dir: &str, base_url: &str, conf
    if watching_static {
        watchers.push("static");
    }
-    if site.config.compile_sass.unwrap() {
+    if site.config.compile_sass {
        watchers.push("sass");
    }

@@ -207,7 +208,7 @@ pub fn serve(interface: &str, port: &str, output_dir: &str, base_url: &str, conf
    (ChangeKind::StaticFiles, p) => {
        if path.is_file() {
            console::info(&format!("-> Static file changes detected {}", path.display()));
-            rebuild_done_handling(&broadcaster, site.copy_static_file(&path, &site.static_path), &p);
+            rebuild_done_handling(&broadcaster, copy_file(&path, &site.output_path, &site.static_path), &p);
        }
    },
    (ChangeKind::Sass, p) => {

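The `compile_sass` change works because the config field is now a plain `bool` (a default is applied when the config is loaded) rather than an `Option<bool>`. A minimal sketch of why the call site no longer needs `unwrap()`; the struct and field below are illustrative, not the real config type:

```rust
struct Config {
    compile_sass: bool, // previously Option<bool>, which forced .unwrap() at call sites
}

fn main() {
    let config = Config { compile_sass: true };

    let mut watchers: Vec<&str> = Vec::new();
    if config.compile_sass {
        watchers.push("sass");
    }
    assert_eq!(watchers, vec!["sass"]);
}
```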
@@ -29,7 +29,7 @@ mod prompt;
fn main() {
    let matches = cli::build_cli().get_matches();

-    let config_file = matches.value_of("config").unwrap_or("config.toml");
+    let config_file = matches.value_of("config").unwrap();

    match matches.subcommand() {
        ("init", Some(matches)) => {

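The CLI and `main` hunks work together: since the `config` argument now carries `.default_value("config.toml")`, clap always reports a value for it, so `value_of("config").unwrap()` cannot panic and the old `unwrap_or` fallback became redundant. A minimal sketch, assuming the clap 2.x-style API that the signatures above suggest:

```rust
use clap::{App, Arg};

fn main() {
    let app = App::new("gutenberg")
        .arg(
            Arg::with_name("config")
                .short("c")
                .long("config")
                .default_value("config.toml")
                .takes_value(true)
                .help("Path to a config file other than config.toml"),
        );

    // No --config on the command line: the default value is filled in,
    // so value_of("config").unwrap() is safe and unwrap_or is no longer needed.
    let matches = app.get_matches_from(vec!["gutenberg"]);
    assert_eq!(matches.value_of("config").unwrap(), "config.toml");
}
```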
@@ -4,3 +4,7 @@ draft = true
date = 2016-03-01
+++

+
+{{ theme_shortcode() }}
+
+Link to [root](./hello.md).

@@ -11,3 +11,4 @@ A simple page with a slug defined
# Title

Hey
+

@@ -6,6 +6,8 @@ date = 2017-03-01

Same filename but different path

+<!-- more -->
+
{{ basic() }}

{{ pirate(name="Bob") }}

0  test_site/sass/nested_sass/sass.sass  (new file)
0  test_site/sass/nested_sass/scss.scss  (new file)
0  test_site/sass/sass.sass  (new file)
0  test_site/sass/scss.scss  (new file)
@@ -0,0 +1 @@
+THEME_SHORTCODE