Merge pull request #369 from remexre/feature/runtime-syntax-loading
Runtime syntax loading
commit ce813fab1b
@@ -3,23 +3,22 @@ extern crate serde_derive;
 extern crate toml;
 #[macro_use]
 extern crate errors;
-extern crate highlighting;
 extern crate chrono;
 extern crate globset;
+extern crate highlighting;
 
 use std::collections::HashMap;
 use std::fs::File;
 use std::io::prelude::*;
 use std::path::{Path, PathBuf};
 
-use toml::Value as Toml;
 use chrono::Utc;
 use globset::{Glob, GlobSet, GlobSetBuilder};
+use toml::Value as Toml;
 
 use errors::{Result, ResultExt};
 use highlighting::THEME_SET;
 
-
 mod theme;
 
 use theme::Theme;
@@ -27,7 +26,6 @@ use theme::Theme;
 // We want a default base url for tests
 static DEFAULT_BASE_URL: &'static str = "http://a-website.com";
 
-
 #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(default)]
 pub struct Taxonomy {
@@ -107,6 +105,9 @@ pub struct Config {
     /// Whether to check all external links for validity
     pub check_external_links: bool,
 
+    /// A list of directories to search for additional `.sublime-syntax` files in.
+    pub extra_syntaxes: Vec<String>,
+
     /// All user params set in [extra] in the config
     pub extra: HashMap<String, Toml>,
 
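Since `Config` derives `Deserialize` with `#[serde(default)]`, the new `extra_syntaxes` field is optional in `config.toml` and simply falls back to an empty list when the key is absent. A minimal sketch of that behaviour, assuming `serde_derive` and `toml` as dependencies; `MiniConfig` is a made-up stand-in for the real struct:

```rust
#[macro_use]
extern crate serde_derive;
extern crate toml;

// Tiny stand-in for the real Config: #[serde(default)] plus a derived Default
// means missing keys (including extra_syntaxes) deserialize to their defaults.
#[derive(Debug, Default, Deserialize)]
#[serde(default)]
struct MiniConfig {
    base_url: String,
    extra_syntaxes: Vec<String>,
}

fn main() {
    let with: MiniConfig = toml::from_str(
        "base_url = \"https://example.com\"\nextra_syntaxes = [\"syntaxes\"]",
    ).unwrap();
    let without: MiniConfig = toml::from_str("base_url = \"https://example.com\"").unwrap();

    assert_eq!(with.extra_syntaxes, vec!["syntaxes".to_string()]);
    assert!(without.extra_syntaxes.is_empty());
    println!("{:?} / {:?}", with, without);
}
```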
@@ -114,14 +115,13 @@ pub struct Config {
     pub build_timestamp: Option<i64>,
 }
 
-
 impl Config {
     /// Parses a string containing TOML to our Config struct
     /// Any extra parameter will end up in the extra field
     pub fn parse(content: &str) -> Result<Config> {
         let mut config: Config = match toml::from_str(content) {
             Ok(c) => c,
-            Err(e) => bail!(e)
+            Err(e) => bail!(e),
         };
 
         if config.base_url.is_empty() || config.base_url == DEFAULT_BASE_URL {
@@ -134,7 +134,6 @@ impl Config {
 
         config.build_timestamp = Some(Utc::now().timestamp());
 
-
         if !config.ignored_content.is_empty() {
             // Convert the file glob strings into a compiled glob set matcher. We want to do this once,
             // at program initialization, rather than for every page, for example. We arrange for the
@@ -145,11 +144,19 @@ impl Config {
             for pat in &config.ignored_content {
                 let glob = match Glob::new(pat) {
                     Ok(g) => g,
-                    Err(e) => bail!("Invalid ignored_content glob pattern: {}, error = {}", pat, e)
+                    Err(e) => bail!(
+                        "Invalid ignored_content glob pattern: {}, error = {}",
+                        pat,
+                        e
+                    ),
                 };
                 glob_set_builder.add(glob);
             }
-            config.ignored_content_globset = Some(glob_set_builder.build().expect("Bad ignored_content in config file."));
+            config.ignored_content_globset = Some(
+                glob_set_builder
+                    .build()
+                    .expect("Bad ignored_content in config file."),
+            );
         }
 
         Ok(config)
@@ -161,7 +168,12 @@ impl Config {
         let path = path.as_ref();
         let file_name = path.file_name().unwrap();
         File::open(path)
-            .chain_err(|| format!("No `{:?}` file found. Are you in the right directory?", file_name))?
+            .chain_err(|| {
+                format!(
+                    "No `{:?}` file found. Are you in the right directory?",
+                    file_name
+                )
+            })?
             .read_to_string(&mut content)?;
 
         Config::parse(&content)
@@ -169,7 +181,11 @@ impl Config {
 
     /// Makes a url, taking into account that the base url might have a trailing slash
     pub fn make_permalink(&self, path: &str) -> String {
-        let trailing_bit = if path.ends_with('/') || path.is_empty() { "" } else { "/" };
+        let trailing_bit = if path.ends_with('/') || path.is_empty() {
+            ""
+        } else {
+            "/"
+        };
 
         // Index section with a base url that has a trailing slash
         if self.base_url.ends_with('/') && path == "/" {
@@ -195,12 +211,16 @@ impl Config {
         let original = self.extra.clone();
         // 2. inject theme extra values
         for (key, val) in &theme.extra {
-            self.extra.entry(key.to_string()).or_insert_with(|| val.clone());
+            self.extra
+                .entry(key.to_string())
+                .or_insert_with(|| val.clone());
         }
 
         // 3. overwrite with original config
         for (key, val) in &original {
-            self.extra.entry(key.to_string()).or_insert_with(|| val.clone());
+            self.extra
+                .entry(key.to_string())
+                .or_insert_with(|| val.clone());
         }
 
         Ok(())
@@ -233,13 +253,13 @@ impl Default for Config {
             ignored_content: Vec::new(),
             ignored_content_globset: None,
             translations: HashMap::new(),
+            extra_syntaxes: Vec::new(),
             extra: HashMap::new(),
             build_timestamp: Some(1),
         }
     }
 }
 
-
 /// Get and parse the config.
 /// If it doesn't succeed, exit
 pub fn get_config(path: &Path, filename: &str) -> Config {
@@ -253,7 +273,6 @@ pub fn get_config(path: &Path, filename: &str) -> Config {
     }
 }
 
-
 #[cfg(test)]
 mod tests {
     use super::{Config, Theme};
|
@ -303,7 +322,16 @@ hello = "world"
|
||||||
|
|
||||||
let config = Config::parse(config);
|
let config = Config::parse(config);
|
||||||
assert!(config.is_ok());
|
assert!(config.is_ok());
|
||||||
assert_eq!(config.unwrap().extra.get("hello").unwrap().as_str().unwrap(), "world");
|
assert_eq!(
|
||||||
|
config
|
||||||
|
.unwrap()
|
||||||
|
.extra
|
||||||
|
.get("hello")
|
||||||
|
.unwrap()
|
||||||
|
.as_str()
|
||||||
|
.unwrap(),
|
||||||
|
"world"
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@@ -313,7 +341,6 @@ hello = "world"
         assert_eq!(config.make_permalink(""), "http://vincent.is/");
     }
 
-
     #[test]
     fn can_make_url_index_page_with_railing_slash_url() {
         let mut config = Config::default();
@@ -339,7 +366,10 @@ hello = "world"
     fn can_make_url_with_localhost() {
         let mut config = Config::default();
         config.base_url = "http://127.0.0.1:1111".to_string();
-        assert_eq!(config.make_permalink("/tags/rust"), "http://127.0.0.1:1111/tags/rust/");
+        assert_eq!(
+            config.make_permalink("/tags/rust"),
+            "http://127.0.0.1:1111/tags/rust/"
+        );
     }
 
     #[test]
@@ -166,12 +166,13 @@ impl Page {
 
     /// We need access to all pages url to render links relative to content
     /// so that can't happen at the same time as parsing
-    pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config, anchor_insert: InsertAnchor) -> Result<()> {
+    pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config, base_path: &Path, anchor_insert: InsertAnchor) -> Result<()> {
         let mut context = RenderContext::new(
             tera,
             config,
             &self.permalink,
             permalinks,
+            base_path,
             anchor_insert,
         );
 
@@ -123,12 +123,13 @@ impl Section {
 
     /// We need access to all pages url to render links relative to content
     /// so that can't happen at the same time as parsing
-    pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config) -> Result<()> {
+    pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config, base_path: &Path) -> Result<()> {
         let mut context = RenderContext::new(
             tera,
             config,
             &self.permalink,
             permalinks,
+            base_path,
             self.meta.insert_anchor_links,
         );
 
@@ -2,16 +2,21 @@
 extern crate lazy_static;
 extern crate syntect;
 
+use std::cell::RefCell;
+use std::path::Path;
+
+use syntect::LoadingError;
 use syntect::dumps::from_binary;
 use syntect::parsing::SyntaxSet;
 use syntect::highlighting::{ThemeSet, Theme};
 use syntect::easy::HighlightLines;
 
 thread_local! {
-    pub static SYNTAX_SET: SyntaxSet = {
+    /// A pair of the set and whether extras have been added to it.
+    pub static SYNTAX_SET: RefCell<(SyntaxSet, bool)> = {
         let mut ss: SyntaxSet = from_binary(include_bytes!("../../../sublime_syntaxes/newlines.packdump"));
         ss.link_syntaxes();
-        ss
+        RefCell::new((ss, false))
     };
 }
 
@@ -19,14 +24,22 @@ lazy_static! {
     pub static ref THEME_SET: ThemeSet = from_binary(include_bytes!("../../../sublime_themes/all.themedump"));
 }
 
+pub fn get_highlighter<'a>(theme: &'a Theme, info: &str, base_path: &Path, extra_syntaxes: &[String]) -> Result<HighlightLines<'a>, LoadingError> {
+    SYNTAX_SET.with(|rc| {
+        let (ss, extras_added) = &mut *rc.borrow_mut();
+        if !*extras_added {
+            for dir in extra_syntaxes {
+                ss.load_syntaxes(base_path.join(dir), true)?;
+            }
+            ss.link_syntaxes();
+            *extras_added = true;
+        }
 
-pub fn get_highlighter<'a>(theme: &'a Theme, info: &str) -> HighlightLines<'a> {
-    SYNTAX_SET.with(|ss| {
         let syntax = info
             .split(' ')
             .next()
             .and_then(|lang| ss.find_syntax_by_token(lang))
             .unwrap_or_else(|| ss.find_syntax_plain_text());
-        HighlightLines::new(syntax, theme)
+        Ok(HighlightLines::new(syntax, theme))
     })
 }
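These two hunks are the core of the PR: the thread-local `SYNTAX_SET` becomes a `RefCell<(SyntaxSet, bool)>`, and `get_highlighter` loads the user's extra syntax directories at most once per thread before doing the usual lookup. A minimal standalone sketch of that "extend once, then reuse" pattern, with a plain `Vec<String>` standing in for syntect's `SyntaxSet` (the `REGISTRY` and `lookup` names are illustrative only, not part of the codebase):

```rust
use std::cell::RefCell;
use std::path::Path;

thread_local! {
    // The bool records whether the extra entries have already been merged in.
    static REGISTRY: RefCell<(Vec<String>, bool)> =
        RefCell::new((vec!["rust".to_string()], false));
}

fn lookup(token: &str, base_path: &Path, extras: &[String]) -> Option<String> {
    REGISTRY.with(|rc| {
        let (known, extras_added) = &mut *rc.borrow_mut();
        if !*extras_added {
            // First call on this thread: fold the user-supplied directories in once.
            for dir in extras {
                known.push(base_path.join(dir).display().to_string());
            }
            *extras_added = true;
        }
        known.iter().find(|entry| entry.contains(token)).cloned()
    })
}

fn main() {
    let extras = vec!["syntaxes".to_string()];
    println!("{:?}", lookup("syntaxes", Path::new("/tmp/site"), &extras));
    // Second call reuses the already-extended set without touching `extras` again.
    println!("{:?}", lookup("rust", Path::new("/tmp/site"), &extras));
}
```

Keeping the "already loaded" flag next to the set means repeated highlight requests on the same thread skip the filesystem entirely, at the cost of one set (and one load) per thread.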
@@ -1,4 +1,5 @@
 use std::collections::HashMap;
+use std::path::Path;
 
 use tera::{Tera, Context};
 use front_matter::InsertAnchor;
@@ -13,6 +14,7 @@ pub struct RenderContext<'a> {
     pub tera_context: Context,
     pub current_page_permalink: &'a str,
     pub permalinks: &'a HashMap<String, String>,
+    pub base_path: &'a Path,
     pub insert_anchor: InsertAnchor,
 }
 
@@ -22,6 +24,7 @@ impl<'a> RenderContext<'a> {
         config: &'a Config,
         current_page_permalink: &'a str,
         permalinks: &'a HashMap<String, String>,
+        base_path: &'a Path,
         insert_anchor: InsertAnchor,
     ) -> RenderContext<'a> {
         let mut tera_context = Context::new();
@@ -32,6 +35,7 @@ impl<'a> RenderContext<'a> {
             current_page_permalink,
             permalinks,
             insert_anchor,
+            base_path,
             config,
         }
     }
@@ -97,7 +97,13 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<(Strin
                 }
 
                 let theme = &THEME_SET.themes[&context.config.highlight_theme];
-                highlighter = Some(get_highlighter(&theme, info));
+                match get_highlighter(&theme, info, context.base_path, &context.config.extra_syntaxes) {
+                    Ok(h) => highlighter = Some(h),
+                    Err(err) => {
+                        error = Some(format!("Could not load syntax: {}", err).into());
+                        return Event::Html(Owned(String::new()));
+                    }
+                }
                 let snippet = start_coloured_html_snippet(theme);
                 Event::Html(Owned(snippet))
             }
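Because `get_highlighter` is now fallible, the event-mapping closure in `markdown_to_html` cannot return a `Result` directly; the hunk above records the first failure in the surrounding `error` slot and emits empty HTML for that event. A minimal sketch of that "stash the first error from inside a non-`Result` closure" pattern; the event strings and `render` function here are made up for illustration:

```rust
// Walk a stream of "events", collecting HTML; if any event fails, remember the
// first error and keep producing placeholder output so the iteration can finish.
fn render(events: &[&str]) -> Result<String, String> {
    let mut error: Option<String> = None;

    let html: String = events
        .iter()
        .map(|ev| {
            if *ev == "bad-syntax" {
                // The closure must still yield a value, so record the error
                // and emit nothing for this event.
                error = Some(format!("Could not load syntax: {}", ev));
                return String::new();
            }
            format!("<p>{}</p>", ev)
        })
        .collect();

    match error {
        Some(e) => Err(e),
        None => Ok(html),
    }
}

fn main() {
    assert!(render(&["hello", "bad-syntax"]).is_err());
    println!("{:?}", render(&["hello", "world"]));
}
```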
@@ -258,6 +258,7 @@ impl Site {
         let permalinks = &self.permalinks;
         let tera = &self.tera;
         let config = &self.config;
+        let base_path = &self.base_path;
 
         // TODO: avoid the duplication with function above for that part
         // This is needed in the first place because of silly borrow checker
@@ -269,13 +270,13 @@ impl Site {
         self.pages.par_iter_mut()
             .map(|(_, page)| {
                 let insert_anchor = pages_insert_anchors[&page.file.path];
-                page.render_markdown(permalinks, tera, config, insert_anchor)
+                page.render_markdown(permalinks, tera, config, base_path, insert_anchor)
             })
             .fold(|| Ok(()), Result::and)
             .reduce(|| Ok(()), Result::and)?;
 
         self.sections.par_iter_mut()
-            .map(|(_, section)| section.render_markdown(permalinks, tera, config))
+            .map(|(_, section)| section.render_markdown(permalinks, tera, config, base_path))
             .fold(|| Ok(()), Result::and)
             .reduce(|| Ok(()), Result::and)?;
 
@@ -318,7 +319,7 @@ impl Site {
         if render {
             let insert_anchor = self.find_parent_section_insert_anchor(&self.pages[&path].file.parent);
             let page = self.pages.get_mut(&path).unwrap();
-            page.render_markdown(&self.permalinks, &self.tera, &self.config, insert_anchor)?;
+            page.render_markdown(&self.permalinks, &self.tera, &self.config, &self.base_path, insert_anchor)?;
         }
 
         Ok(prev)
@@ -335,7 +336,7 @@ impl Site {
 
         if render {
             let section = self.sections.get_mut(&path).unwrap();
-            section.render_markdown(&self.permalinks, &self.tera, &self.config)?;
+            section.render_markdown(&self.permalinks, &self.tera, &self.config, &self.base_path)?;
         }
 
         Ok(prev)
@@ -19,7 +19,7 @@ fn can_parse_site() {
     site.load().unwrap();
 
     // Correct number of pages (sections are pages too)
-    assert_eq!(site.pages.len(), 14);
+    assert_eq!(site.pages.len(), 15);
     let posts_path = path.join("content").join("posts");
 
     // Make sure we remove all the pwd + content from the sections
@@ -44,7 +44,7 @@ fn can_parse_site() {
 
     let posts_section = &site.sections[&posts_path.join("_index.md")];
     assert_eq!(posts_section.subsections.len(), 1);
-    assert_eq!(posts_section.pages.len(), 6);
+    assert_eq!(posts_section.pages.len(), 7);
 
     let tutorials_section = &site.sections[&posts_path.join("tutorials").join("_index.md")];
     assert_eq!(tutorials_section.subsections.len(), 2);
@@ -321,22 +321,41 @@ fn can_build_site_with_pagination_for_section() {
         "posts/page/1/index.html",
         "http-equiv=\"refresh\" content=\"0;url=https://replace-this-with-your-url.com/posts/\""
     ));
-    assert!(file_contains!(public, "posts/index.html", "Num pagers: 3"));
+    assert!(file_contains!(public, "posts/index.html", "Num pagers: 4"));
     assert!(file_contains!(public, "posts/index.html", "Page size: 2"));
     assert!(file_contains!(public, "posts/index.html", "Current index: 1"));
+    assert!(!file_contains!(public, "posts/index.html", "has_prev"));
     assert!(file_contains!(public, "posts/index.html", "has_next"));
     assert!(file_contains!(public, "posts/index.html", "First: https://replace-this-with-your-url.com/posts/"));
-    assert!(file_contains!(public, "posts/index.html", "Last: https://replace-this-with-your-url.com/posts/page/3/"));
+    assert!(file_contains!(public, "posts/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
     assert_eq!(file_contains!(public, "posts/index.html", "has_prev"), false);
 
     assert!(file_exists!(public, "posts/page/2/index.html"));
-    assert!(file_contains!(public, "posts/page/2/index.html", "Num pagers: 3"));
+    assert!(file_contains!(public, "posts/page/2/index.html", "Num pagers: 4"));
     assert!(file_contains!(public, "posts/page/2/index.html", "Page size: 2"));
     assert!(file_contains!(public, "posts/page/2/index.html", "Current index: 2"));
     assert!(file_contains!(public, "posts/page/2/index.html", "has_prev"));
     assert!(file_contains!(public, "posts/page/2/index.html", "has_next"));
     assert!(file_contains!(public, "posts/page/2/index.html", "First: https://replace-this-with-your-url.com/posts/"));
-    assert!(file_contains!(public, "posts/page/2/index.html", "Last: https://replace-this-with-your-url.com/posts/page/3/"));
+    assert!(file_contains!(public, "posts/page/2/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
+
+    assert!(file_exists!(public, "posts/page/3/index.html"));
+    assert!(file_contains!(public, "posts/page/3/index.html", "Num pagers: 4"));
+    assert!(file_contains!(public, "posts/page/3/index.html", "Page size: 2"));
+    assert!(file_contains!(public, "posts/page/3/index.html", "Current index: 3"));
+    assert!(file_contains!(public, "posts/page/3/index.html", "has_prev"));
+    assert!(file_contains!(public, "posts/page/3/index.html", "has_next"));
+    assert!(file_contains!(public, "posts/page/3/index.html", "First: https://replace-this-with-your-url.com/posts/"));
+    assert!(file_contains!(public, "posts/page/3/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
+
+    assert!(file_exists!(public, "posts/page/4/index.html"));
+    assert!(file_contains!(public, "posts/page/4/index.html", "Num pagers: 4"));
+    assert!(file_contains!(public, "posts/page/4/index.html", "Page size: 2"));
+    assert!(file_contains!(public, "posts/page/4/index.html", "Current index: 4"));
+    assert!(file_contains!(public, "posts/page/4/index.html", "has_prev"));
+    assert!(!file_contains!(public, "posts/page/4/index.html", "has_next"));
+    assert!(file_contains!(public, "posts/page/4/index.html", "First: https://replace-this-with-your-url.com/posts/"));
+    assert!(file_contains!(public, "posts/page/4/index.html", "Last: https://replace-this-with-your-url.com/posts/page/4/"));
 }
 
 #[test]
@@ -397,10 +416,10 @@ fn can_build_rss_feed() {
 
     assert!(Path::new(&public).exists());
     assert!(file_exists!(public, "rss.xml"));
-    // latest article is posts/simple.md
+    // latest article is posts/extra-syntax.md
+    assert!(file_contains!(public, "rss.xml", "Extra Syntax"));
+    // Next is posts/simple.md
     assert!(file_contains!(public, "rss.xml", "Simple article with shortcodes"));
-    // Next is posts/python.md
-    assert!(file_contains!(public, "rss.xml", "Python in posts"));
 }
 
@@ -420,3 +439,20 @@ fn can_build_search_index() {
     assert!(file_exists!(public, "elasticlunr.min.js"));
     assert!(file_exists!(public, "search_index.en.js"));
 }
+
+#[test]
+fn can_build_with_extra_syntaxes() {
+    let mut path = env::current_dir().unwrap().parent().unwrap().parent().unwrap().to_path_buf();
+    path.push("test_site");
+    let mut site = Site::new(&path, "config.toml").unwrap();
+    site.load().unwrap();
+    let tmp_dir = tempdir().expect("create temp dir");
+    let public = &tmp_dir.path().join("public");
+    site.set_output_path(&public);
+    site.build().unwrap();
+
+    assert!(&public.exists());
+    assert!(file_exists!(public, "posts/extra-syntax/index.html"));
+    assert!(file_contains!(public, "posts/extra-syntax/index.html",
+        r#"<span style="background-color:#2b303b;color:#d08770;">test</span>"#));
+}
@@ -106,3 +106,23 @@ Here is a full list of the supported languages and the short names you can use:
 ```
 
 If you want to highlight a language not on that list, please open an issue or a pull request on the [Gutenberg repo](https://github.com/Keats/gutenberg).
+Alternatively, the `extra_syntaxes` config option can be used to add additional syntax files.
+
+If your site source is laid out as follows:
+
+```
+.
+├── config.toml
+├── content/
+│   └── ...
+├── static/
+│   └── ...
+├── syntaxes/
+│   ├── Sublime-Language1/
+│   │   └── lang1.sublime-syntax
+│   └── lang2.sublime-syntax
+└── templates/
+    └── ...
+```
+
+you would set your `extra_syntaxes` to `["syntaxes", "syntaxes/Sublime-Language1"]` in order to load `lang1.sublime-syntax` and `lang2.sublime-syntax`.
@@ -70,6 +70,9 @@ check_external_links = false
 # ignored_content = ["*.{graphml,xlsx}", "temp.*"]
 ignored_content = []
 
+# A list of directories to search for additional `.sublime-syntax` files in.
+extra_syntaxes = []
+
 # Optional translation object. The key if present should be a language code
 [translations]
 
@@ -10,5 +10,7 @@ taxonomies = [
     {name = "categories", rss = true},
 ]
 
+extra_syntaxes = ["syntaxes"]
+
 [extra.author]
 name = "Vincent Prouillet"
test_site/content/posts/extra_syntax.md (new file, 9 lines)
@@ -0,0 +1,9 @@
++++
+title = "Extra Syntax"
+description = ""
+date = 2018-08-14
++++
+
+```test-syntax
+This is a test code snippet.
+```
test_site/syntaxes/test.sublime-syntax (new file, 10 lines)
@@ -0,0 +1,10 @@
+%YAML 1.2
+---
+file_extensions:
+  - test-syntax
+scope: source.test
+
+contexts:
+  main:
+    - match: "test"
+      scope: constant.language.test