cargo fmt

Vincent Prouillet 2021-02-02 20:49:57 +01:00
parent 34989a8ef6
commit 024144ba78
8 changed files with 108 additions and 61 deletions

View file

@@ -24,7 +24,10 @@ pub enum HighlightSource {
 }
 
 /// Returns the highlighter and whether it was found in the extra or not
-pub fn get_highlighter(language: Option<&str>, config: &Config) -> (HighlightLines<'static>, HighlightSource) {
+pub fn get_highlighter(
+    language: Option<&str>,
+    config: &Config,
+) -> (HighlightLines<'static>, HighlightSource) {
     let theme = &THEME_SET.themes[config.highlight_theme()];
 
     if let Some(ref lang) = language {
@@ -41,7 +44,10 @@ pub fn get_highlighter(language: Option<&str>, config: &Config) -> (HighlightLin
         if let Some(syntax) = SYNTAX_SET.find_syntax_by_token(hacked_lang) {
             (HighlightLines::new(syntax, theme), HighlightSource::Theme)
         } else {
-            (HighlightLines::new(SYNTAX_SET.find_syntax_plain_text(), theme), HighlightSource::NotFound)
+            (
+                HighlightLines::new(SYNTAX_SET.find_syntax_plain_text(), theme),
+                HighlightSource::NotFound,
+            )
         }
     } else {
         (HighlightLines::new(SYNTAX_SET.find_syntax_plain_text(), theme), HighlightSource::Plain)

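For orientation (not part of the commit): the pair returned by `get_highlighter` is typically consumed along these lines. The helper below is hypothetical; `Config` comes from Zola's config crate and `HighlightLines` from syntect.

```rust
use config::Config;
use syntect::easy::HighlightLines;

// Hypothetical caller, assuming the items from the hunk above are in scope.
fn highlighter_for_block(lang: Option<&str>, config: &Config) -> HighlightLines<'static> {
    let (highlighter, source) = get_highlighter(lang, config);
    if let HighlightSource::NotFound = source {
        // The fenced language was not recognised, so plain text is used instead.
        eprintln!("Warning: no syntax found for {:?}", lang);
    }
    highlighter
}
```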
View file

@@ -6,7 +6,9 @@ use slotmap::{DefaultKey, DenseSlotMap};
 use front_matter::SortBy;
 
 use crate::content::{Page, Section};
-use crate::sorting::{find_siblings, sort_pages_by_date, sort_pages_by_title, sort_pages_by_weight};
+use crate::sorting::{
+    find_siblings, sort_pages_by_date, sort_pages_by_title, sort_pages_by_weight,
+};
 use config::Config;
 
 // Like vec! but for HashSet

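The `// Like vec! but for HashSet` comment above refers to a macro whose body sits outside this hunk. As a rough sketch of that pattern only (the macro name and body here are illustrative, not the one in library.rs):

```rust
use std::collections::HashSet;

// Illustrative vec!-style constructor for HashSet.
macro_rules! set {
    ($($item:expr),* $(,)?) => {{
        let mut s = HashSet::new();
        $(s.insert($item);)*
        s
    }};
}

fn main() {
    let tags: HashSet<&str> = set!["rust", "zola", "ssg"];
    assert!(tags.contains("zola"));
}
```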
View file

@@ -173,22 +173,23 @@ mod tests {
             "meter",
             "track_1",
         ];
-        let pages: Vec<Page> = titles.iter().map(
-            |title| create_page_with_title(title)
-        ).collect();
+        let pages: Vec<Page> = titles.iter().map(|title| create_page_with_title(title)).collect();
         let mut dense = DenseSlotMap::new();
-        let keys: Vec<_> = pages.iter().map(
-            |p| dense.insert(p)
-        ).collect();
-        let input: Vec<_> = pages.iter().enumerate().map(
-            |(i, page)| (&keys[i], page.meta.title.as_deref(), page.permalink.as_ref())
-        ).collect();
+        let keys: Vec<_> = pages.iter().map(|p| dense.insert(p)).collect();
+        let input: Vec<_> = pages
+            .iter()
+            .enumerate()
+            .map(|(i, page)| (&keys[i], page.meta.title.as_deref(), page.permalink.as_ref()))
+            .collect();
         let (sorted, _) = sort_pages_by_title(input);
         // Should be sorted by title
-        let sorted_titles: Vec<_> = sorted.iter().map(
-            |key| dense.get(*key).unwrap().meta.title.as_ref().unwrap()
-        ).collect();
-        assert_eq!(sorted_titles, vec![
+        let sorted_titles: Vec<_> = sorted
+            .iter()
+            .map(|key| dense.get(*key).unwrap().meta.title.as_ref().unwrap())
+            .collect();
+        assert_eq!(
+            sorted_titles,
+            vec![
                 "bagel",
                 "BART",
                 "μ-kernel",
@@ -199,7 +200,8 @@ mod tests {
                 "track_3",
                 "track_13",
                 "Underground",
-        ]);
+            ]
+        );
     }
 
     #[test]

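For readers unfamiliar with the slotmap setup in this test: pages live in a `DenseSlotMap`, the sort functions shuffle only the keys, and titles are read back through the map. A standalone sketch of that pattern (not the real `sort_pages_by_title`):

```rust
use slotmap::DenseSlotMap;

fn main() {
    let mut pages = DenseSlotMap::new();
    let keys: Vec<_> =
        ["bagel", "BART", "μ-kernel"].iter().map(|title| pages.insert(*title)).collect();
    // A sort would reorder `keys`; the values themselves never move.
    let titles: Vec<&str> = keys.iter().map(|k| pages[*k]).collect();
    assert_eq!(titles, vec!["bagel", "BART", "μ-kernel"]);
}
```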
View file

@@ -242,7 +242,18 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
                     fence_info,
                     &context.config,
                     IncludeBackground::IfDifferent(color),
-                    context.tera_context.get("page").or(context.tera_context.get("section")).map(|x| x.as_object().unwrap().get("relative_path").unwrap().as_str().unwrap())
+                    context
+                        .tera_context
+                        .get("page")
+                        .or(context.tera_context.get("section"))
+                        .map(|x| {
+                            x.as_object()
+                                .unwrap()
+                                .get("relative_path")
+                                .unwrap()
+                                .as_str()
+                                .unwrap()
+                        }),
                 ));
             }
         };

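The accessor chain above pulls `relative_path` out of whichever of `page` or `section` is present in the Tera context. Minus the unwraps, it is roughly equivalent to this Option-based helper (the function name is made up; the field names match the hunk):

```rust
use tera::{Context, Value};

// Hypothetical helper mirroring the chain in the hunk above.
fn current_relative_path(ctx: &Context) -> Option<&str> {
    ctx.get("page")
        .or_else(|| ctx.get("section"))
        .and_then(|v| v.get("relative_path"))
        .and_then(Value::as_str)
}
```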
View file

@@ -22,7 +22,12 @@ pub struct CodeBlock<'config> {
 }
 
 impl<'config> CodeBlock<'config> {
-    pub fn new(fence_info: &str, config: &'config Config, background: IncludeBackground, path: Option<&'config str>) -> Self {
+    pub fn new(
+        fence_info: &str,
+        config: &'config Config,
+        background: IncludeBackground,
+        path: Option<&'config str>,
+    ) -> Self {
         let fence_info = FenceSettings::new(fence_info);
         let theme = &THEME_SET.themes[config.highlight_theme()];
         let (highlighter, highlight_source) = get_highlighter(fence_info.language, config);
@@ -37,7 +42,7 @@ impl<'config> CodeBlock<'config> {
                     eprintln!("Warning: Highlight language {} not found", lang);
                 }
                 None
-            },
+            }
             _ => None,
         };
         Self {

View file

@@ -50,7 +50,10 @@ pub fn load_tera(path: &Path, config: &Config) -> Result<Tera> {
 
 /// Adds global fns that are to be available to shortcodes while rendering markdown
 pub fn register_early_global_fns(site: &mut Site) {
-    site.tera.register_filter("markdown", filters::MarkdownFilter::new(site.config.clone(), site.permalinks.clone()));
+    site.tera.register_filter(
+        "markdown",
+        filters::MarkdownFilter::new(site.config.clone(), site.permalinks.clone()),
+    );
 
     site.tera.register_function(
         "get_url",

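`register_filter` accepts anything implementing `tera::Filter`, including plain functions with the signature below. A hypothetical filter registered the same way as the `markdown` filter above:

```rust
use std::collections::HashMap;
use tera::{Context, Result, Tera, Value};

// Illustrative filter: upper-cases a string value.
fn shout(value: &Value, _args: &HashMap<String, Value>) -> Result<Value> {
    Ok(Value::String(value.as_str().unwrap_or_default().to_uppercase()))
}

fn main() -> Result<()> {
    let mut tera = Tera::default();
    tera.register_filter("shout", shout);
    tera.add_raw_template("t", "{{ val | shout }}")?;

    let mut ctx = Context::new();
    ctx.insert("val", "hello");
    assert_eq!(tera.render("t", &ctx)?, "HELLO");
    Ok(())
}
```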
View file

@@ -1,6 +1,6 @@
+use std::borrow::Cow;
 use std::collections::HashMap;
 use std::hash::BuildHasher;
-use std::borrow::Cow;
 
 use base64::{decode, encode};
 use config::Config;
@@ -120,13 +120,14 @@ mod tests {
 
         config.markdown.external_links_target_blank = true;
         let md = "Hello <https://google.com> :smile: ...";
-        let result =
-            MarkdownFilter::new(config.clone(), HashMap::new()).filter(&to_value(&md).unwrap(), &HashMap::new());
+        let result = MarkdownFilter::new(config.clone(), HashMap::new())
+            .filter(&to_value(&md).unwrap(), &HashMap::new());
         assert!(result.is_ok());
         assert_eq!(result.unwrap(), to_value(&"<p>Hello <a rel=\"noopener\" target=\"_blank\" href=\"https://google.com\">https://google.com</a> 😄 …</p>\n").unwrap());
 
         let md = "```py\ni=0\n```";
-        let result = MarkdownFilter::new(config, HashMap::new()).filter(&to_value(&md).unwrap(), &HashMap::new());
+        let result = MarkdownFilter::new(config, HashMap::new())
+            .filter(&to_value(&md).unwrap(), &HashMap::new());
         assert!(result.is_ok());
         assert!(result.unwrap().as_str().unwrap().contains("<pre style"));
     }
@@ -136,9 +137,13 @@ mod tests {
         let mut permalinks = HashMap::new();
         permalinks.insert("blog/_index.md".to_string(), "/foo/blog".to_string());
         let md = "Hello. Check out [my blog](@/blog/_index.md)!";
-        let result = MarkdownFilter::new(Config::default(), permalinks).filter(&to_value(&md).unwrap(), &HashMap::new());
+        let result = MarkdownFilter::new(Config::default(), permalinks)
+            .filter(&to_value(&md).unwrap(), &HashMap::new());
         assert!(result.is_ok());
-        assert_eq!(result.unwrap(), to_value(&"<p>Hello. Check out <a href=\"/foo/blog\">my blog</a>!</p>\n").unwrap());
+        assert_eq!(
+            result.unwrap(),
+            to_value(&"<p>Hello. Check out <a href=\"/foo/blog\">my blog</a>!</p>\n").unwrap()
+        );
     }
 
     #[test]

View file

@@ -190,7 +190,15 @@ impl LoadData {
 
 impl TeraFn for LoadData {
     fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
-        let required = if let Some(req) = optional_arg!(bool, args.get("required"), "`load_data`: `required` must be a boolean (true or false)") { req } else { true };
+        let required = if let Some(req) = optional_arg!(
+            bool,
+            args.get("required"),
+            "`load_data`: `required` must be a boolean (true or false)"
+        ) {
+            req
+        } else {
+            true
+        };
         let path_arg = optional_arg!(String, args.get("path"), GET_DATA_ARGUMENT_ERROR_MESSAGE);
         let url_arg = optional_arg!(String, args.get("url"), GET_DATA_ARGUMENT_ERROR_MESSAGE);
         let data_source = DataSource::from_args(path_arg.clone(), url_arg, &self.base_path)?;
@@ -198,13 +206,19 @@ impl TeraFn for LoadData {
         // If the file doesn't exist, source is None
         match (&data_source, required) {
             // If the file was not required, return a Null value to the template
-            (None, false) => { return Ok(Value::Null); },
+            (None, false) => {
+                return Ok(Value::Null);
+            }
             // If the file was required, error
             (None, true) => {
                 // source is None only with path_arg (not URL), so path_arg is safely unwrap
-                return Err(format!("{} doesn't exist", &self.base_path.join(path_arg.unwrap()).display()).into());
-            },
-            _ => {},
+                return Err(format!(
+                    "{} doesn't exist",
+                    &self.base_path.join(path_arg.unwrap()).display()
+                )
+                .into());
+            }
+            _ => {}
         }
         let data_source = data_source.unwrap();
         let file_format = get_output_format_from_args(&args, &data_source)?;
@@ -223,11 +237,11 @@ impl TeraFn for LoadData {
                     .get(url.as_str())
                     .header(header::ACCEPT, file_format.as_accept_header())
                     .send()
-                    .and_then(|res| res.error_for_status()) {
-                    Ok(r) => {
-                        r.text()
-                            .map_err(|e| format!("Failed to parse response from {}: {:?}", url, e).into())
-                    },
+                    .and_then(|res| res.error_for_status())
+                {
+                    Ok(r) => r.text().map_err(|e| {
+                        format!("Failed to parse response from {}: {:?}", url, e).into()
+                    }),
                     Err(e) => {
                         if !required {
                             // HTTP error is discarded (because required=false) and
@@ -237,10 +251,11 @@ impl TeraFn for LoadData {
                         Err(match e.status() {
                             Some(status) => format!("Failed to request {}: {}", url, status),
                             None => format!("Could not get response status for url: {}", url),
-                        }.into())
+                        }
+                        .into())
                     }
                 }
-            },
+            }
             // Now that we have discarded recoverable errors, we can unwrap the result
         }?;
@@ -542,7 +557,6 @@ mod tests {
         assert_eq!(result.unwrap(), tera::Value::Null);
     }
 
-
     #[test]
     fn set_default_user_agent() {
         let user_agent = concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"));
@@ -693,7 +707,6 @@ mod tests {
         }
     }
 
-
    #[test]
     fn can_load_json() {
         let static_fn = LoadData::new(PathBuf::from("../utils/test-files"));
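The `required` handling reformatted at the top of this file boils down to: read an optional boolean argument, default to true, and error when the value is not a boolean. A standalone sketch of that logic without the `optional_arg!` macro (names here are illustrative):

```rust
use std::collections::HashMap;
use tera::Value;

// Hypothetical re-statement of the `required` extraction in `LoadData::call`.
fn required_flag(args: &HashMap<String, Value>) -> Result<bool, String> {
    match args.get("required") {
        None => Ok(true), // missing argument: the data source is required by default
        Some(v) => v.as_bool().ok_or_else(|| {
            "`load_data`: `required` must be a boolean (true or false)".to_string()
        }),
    }
}

fn main() {
    let mut args = HashMap::new();
    assert_eq!(required_flag(&args), Ok(true));
    args.insert("required".to_string(), Value::Bool(false));
    assert_eq!(required_flag(&args), Ok(false));
}
```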