commit aa57541c21 (parent cf86e93986)
@@ -2,7 +2,11 @@
 ## unreleased
 
-### Features
+### Breaking
+
+- Newlines are now required after the closing `+++` of front-matter
+
+### Other
 
 - internal links are now resolved in the `markdown` filter in the templates (#1296 #1316)
 - Add a `required` argument to `load_data` so it can be allowed to fail
 
@@ -15,9 +15,9 @@ pub use section::SectionFrontMatter;
 lazy_static! {
     static ref TOML_RE: Regex =
-        Regex::new(r"^[[:space:]]*\+\+\+(\r?\n(?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
+        Regex::new(r"^[[:space:]]*\+\+\+(\r?\n(?s).*?(?-s))\+\+\+\r?\n((?s).*(?-s))$").unwrap();
     static ref YAML_RE: Regex =
-        Regex::new(r"^[[:space:]]*---(\r?\n(?s).*?(?-s))---\r?\n?((?s).*(?-s))$").unwrap();
+        Regex::new(r"^[[:space:]]*---(\r?\n(?s).*?(?-s))---\r?\n((?s).*(?-s))$").unwrap();
 }
 
 pub enum RawFrontMatter<'a> {
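A minimal sketch (not part of this commit) of what the regex change enforces: with the `?` dropped after the closing delimiter's `\r?\n`, a newline after the final `+++` is now mandatory, so front-matter that ends exactly at `+++` with no trailing newline no longer splits. The pattern is copied from the new `TOML_RE` above; the standalone program and direct `regex` crate usage around it are assumptions for illustration only.

```rust
// Illustration only: exercise the new TOML_RE pattern from the hunk above.
// Assumes the `regex` crate (the same one front_matter uses) is available.
use regex::Regex;

fn main() {
    let toml_re = Regex::new(
        r"^[[:space:]]*\+\+\+(\r?\n(?s).*?(?-s))\+\+\+\r?\n((?s).*(?-s))$",
    )
    .unwrap();

    // A newline follows the closing `+++`: the front-matter still splits.
    assert!(toml_re.is_match("+++\ntitle = \"Title\"\n+++\n"));

    // The file ends right at the closing `+++`: no longer accepted,
    // which is why the tests below now end their raw strings with a newline.
    assert!(!toml_re.is_match("+++\ntitle = \"Title\"\n+++"));

    println!("trailing newline after the closing +++ is now required");
}
```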
@@ -174,13 +174,15 @@ Hello
 title = "Title"
 description = "hey there"
 date = 2002-10-12
-+++"#; "toml")]
++++
+"#; "toml")]
 #[test_case(r#"
 ---
 title: Title
 description: hey there
 date: 2002-10-12
----"#; "yaml")]
+---
+"#; "yaml")]
 fn can_split_content_with_only_frontmatter_valid(content: &str) {
     let (front_matter, content) = split_page_content(Path::new(""), content).unwrap();
     assert_eq!(content, "");
@@ -490,7 +490,7 @@ Hello world"#;
         let mut config = Config::default();
         config.slugify.paths = SlugifyStrategy::On;
         let res =
-            Page::parse(Path::new(" file with space.md"), "+++\n+++", &config, &PathBuf::new());
+            Page::parse(Path::new(" file with space.md"), "+++\n+++\n", &config, &PathBuf::new());
         assert!(res.is_ok());
         let page = res.unwrap();
         assert_eq!(page.slug, "file-with-space");
@@ -501,7 +501,7 @@ Hello world"#;
     fn can_make_path_from_utf8_filename() {
         let mut config = Config::default();
         config.slugify.paths = SlugifyStrategy::Safe;
-        let res = Page::parse(Path::new("日本.md"), "+++\n++++", &config, &PathBuf::new());
+        let res = Page::parse(Path::new("日本.md"), "+++\n+++\n", &config, &PathBuf::new());
         assert!(res.is_ok());
         let page = res.unwrap();
         assert_eq!(page.slug, "日本");
@@ -255,8 +255,7 @@ impl TeraFn for LoadData {
                     .into())
                 }
             }
-        }
-        // Now that we have discarded recoverable errors, we can unwrap the result
+        } // Now that we have discarded recoverable errors, we can unwrap the result
         }?;
 
         let result_value: Result<Value> = match file_format {