Merge pull request #47 from Keats/sorting
Allow sorting pages by order and date
commit f322672f08
CHANGELOG.md

@@ -5,6 +5,8 @@
 - Fix XML templates overriding and reloading
 - `title` and `description` are now optional in the front matter
 - Add GenericConfig, Vim syntax
+- Add `_index.md` for homepage as well
+- Allow sorting by `none`, `date` and `order` for sections
 
 ## 0.0.4 (2017-04-23)
 
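With this change a section opts into a sort method through its `_index.md` front matter and its pages supply the matching field, as the test site added by this PR does. A minimal sketch of the intended usage (file names and titles below are illustrative, not taken from the PR):

    content/posts/_index.md:
    +++
    title = "Posts"
    sort_by = "order"
    +++

    content/posts/some-page.md:
    +++
    title = "Some page"
    order = 2
    +++

Pages in that section are then sorted by `order`, highest first, and when `sort_by` is omitted the section falls back to sorting by `date`.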
Cargo.lock (generated, 6 lines changed)
@@ -21,7 +21,7 @@ dependencies = [
  "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "tera 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "term-painter 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "toml 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.0 (git+https://github.com/alexcrichton/toml-rs)",
  "walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "ws 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -895,7 +895,7 @@ dependencies = [
 [[package]]
 name = "toml"
 version = "0.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
+source = "git+https://github.com/alexcrichton/toml-rs#95b3545938f67ca98d313be5c9c8930ee2407a30"
 dependencies = [
  "serde 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -1172,7 +1172,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4437c97558c70d129e40629a5b385b3fb1ffac301e63941335e4d354081ec14a"
 "checksum thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c85048c6260d17cf486ceae3282d9fb6b90be220bf5b28c400f5485ffc29f0c7"
 "checksum time 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "ffd7ccbf969a892bf83f1e441126968a07a3941c24ff522a26af9f9f4585d1a3"
-"checksum toml 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3063405db158de3dce8efad5fc89cf1baffb9501a3647dc9505ba109694ce31f"
+"checksum toml 0.4.0 (git+https://github.com/alexcrichton/toml-rs)" = "<none>"
 "checksum traitobject 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "efd1f82c56340fdf16f2a953d7bda4f8fdffba13d93b00844c25572110b26079"
 "checksum typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887"
 "checksum typemap 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "653be63c80a3296da5551e1bfd2cca35227e13cdd08c6668903ae2f4f77aa1f6"
Cargo.toml

@@ -28,7 +28,8 @@ tera = "0.10"
 slug = "0.1"
 syntect = { version = "1", features = ["static-onig"] }
 chrono = "0.3"
-toml = "0.4"
+# toml = "0.4"
+toml = { git = "https://github.com/alexcrichton/toml-rs" }
 term-painter = "0.2"
 base64 = "0.5"
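The dependency is pointed at the toml-rs git repository, presumably to pick up changes not yet published to crates.io, and the lockfile above resolves it to commit 95b3545938f67ca98d313be5c9c8930ee2407a30. As a hedged aside (not part of this PR), the same commit could also be pinned directly in Cargo.toml with Cargo's `rev` key, so builds without the lockfile stay reproducible:

    toml = { git = "https://github.com/alexcrichton/toml-rs", rev = "95b3545938f67ca98d313be5c9c8930ee2407a30" }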
@@ -44,5 +44,5 @@ fn bench_populate_previous_and_next_pages(b: &mut test::Bencher) {
     let mut pages = site.pages.values().cloned().collect::<Vec<_>>();
     pages.sort_by(|a, b| a.partial_cmp(b).unwrap());
 
-    b.iter(|| populate_previous_and_next_pages(pages.as_slice(), false));
+    b.iter(|| populate_previous_and_next_pages(pages.as_slice()));
 }
src/front_matter.rs

@@ -14,6 +14,13 @@ lazy_static! {
     static ref PAGE_RE: Regex = Regex::new(r"^\r?\n?\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
 }
 
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum SortBy {
+    Date,
+    Order,
+    None,
+}
+
 /// The front matter of every page
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]

@@ -37,6 +44,11 @@ pub struct FrontMatter {
     pub draft: Option<bool>,
     /// Only one category allowed
     pub category: Option<String>,
+    /// Whether to sort by "date", "order" or "none"
+    #[serde(skip_serializing)]
+    pub sort_by: Option<SortBy>,
+    /// Integer to use to order content. Lowest is at the bottom, highest first
+    pub order: Option<usize>,
     /// Optional template, if we want to specify which template to render for that page
     #[serde(skip_serializing)]
     pub template: Option<String>,

@@ -71,7 +83,7 @@ impl FrontMatter {
     }
 
     /// Converts the date in the front matter, which can be in 2 formats, into a NaiveDateTime
-    pub fn parse_date(&self) -> Option<NaiveDateTime> {
+    pub fn date(&self) -> Option<NaiveDateTime> {
         match self.date {
             Some(ref d) => {
                 if d.contains('T') {

@@ -83,12 +95,40 @@ impl FrontMatter {
             None => None,
         }
     }
 
+    pub fn order(&self) -> usize {
+        self.order.unwrap()
+    }
+
+    pub fn sort_by(&self) -> SortBy {
+        match self.sort_by {
+            Some(ref s) => s.clone(),
+            None => SortBy::Date,
+        }
+    }
 }
 
+impl Default for FrontMatter {
+    fn default() -> FrontMatter {
+        FrontMatter {
+            title: None,
+            description: None,
+            date: None,
+            slug: None,
+            url: None,
+            tags: None,
+            draft: None,
+            category: None,
+            sort_by: None,
+            order: None,
+            template: None,
+            extra: None,
+        }
+    }
+}
+
 /// Split a file between the front matter and its content
 /// It will parse the front matter as well and returns any error encountered
-/// TODO: add tests
 pub fn split_content(file_path: &Path, content: &str) -> Result<(FrontMatter, String)> {
     if !PAGE_RE.is_match(content) {
         bail!("Couldn't find front matter in `{}`. Did you forget to add `+++`?", file_path.to_string_lossy());
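The `#[serde(rename_all = "lowercase")]` attribute is what lets the lowercase strings "date", "order" and "none" in front matter map onto the `SortBy` variants. A minimal, self-contained sketch of that mechanism (the crate wiring and the `Meta` struct below are illustrative assumptions, not code from this PR):

    // Standalone sketch: deserialize a lowercase TOML value into an enum,
    // the same way the new SortBy field is parsed.
    #[macro_use]
    extern crate serde_derive;
    extern crate toml;

    #[derive(Debug, PartialEq, Deserialize)]
    #[serde(rename_all = "lowercase")]
    enum SortBy {
        Date,
        Order,
        None,
    }

    // Hypothetical stand-in for the real FrontMatter struct.
    #[derive(Debug, Deserialize)]
    struct Meta {
        sort_by: Option<SortBy>,
        order: Option<usize>,
    }

    fn main() {
        let meta: Meta = toml::from_str(r#"
            sort_by = "order"
            order = 2
        "#).unwrap();
        // "order" in the TOML becomes the Order variant thanks to rename_all.
        assert_eq!(meta.sort_by, Some(SortBy::Order));
        assert_eq!(meta.order, Some(2));
        println!("{:?}", meta);
    }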
src/lib.rs

@@ -32,7 +32,7 @@ mod filters;
 
 pub use site::{Site, GUTENBERG_TERA};
 pub use config::{Config, get_config};
-pub use front_matter::{FrontMatter, split_content};
+pub use front_matter::{FrontMatter, split_content, SortBy};
 pub use page::{Page, populate_previous_and_next_pages};
 pub use section::{Section};
 pub use utils::{create_file};
src/page.rs (227 lines changed)
@@ -12,7 +12,8 @@ use slug::slugify;
 
 use errors::{Result, ResultExt};
 use config::Config;
-use front_matter::{FrontMatter, split_content};
+use front_matter::{FrontMatter, SortBy, split_content};
+use section::Section;
 use markdown::markdown_to_html;
 use utils::{read_file, find_content_components};
 

@@ -76,14 +77,10 @@ pub struct Page {
     /// as summary
     pub summary: Option<String>,
 
-    /// The previous page, by date globally
+    /// The previous page, by whatever sorting is used for the index/section
     pub previous: Option<Box<Page>>,
-    /// The previous page, by date only for the section the page is in
-    pub previous_in_section: Option<Box<Page>>,
-    /// The next page, by date
+    /// The next page, by whatever sorting is used for the index/section
     pub next: Option<Box<Page>>,
-    /// The next page, by date only for the section the page is in
-    pub next_in_section: Option<Box<Page>>,
 }
 
 

@@ -104,9 +101,7 @@ impl Page {
             summary: None,
             meta: meta,
             previous: None,
-            previous_in_section: None,
             next: None,
-            next_in_section: None,
         }
     }
 

@@ -222,7 +217,7 @@ impl Page {
 
 impl ser::Serialize for Page {
     fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: ser::Serializer {
-        let mut state = serializer.serialize_struct("page", 18)?;
+        let mut state = serializer.serialize_struct("page", 16)?;
         state.serialize_field("content", &self.content)?;
         state.serialize_field("title", &self.meta.title)?;
         state.serialize_field("description", &self.meta.description)?;

@@ -239,13 +234,65 @@ impl ser::Serialize for Page {
         state.serialize_field("word_count", &word_count)?;
         state.serialize_field("reading_time", &reading_time)?;
         state.serialize_field("previous", &self.previous)?;
-        state.serialize_field("previous_in_section", &self.previous_in_section)?;
         state.serialize_field("next", &self.next)?;
-        state.serialize_field("next_in_section", &self.next_in_section)?;
         state.end()
     }
 }
 
+/// Sort pages using the method for the given section
+///
+/// Any pages that doesn't have a date when the sorting method is date or order
+/// when the sorting method is order will be ignored.
+pub fn sort_pages(pages: Vec<Page>, section: Option<&Section>) -> Vec<Page> {
+    let sort_by = if let Some(ref sec) = section {
+        sec.meta.sort_by()
+    } else {
+        SortBy::Date
+    };
+
+    match sort_by {
+        SortBy::Date => {
+            let mut can_be_sorted = vec![];
+            let mut cannot_be_sorted = vec![];
+            for page in pages {
+                if page.meta.date.is_some() {
+                    can_be_sorted.push(page);
+                } else {
+                    cannot_be_sorted.push(page);
+                }
+            }
+            can_be_sorted.sort_by(|a, b| b.meta.date().unwrap().cmp(&a.meta.date().unwrap()));
+            // can_be_sorted.append(&mut cannot_be_sorted);
+
+            can_be_sorted
+        },
+        SortBy::Order => {
+            let mut can_be_sorted = vec![];
+            let mut cannot_be_sorted = vec![];
+            for page in pages {
+                if page.meta.order.is_some() {
+                    can_be_sorted.push(page);
+                } else {
+                    cannot_be_sorted.push(page);
+                }
+            }
+            can_be_sorted.sort_by(|a, b| b.meta.order().cmp(&a.meta.order()));
+            // can_be_sorted.append(&mut cannot_be_sorted);
+
+            can_be_sorted
+        },
+        SortBy::None => {
+            let mut p = vec![];
+            for page in pages {
+                p.push(page);
+            }
+
+            p
+        },
+    }
+}
+
+/// Used only by the RSS feed (I think)
 impl PartialOrd for Page {
     fn partial_cmp(&self, other: &Page) -> Option<Ordering> {
         if self.meta.date.is_none() {

@@ -256,8 +303,8 @@ impl PartialOrd for Page {
             return Some(Ordering::Greater);
         }
 
-        let this_date = self.meta.parse_date().unwrap();
-        let other_date = other.meta.parse_date().unwrap();
+        let this_date = self.meta.date().unwrap();
+        let other_date = other.meta.date().unwrap();
 
         if this_date > other_date {
             return Some(Ordering::Less);

@@ -273,36 +320,23 @@ impl PartialOrd for Page {
 
 /// Horribly inefficient way to set previous and next on each pages
 /// So many clones
-pub fn populate_previous_and_next_pages(input: &[Page], in_section: bool) -> Vec<Page> {
+pub fn populate_previous_and_next_pages(input: &[Page]) -> Vec<Page> {
     let pages = input.to_vec();
     let mut res = Vec::new();
 
-    // the input is sorted from most recent to least recent already
+    // the input is already sorted
+    // We might put prev/next randomly if a page is missing date/order, probably fine
     for (i, page) in input.iter().enumerate() {
         let mut new_page = page.clone();
 
-        if new_page.has_date() {
-            if i > 0 {
-                let next = &pages[i - 1];
-                if next.has_date() {
-                    if in_section {
-                        new_page.next_in_section = Some(Box::new(next.clone()));
-                    } else {
-                        new_page.next = Some(Box::new(next.clone()));
-                    }
-                }
-            }
+        if i > 0 {
+            let next = &pages[i - 1];
+            new_page.next = Some(Box::new(next.clone()));
+        }
 
-            if i < input.len() - 1 {
-                let previous = &pages[i + 1];
-                if previous.has_date() {
-                    if in_section {
-                        new_page.previous_in_section = Some(Box::new(previous.clone()));
-                    } else {
-                        new_page.previous = Some(Box::new(previous.clone()));
-                    }
-                }
-            }
-        }
+        if i < input.len() - 1 {
+            let previous = &pages[i + 1];
+            new_page.previous = Some(Box::new(previous.clone()));
+        }
         res.push(new_page);
     }

@@ -315,8 +349,23 @@ mod tests {
     use tempdir::TempDir;
 
     use std::fs::File;
+    use std::path::Path;
 
-    use super::{find_related_assets};
+    use front_matter::{FrontMatter, SortBy};
+    use section::Section;
+    use super::{Page, find_related_assets, sort_pages, populate_previous_and_next_pages};
+
+    fn create_page_with_date(date: &str) -> Page {
+        let mut front_matter = FrontMatter::default();
+        front_matter.date = Some(date.to_string());
+        Page::new(front_matter)
+    }
+
+    fn create_page_with_order(order: usize) -> Page {
+        let mut front_matter = FrontMatter::default();
+        front_matter.order = Some(order);
+        Page::new(front_matter)
+    }
 
     #[test]
     fn test_find_related_assets() {

@@ -333,4 +382,106 @@ mod tests {
         assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "graph.jpg").count(), 1);
         assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "fail.png").count(), 1);
     }
 
+    #[test]
+    fn test_can_default_sort() {
+        let input = vec![
+            create_page_with_date("2018-01-01"),
+            create_page_with_date("2017-01-01"),
+            create_page_with_date("2019-01-01"),
+        ];
+        let pages = sort_pages(input, None);
+        // Should be sorted by date
+        assert_eq!(pages[0].clone().meta.date.unwrap(), "2019-01-01");
+        assert_eq!(pages[1].clone().meta.date.unwrap(), "2018-01-01");
+        assert_eq!(pages[2].clone().meta.date.unwrap(), "2017-01-01");
+    }
+
+    #[test]
+    fn test_can_sort_dates() {
+        let input = vec![
+            create_page_with_date("2018-01-01"),
+            create_page_with_date("2017-01-01"),
+            create_page_with_date("2019-01-01"),
+        ];
+        let mut front_matter = FrontMatter::default();
+        front_matter.sort_by = Some(SortBy::Date);
+        let section = Section::new(Path::new("hey"), front_matter);
+        let pages = sort_pages(input, Some(&section));
+        // Should be sorted by date
+        assert_eq!(pages[0].clone().meta.date.unwrap(), "2019-01-01");
+        assert_eq!(pages[1].clone().meta.date.unwrap(), "2018-01-01");
+        assert_eq!(pages[2].clone().meta.date.unwrap(), "2017-01-01");
+    }
+
+    #[test]
+    fn test_can_sort_order() {
+        let input = vec![
+            create_page_with_order(2),
+            create_page_with_order(3),
+            create_page_with_order(1),
+        ];
+        let mut front_matter = FrontMatter::default();
+        front_matter.sort_by = Some(SortBy::Order);
+        let section = Section::new(Path::new("hey"), front_matter);
+        let pages = sort_pages(input, Some(&section));
+        // Should be sorted by date
+        assert_eq!(pages[0].clone().meta.order.unwrap(), 3);
+        assert_eq!(pages[1].clone().meta.order.unwrap(), 2);
+        assert_eq!(pages[2].clone().meta.order.unwrap(), 1);
+    }
+
+    #[test]
+    fn test_can_sort_none() {
+        let input = vec![
+            create_page_with_order(2),
+            create_page_with_order(3),
+            create_page_with_order(1),
+        ];
+        let mut front_matter = FrontMatter::default();
+        front_matter.sort_by = Some(SortBy::None);
+        let section = Section::new(Path::new("hey"), front_matter);
+        let pages = sort_pages(input, Some(&section));
+        // Should be sorted by date
+        assert_eq!(pages[0].clone().meta.order.unwrap(), 2);
+        assert_eq!(pages[1].clone().meta.order.unwrap(), 3);
+        assert_eq!(pages[2].clone().meta.order.unwrap(), 1);
+    }
+
+    #[test]
+    fn test_ignore_page_with_missing_field() {
+        let input = vec![
+            create_page_with_order(2),
+            create_page_with_order(3),
+            create_page_with_date("2019-01-01"),
+        ];
+        let mut front_matter = FrontMatter::default();
+        front_matter.sort_by = Some(SortBy::Order);
+        let section = Section::new(Path::new("hey"), front_matter);
+        let pages = sort_pages(input, Some(&section));
+        assert_eq!(pages.len(), 2);
+    }
+
+    #[test]
+    fn test_populate_previous_and_next_pages() {
+        let input = vec![
+            create_page_with_order(3),
+            create_page_with_order(2),
+            create_page_with_order(1),
+        ];
+        let pages = populate_previous_and_next_pages(input.as_slice());
+
+        assert!(pages[0].clone().next.is_none());
+        assert!(pages[0].clone().previous.is_some());
+        assert_eq!(pages[0].clone().previous.unwrap().meta.order.unwrap(), 2);
+
+        assert!(pages[1].clone().next.is_some());
+        assert!(pages[1].clone().previous.is_some());
+        assert_eq!(pages[1].clone().next.unwrap().meta.order.unwrap(), 3);
+        assert_eq!(pages[1].clone().previous.unwrap().meta.order.unwrap(), 1);
+
+        assert!(pages[2].clone().next.is_some());
+        assert!(pages[2].clone().previous.is_none());
+        assert_eq!(pages[2].clone().next.unwrap().meta.order.unwrap(), 2);
+    }
 }
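`sort_pages` follows a simple pattern: split the pages into those that carry the field the sort method needs and those that do not, sort the first group most-recent or highest first, and for now drop the second group (the `append` that would keep it is commented out in the diff). A standalone sketch of the same idea on a stand-in type, so it runs without the rest of the crate (the `Entry` type and names are illustrative, not the PR's code):

    // Minimal sketch of the partition-then-sort approach used by sort_pages.
    #[derive(Debug, Clone)]
    struct Entry {
        title: String,
        date: Option<String>, // ISO-8601 dates compare correctly as plain strings
    }

    fn sort_by_date_desc(entries: Vec<Entry>) -> Vec<Entry> {
        // Partition into entries that can be sorted (have a date) and those that cannot.
        let (mut datable, undatable): (Vec<Entry>, Vec<Entry>) =
            entries.into_iter().partition(|e| e.date.is_some());
        // Most recent first, mirroring the `b.cmp(&a)` comparison in the PR.
        datable.sort_by(|a, b| b.date.cmp(&a.date));
        // Like the PR, entries missing the field are simply dropped for now; the
        // commented-out `append` in the diff would instead keep them at the end.
        drop(undatable);
        datable
    }

    fn main() {
        let entries = vec![
            Entry { title: "older".to_string(), date: Some("2017-01-01".to_string()) },
            Entry { title: "undated".to_string(), date: None },
            Entry { title: "newer".to_string(), date: Some("2019-01-01".to_string()) },
        ];
        for e in sort_by_date_desc(entries) {
            println!("{} {:?}", e.title, e.date);
        }
    }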
src/site.rs (17 lines changed)
@@ -10,7 +10,7 @@ use walkdir::WalkDir;
 
 use errors::{Result, ResultExt};
 use config::{Config, get_config};
-use page::{Page, populate_previous_and_next_pages};
+use page::{Page, populate_previous_and_next_pages, sort_pages};
 use utils::{create_file, create_directory};
 use section::{Section};
 use filters;

@@ -200,8 +200,9 @@ impl Site {
         }
 
         for (parent_path, section) in &mut self.sections {
-            section.pages.sort_by(|a, b| a.partial_cmp(b).unwrap());
-            section.pages = populate_previous_and_next_pages(section.pages.as_slice(), true);
+            // TODO: avoid this clone
+            let sorted_pages = sort_pages(section.pages.clone(), Some(&section));
+            section.pages = populate_previous_and_next_pages(sorted_pages.as_slice());
 
             match grandparent_paths.get(parent_path) {
                 Some(paths) => section.subsections.extend(paths.clone()),

@@ -361,11 +362,13 @@ impl Site {
             self.render_categories_and_tags(RenderList::Tags)?;
         }
 
+        // Sort the pages
+        let sorted_pages = sort_pages(pages, self.index.as_ref());
+
         // And finally the index page
         let mut context = Context::new();
-        pages.sort_by(|a, b| a.partial_cmp(b).unwrap());
 
-        context.add("pages", &populate_previous_and_next_pages(&pages, false));
+        context.add("pages", &populate_previous_and_next_pages(sorted_pages.as_slice()));
         context.add("sections", &self.sections.values().collect::<Vec<&Section>>());
        context.add("config", &self.config);
         context.add("current_url", &self.config.base_url);

@@ -446,6 +449,10 @@ impl Site {
             .filter(|&(path, _)| pages_paths.contains(path))
             .map(|(_, page)| page)
             .collect();
+        // TODO: how to sort categories and tag content?
+        // Have a setting in config.toml or a _category.md and _tag.md
+        // The latter is more in line with the rest of Gutenberg but order ordering
+        // doesn't really work across sections so default to partial ordering for now (date)
         pages.sort_by(|a, b| a.partial_cmp(b).unwrap());
 
         let mut context = Context::new();
@@ -1,4 +1,4 @@
 +++
 title = "DevOps"
-description = ""
+sort_by = "order"
 +++

@@ -1,7 +1,6 @@
 +++
 title = "Docker"
-description = ""
-date = "2017-02-01"
+order = 1
 +++
 
 A simple page

@@ -1,7 +1,6 @@
 +++
 title = "Nix"
-description = ""
-date = "2017-03-01"
+order = 2
 +++
 
 A simple page

@@ -1,4 +1,4 @@
 +++
 title = "Programming"
-description = ""
+sort_by = "order"
 +++

@@ -1,6 +1,6 @@
 +++
 title = "Python tutorial"
-description = ""
+order = 1
 +++
 
 A simple page

@@ -1,6 +1,6 @@
 +++
 title = "Rust"
-description = ""
+order = 2
 +++
 
 A simple page
@@ -3,7 +3,7 @@ extern crate tera;
 
 use std::path::Path;
 
-use gutenberg::{FrontMatter, split_content};
+use gutenberg::{FrontMatter, split_content, SortBy};
 use tera::to_value;
 
 

@@ -125,7 +125,7 @@ title = "Hello"
 description = "hey there"
 date = "2016-10-10""#;
     let res = FrontMatter::parse(content).unwrap();
-    assert!(res.parse_date().is_some());
+    assert!(res.date().is_some());
 }
 
 #[test]

@@ -135,7 +135,7 @@ title = "Hello"
 description = "hey there"
 date = "2002-10-02T15:00:00Z""#;
     let res = FrontMatter::parse(content).unwrap();
-    assert!(res.parse_date().is_some());
+    assert!(res.date().is_some());
 }
 
 #[test]

@@ -145,9 +145,41 @@ title = "Hello"
 description = "hey there"
 date = "2002/10/12""#;
     let res = FrontMatter::parse(content).unwrap();
-    assert!(res.parse_date().is_none());
+    assert!(res.date().is_none());
 }
 
+#[test]
+fn test_cant_parse_sort_by_date() {
+    let content = r#"
+title = "Hello"
+description = "hey there"
+sort_by = "date""#;
+    let res = FrontMatter::parse(content).unwrap();
+    assert!(res.sort_by.is_some());
+    assert_eq!(res.sort_by.unwrap(), SortBy::Date);
+}
+
+#[test]
+fn test_cant_parse_sort_by_order() {
+    let content = r#"
+title = "Hello"
+description = "hey there"
+sort_by = "order""#;
+    let res = FrontMatter::parse(content).unwrap();
+    assert!(res.sort_by.is_some());
+    assert_eq!(res.sort_by.unwrap(), SortBy::Order);
+}
+
+#[test]
+fn test_cant_parse_sort_by_none() {
+    let content = r#"
+title = "Hello"
+description = "hey there"
+sort_by = "none""#;
+    let res = FrontMatter::parse(content).unwrap();
+    assert!(res.sort_by.is_some());
+    assert_eq!(res.sort_by.unwrap(), SortBy::None);
+}
+
 #[test]
 fn test_can_split_content_valid() {
@@ -44,7 +44,7 @@ fn test_can_parse_site() {
     let posts_section = &site.sections[&posts_path];
     assert_eq!(posts_section.subsections.len(), 1);
     //println!("{:#?}", posts_section.pages);
-    assert_eq!(posts_section.pages.len(), 5);
+    assert_eq!(posts_section.pages.len(), 4);
 
     let tutorials_section = &site.sections[&posts_path.join("tutorials")];
     assert_eq!(tutorials_section.subsections.len(), 2);