Make index a section like any other
parent 5bfe1c213b
commit 2d4cba5b2d

@@ -5,7 +5,7 @@
 - Fix XML templates overriding and reloading
 - `title` and `description` are now optional in the front matter
 - Add GenericConfig, Vim syntax
-- Add `_index.md` for homepage as well
+- Add `_index.md` for homepage as well and make that into a normal section
 - Allow sorting by `none`, `date` and `order` for sections
 - Add pagination

@@ -41,8 +41,7 @@ fn bench_populate_previous_and_next_pages(b: &mut test::Bencher) {
     path.push("test_site");
     let mut site = Site::new(&path, "config.toml").unwrap();
     site.load().unwrap();
-    let mut pages = site.pages.values().cloned().collect::<Vec<_>>();
-    pages.sort_by(|a, b| a.partial_cmp(b).unwrap());
+    let pages = site.pages.values().cloned().collect::<Vec<_>>();

     b.iter(|| populate_previous_and_next_pages(pages.as_slice()));
 }

@@ -6,6 +6,10 @@ pub fn info(message: &str) {
     println!("{}", NotSet.bold().paint(message));
 }

+pub fn warn(message: &str) {
+    println!("{}", Yellow.bold().paint(message));
+}
+
 pub fn success(message: &str) {
     println!("{}", Green.bold().paint(message));
 }

@@ -14,7 +14,7 @@ lazy_static! {
     static ref PAGE_RE: Regex = Regex::new(r"^\r?\n?\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
 }

-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
 #[serde(rename_all = "lowercase")]
 pub enum SortBy {
     Date,

@@ -64,10 +64,6 @@ pub struct FrontMatter {

 impl FrontMatter {
     pub fn parse(toml: &str) -> Result<FrontMatter> {
-        if toml.trim() == "" {
-            bail!("Front matter of file is missing");
-        }
-
         let mut f: FrontMatter = match toml::from_str(toml) {
             Ok(d) => d,
             Err(e) => bail!(e),

@@ -89,8 +85,6 @@ impl FrontMatter {
             f.paginate_path = Some("page".to_string());
         }

-
-
         Ok(f)
     }

@@ -112,10 +106,11 @@ impl FrontMatter {
         self.order.unwrap()
     }

+    /// Returns the current sorting method, defaults to `None` (== no sorting)
     pub fn sort_by(&self) -> SortBy {
         match self.sort_by {
-            Some(ref s) => s.clone(),
-            None => SortBy::Date,
+            Some(ref s) => *s,
+            None => SortBy::None,
         }
     }

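Aside: the two front-matter hunks above make `SortBy` a `Copy` enum and change the fallback returned by `sort_by()` from `Date` to `None`. A minimal, self-contained sketch of that accessor pattern, using stand-in types rather than the real `FrontMatter`:

// Sketch only: a stand-in front matter showing why `Copy` lets the accessor
// return the enum by value, and how a missing `sort_by` key now means
// "no sorting" instead of "sort by date".
#[derive(Debug, Copy, Clone, PartialEq)]
enum SortBy {
    Date,
    Order,
    None,
}

struct FrontMatterSketch {
    sort_by: Option<SortBy>,
}

impl FrontMatterSketch {
    fn sort_by(&self) -> SortBy {
        match self.sort_by {
            Some(s) => s,        // `Copy`: no `.clone()` needed
            None => SortBy::None,
        }
    }
}

fn main() {
    assert_eq!(FrontMatterSketch { sort_by: None }.sort_by(), SortBy::None);
    assert_eq!(FrontMatterSketch { sort_by: Some(SortBy::Order) }.sort_by(), SortBy::Order);
}

So a section with no `sort_by` key is now left in input order; the tests further down exercise the `date`, `order` and `none` cases explicitly.
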
src/page.rs (72 changed lines)

@@ -1,5 +1,4 @@
 /// A page, can be a blog post or a basic page
-use std::cmp::Ordering;
 use std::collections::HashMap;
 use std::fs::{read_dir};
 use std::path::{Path, PathBuf};

@@ -13,7 +12,6 @@ use slug::slugify;
 use errors::{Result, ResultExt};
 use config::Config;
 use front_matter::{FrontMatter, SortBy, split_content};
-use section::Section;
 use markdown::markdown_to_html;
 use utils::{read_file, find_content_components};

@@ -243,13 +241,7 @@ impl ser::Serialize for Page {
 ///
 /// Any pages that doesn't have a date when the sorting method is date or order
 /// when the sorting method is order will be ignored.
-pub fn sort_pages(pages: Vec<Page>, section: Option<&Section>) -> (Vec<Page>, Vec<Page>) {
-    let sort_by = if let Some(s) = section {
-        s.meta.sort_by()
-    } else {
-        SortBy::None
-    };
-
+pub fn sort_pages(pages: Vec<Page>, sort_by: SortBy) -> (Vec<Page>, Vec<Page>) {
     match sort_by {
         SortBy::Date => {
             let mut can_be_sorted = vec![];

@@ -290,32 +282,6 @@ pub fn sort_pages(pages: Vec<Page>, section: Option<&Section>) -> (Vec<Page>, Ve
     }
 }

-/// Used only by the RSS feed (I think)
-impl PartialOrd for Page {
-    fn partial_cmp(&self, other: &Page) -> Option<Ordering> {
-        if self.meta.date.is_none() {
-            return Some(Ordering::Less);
-        }
-
-        if other.meta.date.is_none() {
-            return Some(Ordering::Greater);
-        }
-
-        let this_date = self.meta.date().unwrap();
-        let other_date = other.meta.date().unwrap();
-
-        if this_date > other_date {
-            return Some(Ordering::Less);
-        }
-        if this_date < other_date {
-            return Some(Ordering::Greater);
-        }
-
-        Some(Ordering::Equal)
-    }
-}
-
 /// Horribly inefficient way to set previous and next on each pages
 /// So many clones
 pub fn populate_previous_and_next_pages(input: &[Page]) -> Vec<Page> {

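The hunks above replace the old `Option<&Section>` parameter (and the `PartialOrd` impl the RSS feed relied on) with an explicit `SortBy` argument: `sort_pages` now returns both the sorted pages and the ones missing the relevant key, which a section stores as `ignored_pages`. A self-contained sketch of the `SortBy::Date` behaviour, with a hypothetical simplified page type instead of the real `Page`:

// Sketch of the "sorted vs ignored" split: dated pages come back sorted
// newest-first, undated pages come back untouched for the caller to keep
// as ignored pages.
#[derive(Debug, Clone)]
struct PageSketch {
    date: Option<&'static str>, // ISO dates compare correctly as plain strings
}

fn sort_pages_by_date(pages: Vec<PageSketch>) -> (Vec<PageSketch>, Vec<PageSketch>) {
    let (mut dated, undated): (Vec<_>, Vec<_>) =
        pages.into_iter().partition(|p| p.date.is_some());
    dated.sort_by(|a, b| b.date.cmp(&a.date)); // newest first
    (dated, undated)
}

fn main() {
    let input = vec![
        PageSketch { date: Some("2017-01-01") },
        PageSketch { date: None },
        PageSketch { date: Some("2019-01-01") },
    ];
    let (sorted, ignored) = sort_pages_by_date(input);
    assert_eq!(sorted[0].date, Some("2019-01-01"));
    assert_eq!(ignored.len(), 1);
}

The date and order tests below expect exactly this shape: newest (or highest order) first, with pages lacking the key reported separately.
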
@@ -347,10 +313,8 @@ mod tests {
     use tempdir::TempDir;

     use std::fs::File;
-    use std::path::Path;

     use front_matter::{FrontMatter, SortBy};
-    use section::Section;
     use super::{Page, find_related_assets, sort_pages, populate_previous_and_next_pages};

     fn create_page_with_date(date: &str) -> Page {

@@ -381,20 +345,6 @@ mod tests {
         assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "fail.png").count(), 1);
     }

-    #[test]
-    fn test_can_default_sort() {
-        let input = vec![
-            create_page_with_date("2018-01-01"),
-            create_page_with_date("2017-01-01"),
-            create_page_with_date("2019-01-01"),
-        ];
-        let (pages, _) = sort_pages(input, None);
-        // Should be sorted by date
-        assert_eq!(pages[0].clone().meta.date.unwrap(), "2018-01-01");
-        assert_eq!(pages[1].clone().meta.date.unwrap(), "2017-01-01");
-        assert_eq!(pages[2].clone().meta.date.unwrap(), "2019-01-01");
-    }
-
     #[test]
     fn test_can_sort_dates() {
         let input = vec![

@@ -402,10 +352,7 @@ mod tests {
             create_page_with_date("2017-01-01"),
             create_page_with_date("2019-01-01"),
         ];
-        let mut front_matter = FrontMatter::default();
-        front_matter.sort_by = Some(SortBy::Date);
-        let section = Section::new(Path::new("hey"), front_matter);
-        let (pages, _) = sort_pages(input, Some(&section));
+        let (pages, _) = sort_pages(input, SortBy::Date);
         // Should be sorted by date
         assert_eq!(pages[0].clone().meta.date.unwrap(), "2019-01-01");
         assert_eq!(pages[1].clone().meta.date.unwrap(), "2018-01-01");

@@ -419,10 +366,7 @@ mod tests {
             create_page_with_order(3),
             create_page_with_order(1),
         ];
-        let mut front_matter = FrontMatter::default();
-        front_matter.sort_by = Some(SortBy::Order);
-        let section = Section::new(Path::new("hey"), front_matter);
-        let (pages, _) = sort_pages(input, Some(&section));
+        let (pages, _) = sort_pages(input, SortBy::Order);
         // Should be sorted by date
         assert_eq!(pages[0].clone().meta.order.unwrap(), 3);
         assert_eq!(pages[1].clone().meta.order.unwrap(), 2);

@@ -436,10 +380,7 @@ mod tests {
             create_page_with_order(3),
             create_page_with_order(1),
         ];
-        let mut front_matter = FrontMatter::default();
-        front_matter.sort_by = Some(SortBy::None);
-        let section = Section::new(Path::new("hey"), front_matter);
-        let (pages, _) = sort_pages(input, Some(&section));
+        let (pages, _) = sort_pages(input, SortBy::None);
         // Should be sorted by date
         assert_eq!(pages[0].clone().meta.order.unwrap(), 2);
         assert_eq!(pages[1].clone().meta.order.unwrap(), 3);

@@ -453,10 +394,7 @@ mod tests {
             create_page_with_order(3),
             create_page_with_date("2019-01-01"),
         ];
-        let mut front_matter = FrontMatter::default();
-        front_matter.sort_by = Some(SortBy::Order);
-        let section = Section::new(Path::new("hey"), front_matter);
-        let (pages, unsorted) = sort_pages(input, Some(&section));
+        let (pages, unsorted) = sort_pages(input, SortBy::Order);
         assert_eq!(pages.len(), 2);
         assert_eq!(unsorted.len(), 1);
     }

@@ -8,7 +8,7 @@ use config::Config;
 use front_matter::{FrontMatter, split_content};
 use errors::{Result, ResultExt};
 use utils::{read_file, find_content_components};
-use page::{Page, sort_pages};
+use page::{Page};


 #[derive(Clone, Debug, PartialEq)]

@@ -29,6 +29,8 @@ pub struct Section {
     pub meta: FrontMatter,
     /// All direct pages of that section
     pub pages: Vec<Page>,
+    /// All pages that cannot be sorted in this section
+    pub ignored_pages: Vec<Page>,
     /// All direct subsections
     pub subsections: Vec<Section>,
 }

@@ -46,6 +48,7 @@ impl Section {
             permalink: "".to_string(),
             meta: meta,
             pages: vec![],
+            ignored_pages: vec![],
             subsections: vec![],
         }
     }

@@ -86,7 +89,7 @@ impl Section {
     }

     /// Renders the page using the default layout, unless specified in front-matter
-    pub fn render_html(&self, tera: &Tera, config: &Config) -> Result<String> {
+    pub fn render_html(&self, sections: &[&Section], tera: &Tera, config: &Config) -> Result<String> {
         let tpl_name = self.get_template_name();

         let mut context = Context::new();

@@ -94,14 +97,25 @@ impl Section {
         context.add("section", self);
         context.add("current_url", &self.permalink);
         context.add("current_path", &self.path);
+        if self.is_index() {
+            context.add("sections", &sections);
+        }

         tera.render(&tpl_name, &context)
             .chain_err(|| format!("Failed to render section '{}'", self.file_path.display()))
     }

     /// Is this the index section?
     pub fn is_index(&self) -> bool {
         self.components.is_empty()
     }
+
+    pub fn all_pages_path(&self) -> Vec<PathBuf> {
+        let mut paths = vec![];
+        paths.extend(self.pages.iter().map(|p| p.file_path.clone()));
+        paths.extend(self.ignored_pages.iter().map(|p| p.file_path.clone()));
+        paths
+    }
 }

 impl ser::Serialize for Section {

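Since the homepage is now rendered through `Section::render_html` like any other section, the list of all sections is only injected into the template context when the section is the index. A dependency-free sketch of that conditional, with a plain `HashMap` standing in for the Tera `Context` (names here are illustrative, not the real API):

use std::collections::HashMap;

// Stand-in context: every section gets "current_url" and "current_path";
// only the index section additionally gets "sections".
struct SectionSketch {
    is_index: bool,
    permalink: String,
    path: String,
}

fn build_context(section: &SectionSketch, all_paths: &[String]) -> HashMap<&'static str, String> {
    let mut context = HashMap::new();
    context.insert("current_url", section.permalink.clone());
    context.insert("current_path", section.path.clone());
    if section.is_index {
        // The real code passes the section structs themselves; a joined
        // path list keeps the sketch dependency-free.
        context.insert("sections", all_paths.join(", "));
    }
    context
}

fn main() {
    let index = SectionSketch { is_index: true, permalink: "/".into(), path: "".into() };
    let posts = SectionSketch { is_index: false, permalink: "/posts".into(), path: "posts".into() };
    let all_paths = vec!["".to_string(), "posts".to_string()];
    assert!(build_context(&index, &all_paths).contains_key("sections"));
    assert!(!build_context(&posts, &all_paths).contains_key("sections"));
}

Design-wise this keeps non-index section templates unchanged while giving the index template the `sections` variable it previously received from the removed `build_pages` code.
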
@@ -111,8 +125,7 @@ impl ser::Serialize for Section {
         state.serialize_field("description", &self.meta.description)?;
         state.serialize_field("path", &format!("/{}", self.path))?;
         state.serialize_field("permalink", &self.permalink)?;
-        let (sorted_pages, _) = sort_pages(self.pages.clone(), Some(self));
-        state.serialize_field("pages", &sorted_pages)?;
+        state.serialize_field("pages", &self.pages)?;
         state.serialize_field("subsections", &self.subsections)?;
         state.end()
     }

src/site.rs (184 changed lines)

@@ -14,6 +14,7 @@ use page::{Page, populate_previous_and_next_pages, sort_pages};
 use pagination::Paginator;
 use utils::{create_file, create_directory};
 use section::{Section};
+use front_matter::{SortBy};
 use filters;

@@ -77,7 +78,6 @@ pub struct Site {
     pub config: Config,
     pub pages: HashMap<PathBuf, Page>,
     pub sections: BTreeMap<PathBuf, Section>,
-    pub index: Option<Section>,
     pub tera: Tera,
     live_reload: bool,
     output_path: PathBuf,

@@ -105,7 +105,6 @@ impl Site {
             config: get_config(path, config_file),
             pages: HashMap::new(),
             sections: BTreeMap::new(),
-            index: None,
             tera: tera,
             live_reload: false,
             output_path: path.join("public"),

@@ -140,13 +139,7 @@ impl Site {
         for entry in glob(&content_glob).unwrap().filter_map(|e| e.ok()) {
             let path = entry.as_path();
             if path.file_name().unwrap() == "_index.md" {
-                // Index section
-                if path.parent().unwrap() == self.base_path.join("content") {
-                    self.index = Some(Section::from_file(path, &self.config)?);
-                } else {
-                    // all the other sections
-                    self.add_section(path)?;
-                }
+                self.add_section(path)?;
             } else {
                 self.add_page(path)?;
             }

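The simplification above is the heart of the commit: `content/_index.md` no longer populates a separate `index: Option<Section>` field, it simply becomes the section stored under the content directory itself. A small self-contained sketch of the resulting data model (stand-in types; the real `Site` holds much more):

use std::collections::BTreeMap;
use std::path::{Path, PathBuf};

struct SectionSketch {
    components: Vec<String>,
}

impl SectionSketch {
    // The index section is the one left with no path components under content/.
    fn is_index(&self) -> bool {
        self.components.is_empty()
    }
}

struct SiteSketch {
    sections: BTreeMap<PathBuf, SectionSketch>,
}

fn main() {
    let content_dir = Path::new("test_site/content").to_path_buf();
    let mut site = SiteSketch { sections: BTreeMap::new() };

    // `content/_index.md` registers a plain section keyed by the content dir...
    site.sections.insert(content_dir.clone(), SectionSketch { components: vec![] });
    // ...and a nested `content/posts/_index.md` one with a single component.
    site.sections.insert(content_dir.join("posts"), SectionSketch { components: vec!["posts".into()] });

    assert!(site.sections[&content_dir].is_index());
    assert!(!site.sections[&content_dir.join("posts")].is_index());
}

The updated `test_can_parse_site` below checks exactly this: the section count goes up by one and the index section is fetched via `site.sections[&path.join("content")]`.
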
@@ -214,8 +207,10 @@ impl Site {

         for (parent_path, section) in &mut self.sections {
             // TODO: avoid this clone
-            let (sorted_pages, _) = sort_pages(section.pages.clone(), Some(section));
+            let (mut sorted_pages, cannot_be_sorted_pages) = sort_pages(section.pages.clone(), section.meta.sort_by());
+            sorted_pages = populate_previous_and_next_pages(&sorted_pages);
             section.pages = sorted_pages;
+            section.ignored_pages = cannot_be_sorted_pages;

             match grandparent_paths.get(parent_path) {
                 Some(paths) => section.subsections.extend(paths.clone()),

@@ -257,6 +252,14 @@ impl Site {
         html
     }

+    pub fn ensure_public_directory_exists(&self) -> Result<()> {
+        let public = self.output_path.clone();
+        if !public.exists() {
+            create_directory(&public)?;
+        }
+        Ok(())
+    }
+
     /// Copy static file to public directory.
     pub fn copy_static_file<P: AsRef<Path>>(&self, path: P) -> Result<()> {
         let relative_path = path.as_ref().strip_prefix(&self.static_path).unwrap();

@@ -298,39 +301,29 @@ impl Site {

     pub fn rebuild_after_content_change(&mut self, path: &Path) -> Result<()> {
         let is_section = path.ends_with("_index.md");
-        let is_index_section = if is_section {
-            path.parent().unwrap() == self.base_path.join("content")
-        } else {
-            false
-        };
-
         if path.exists() {
             // file exists, either a new one or updating content
             if is_section {
-                if is_index_section {
-                    self.index = Some(Section::from_file(path, &self.config)?);
-                } else {
-                    self.add_section(path)?;
-                }
+                self.add_section(path)?;
             } else {
+                // probably just an update so just re-parse that page
+                // TODO: we can compare the frontmatter of the existing and new one
+                // to see if we need to update re-build the whole thing or just that
+                // page
                 self.add_page_and_render(path)?;
             }
         } else if is_section {
             // File doesn't exist -> a deletion so we remove it from everything
-            if !is_index_section {
-                let relative_path = self.sections[path].relative_path.clone();
-                self.sections.remove(path);
-                self.permalinks.remove(&relative_path);
-            } else {
-                self.index = None;
-            }
+            let relative_path = self.sections[path].relative_path.clone();
+            self.sections.remove(path);
+            self.permalinks.remove(&relative_path);
         } else {
             let relative_path = self.pages[path].relative_path.clone();
             self.pages.remove(path);
             self.permalinks.remove(&relative_path);
         }

         // TODO: probably no need to do that, we should be able to only re-render a page or a section.
         self.populate_sections();
         self.populate_tags_and_categories();
         self.build()

@@ -341,19 +334,16 @@ impl Site {
         match path.file_name().unwrap().to_str().unwrap() {
             "sitemap.xml" => self.render_sitemap(),
             "rss.xml" => self.render_rss_feed(),
-            _ => self.build_pages()
+            _ => self.build() // TODO: change that
         }
     }

     /// Renders a single content page
     pub fn render_page(&self, page: &Page) -> Result<()> {
-        let public = self.output_path.clone();
-        if !public.exists() {
-            create_directory(&public)?;
-        }
+        self.ensure_public_directory_exists()?;

         // Copy the nesting of the content directory if we have sections for that page
-        let mut current_path = public.to_path_buf();
+        let mut current_path = self.output_path.to_path_buf();

         for component in page.path.split('/') {
             current_path.push(component);

@@ -379,26 +369,16 @@ impl Site {
         Ok(())
     }

-    /// Renders all content, categories, tags and index pages
-    pub fn build_pages(&self) -> Result<()> {
-        let public = self.output_path.clone();
-        if !public.exists() {
-            create_directory(&public)?;
+    /// Builds the site to the `public` directory after deleting it
+    pub fn build(&self) -> Result<()> {
+        self.clean()?;
+        self.render_sections()?;
+        self.render_orphan_pages()?;
+        self.render_sitemap()?;
+        if self.config.generate_rss.unwrap() {
+            self.render_rss_feed()?;
         }

-        // Sort the pages first
-        // TODO: avoid the clone()
-        let (mut sorted_pages, cannot_sort_pages) = sort_pages(self.pages.values().cloned().collect(), self.index.as_ref());
-        sorted_pages = populate_previous_and_next_pages(&sorted_pages);
-        for page in &sorted_pages {
-            self.render_page(page)?;
-        }
-        for page in &cannot_sort_pages {
-            self.render_page(page)?;
-        }
-
-        // Outputting categories and pages
-        self.render_robots()?;
         if self.config.generate_categories_pages.unwrap() {
             self.render_categories_and_tags(RenderList::Categories)?;
         }

@@ -406,49 +386,12 @@ impl Site {
             self.render_categories_and_tags(RenderList::Tags)?;
         }

-        // And finally the index page
-        let mut rendered_index = false;
-        // Try to render the index as a paginated page first if needed
-        if let Some(ref i) = self.index {
-            if i.meta.is_paginated() {
-                self.render_paginated(&self.output_path, i)?;
-                rendered_index = true;
-            }
-        }
-
-        // Otherwise render the default index page
-        if !rendered_index {
-            let mut context = Context::new();
-            context.add("pages", &sorted_pages);
-            context.add("sections", &self.sections.values().collect::<Vec<&Section>>());
-            context.add("config", &self.config);
-            context.add("current_url", &self.config.base_url);
-            context.add("current_path", &"");
-            let index = self.tera.render("index.html", &context)?;
-            create_file(public.join("index.html"), &self.inject_livereload(index))?;
-        }
-
         Ok(())
     }

-    /// Builds the site to the `public` directory after deleting it
-    pub fn build(&self) -> Result<()> {
-        self.clean()?;
-        self.build_pages()?;
-        self.render_sitemap()?;
-
-        if self.config.generate_rss.unwrap() {
-            self.render_rss_feed()?;
-        }
-
-        self.render_robots()?;
-
-        self.render_sections()?;
-        self.copy_static_directory()
-    }
-
     /// Renders robots.txt
     fn render_robots(&self) -> Result<()> {
+        self.ensure_public_directory_exists()?;
         create_file(
             self.output_path.join("robots.txt"),
             &self.tera.render("robots.txt", &Context::new())?

@@ -472,6 +415,7 @@ impl Site {
         } else {
             ("tags.html", "tag.html", "tags", "tag")
         };
+        self.ensure_public_directory_exists()?;

         // Create the categories/tags directory first
         let public = self.output_path.clone();

@@ -497,7 +441,7 @@ impl Site {

         // Now, each individual item
         for (item_name, pages_paths) in items.iter() {
-            let mut pages: Vec<&Page> = self.pages
+            let pages: Vec<&Page> = self.pages
                 .iter()
                 .filter(|&(path, _)| pages_paths.contains(path))
                 .map(|(_, page)| page)

@@ -505,8 +449,7 @@ impl Site {
             // TODO: how to sort categories and tag content?
             // Have a setting in config.toml or a _category.md and _tag.md
             // The latter is more in line with the rest of Gutenberg but order ordering
-            // doesn't really work across sections so default to partial ordering for now (date)
-            pages.sort_by(|a, b| a.partial_cmp(b).unwrap());
+            // doesn't really work across sections.

             let mut context = Context::new();
             let slug = slugify(&item_name);

@@ -529,6 +472,7 @@ impl Site {
     }

     fn render_sitemap(&self) -> Result<()> {
+        self.ensure_public_directory_exists()?;
         let mut context = Context::new();
         context.add("pages", &self.pages.values().collect::<Vec<&Page>>());
         context.add("sections", &self.sections.values().collect::<Vec<&Section>>());

@@ -563,20 +507,22 @@ impl Site {
     }

     fn render_rss_feed(&self) -> Result<()> {
+        self.ensure_public_directory_exists()?;
+
         let mut context = Context::new();
-        let mut pages = self.pages.values()
+        let pages = self.pages.values()
             .filter(|p| p.meta.date.is_some())
             .take(15) // limit to the last 15 elements
-            .collect::<Vec<&Page>>();
+            .map(|p| p.clone())
+            .collect::<Vec<Page>>();

         // Don't generate a RSS feed if none of the pages has a date
         if pages.is_empty() {
             return Ok(());
         }

-        pages.sort_by(|a, b| a.partial_cmp(b).unwrap());
-        context.add("pages", &pages);
-        context.add("last_build_date", &pages[0].meta.date);
+        let (sorted_pages, _) = sort_pages(pages, SortBy::Date);
+        context.add("pages", &sorted_pages);
         context.add("config", &self.config);

         let rss_feed_url = if self.config.base_url.ends_with('/') {

@@ -594,6 +540,7 @@ impl Site {
     }

     fn render_sections(&self) -> Result<()> {
+        self.ensure_public_directory_exists()?;
         let public = self.output_path.clone();

         for section in self.sections.values() {

@@ -609,9 +556,34 @@ impl Site {
             if section.meta.is_paginated() {
                 self.render_paginated(&output_path, section)?;
             } else {
-                let output = section.render_html(&self.tera, &self.config)?;
+                let output = section.render_html(
+                    &self.sections.values().collect::<Vec<&Section>>(),
+                    &self.tera,
+                    &self.config,
+                )?;
                 create_file(output_path.join("index.html"), &self.inject_livereload(output))?;
             }
+
+            for page in &section.pages {
+                self.render_page(page)?;
+            }
         }
+
         Ok(())
     }
+
+    /// Renders all pages that do not belong to any sections
+    fn render_orphan_pages(&self) -> Result<()> {
+        self.ensure_public_directory_exists()?;
+        let mut pages_in_sections = vec![];
+        for s in self.sections.values() {
+            pages_in_sections.extend(s.all_pages_path());
+        }
+
+        for page in self.pages.values() {
+            if !pages_in_sections.contains(&page.file_path) {
+                self.render_page(page)?;
+            }
+        }
+
+        Ok(())

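The new `render_orphan_pages` above complements sections owning their own (sorted) pages: any page whose path is not claimed by a section is still rendered, just without section-driven ordering. A self-contained sketch of that set difference (paths only, no rendering; the file names are made up for the example):

use std::path::PathBuf;

// Collect every page path owned by a section (sorted and ignored alike),
// then treat the remaining pages as orphans.
fn orphan_pages(all_pages: &[PathBuf], sections_pages: &[Vec<PathBuf>]) -> Vec<PathBuf> {
    let mut pages_in_sections: Vec<PathBuf> = vec![];
    for section in sections_pages {
        pages_in_sections.extend(section.iter().cloned());
    }

    all_pages
        .iter()
        .filter(|p| !pages_in_sections.contains(*p))
        .cloned()
        .collect()
}

fn main() {
    let all = vec![
        PathBuf::from("content/posts/simple.md"),
        PathBuf::from("content/a-fixed-url.md"),
    ];
    let owned = vec![vec![PathBuf::from("content/posts/simple.md")]];
    let orphans = orphan_pages(&all, &owned);
    assert_eq!(orphans, vec![PathBuf::from("content/a-fixed-url.md")]);
}

This is presumably also why the `a-fixed-url` page loses its previous/next assertions in the test changes further down: as an orphan it is no longer part of a sorted, linked sequence.
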
@@ -619,20 +591,14 @@ impl Site {

     /// Renders a list of pages when the section/index is wanting pagination.
     fn render_paginated(&self, output_path: &Path, section: &Section) -> Result<()> {
+        self.ensure_public_directory_exists()?;
+
         let paginate_path = match section.meta.paginate_path {
             Some(ref s) => s.clone(),
             None => unreachable!()
         };

-        // this will sort too many times!
-        // TODO: make sorting happen once for everything so we don't need to sort all the time
-        let sorted_pages = if section.is_index() {
-            sort_pages(self.pages.values().cloned().collect(), self.index.as_ref()).0
-        } else {
-            sort_pages(section.pages.clone(), Some(section)).0
-        };
-
-        let paginator = Paginator::new(&sorted_pages, section);
+        let paginator = Paginator::new(&section.pages, section);

         for (i, pager) in paginator.pagers.iter().enumerate() {
             let folder_path = output_path.join(&paginate_path);

test_site/content/hello.md (new file, 2 lines)

@@ -0,0 +1,2 @@
++++
++++

@@ -15,7 +15,7 @@
     <div class="content">
         {% block content %}
         <div class="list-posts">
-            {% for page in pages %}
+            {% for page in section.pages %}
             <article>
                 <h3 class="post__title"><a href="{{ page.permalink }}">{{ page.title }}</a></h3>
             </article>

@@ -7,7 +7,7 @@
 {% for pager in paginator.pagers %}
 {{pager.index}}: {{pager.path | safe }}
 {% endfor %}
-Num pages: {{ paginator.pages | length }}
+Num pagers: {{ paginator.pagers | length }}
 Page size: {{ paginator.paginate_by }}
 Current index: {{ paginator.current_index }}
 First: {{ paginator.first | safe }}

@@ -74,10 +74,10 @@ url = "hello-world""#;
     }

     #[test]
-    fn test_errors_with_empty_front_matter() {
+    fn test_is_ok_with_empty_front_matter() {
        let content = r#" "#;
        let res = FrontMatter::parse(content);
-        assert!(res.is_err());
+        assert!(res.is_ok());
     }

     #[test]

@@ -19,12 +19,9 @@ fn test_can_parse_site() {
     site.load().unwrap();

     // Correct number of pages (sections are pages too)
-    assert_eq!(site.pages.len(), 10);
+    assert_eq!(site.pages.len(), 11);
     let posts_path = path.join("content").join("posts");

-    // We have an index page
-    assert!(site.index.is_some());
-
     // Make sure we remove all the pwd + content from the sections
     let basic = &site.pages[&posts_path.join("simple.md")];
     assert_eq!(basic.components, vec!["posts".to_string()]);

@@ -38,12 +35,16 @@ fn test_can_parse_site() {
     assert_eq!(asset_folder_post.components, vec!["posts".to_string()]);

     // That we have the right number of sections
-    assert_eq!(site.sections.len(), 4);
+    assert_eq!(site.sections.len(), 5);

     // And that the sections are correct
+    let index_section = &site.sections[&path.join("content")];
+    assert_eq!(index_section.subsections.len(), 1);
+    assert_eq!(index_section.pages.len(), 1);
+
     let posts_section = &site.sections[&posts_path];
     assert_eq!(posts_section.subsections.len(), 1);
-    assert_eq!(posts_section.pages.len(), 4);
+    assert_eq!(posts_section.pages.len(), 5);

     let tutorials_section = &site.sections[&posts_path.join("tutorials")];
     assert_eq!(tutorials_section.subsections.len(), 2);

@@ -108,6 +109,7 @@ fn test_can_build_site_without_live_reload() {
     assert!(file_exists!(public, "posts/python/index.html"));
     assert!(file_exists!(public, "posts/tutorials/devops/nix/index.html"));
     assert!(file_exists!(public, "posts/with-assets/index.html"));
+    assert!(file_exists!(public, "posts/no-section/simple/index.html"));

     // Sections
     assert!(file_exists!(public, "posts/index.html"));

@@ -126,9 +128,6 @@ fn test_can_build_site_without_live_reload() {
     // Both pages and sections are in the sitemap
     assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/posts/simple</loc>"));
     assert!(file_contains!(public, "sitemap.xml", "<loc>https://replace-this-with-your-url.com/posts</loc>"));
-
-    assert!(file_contains!(public, "a-fixed-url/index.html", "Previous article: "));
-    assert!(file_contains!(public, "a-fixed-url/index.html", "Next article: "));
 }

 #[test]

@@ -323,22 +322,22 @@ fn test_can_build_site_with_pagination_for_section() {
         "posts/page/1/index.html",
         "http-equiv=\"refresh\" content=\"0;url=https://replace-this-with-your-url.com/posts\""
     ));
-    assert!(file_contains!(public, "posts/index.html", "Num pages: 2"));
+    assert!(file_contains!(public, "posts/index.html", "Num pagers: 3"));
     assert!(file_contains!(public, "posts/index.html", "Page size: 2"));
     assert!(file_contains!(public, "posts/index.html", "Current index: 1"));
     assert!(file_contains!(public, "posts/index.html", "has_next"));
     assert!(file_contains!(public, "posts/index.html", "First: https://replace-this-with-your-url.com/posts"));
-    assert!(file_contains!(public, "posts/index.html", "Last: https://replace-this-with-your-url.com/posts/page/2"));
+    assert!(file_contains!(public, "posts/index.html", "Last: https://replace-this-with-your-url.com/posts/page/3"));
     assert_eq!(file_contains!(public, "posts/index.html", "has_prev"), false);

     assert!(file_exists!(public, "posts/page/2/index.html"));
-    assert!(file_contains!(public, "posts/page/2/index.html", "Num pages: 2"));
+    assert!(file_contains!(public, "posts/page/2/index.html", "Num pagers: 3"));
     assert!(file_contains!(public, "posts/page/2/index.html", "Page size: 2"));
     assert!(file_contains!(public, "posts/page/2/index.html", "Current index: 2"));
     assert!(file_contains!(public, "posts/page/2/index.html", "has_prev"));
-    assert_eq!(file_contains!(public, "posts/page/2/index.html", "has_next"), false);
+    assert!(file_contains!(public, "posts/page/2/index.html", "has_next"));
     assert!(file_contains!(public, "posts/page/2/index.html", "First: https://replace-this-with-your-url.com/posts"));
-    assert!(file_contains!(public, "posts/page/2/index.html", "Last: https://replace-this-with-your-url.com/posts/page/2"));
+    assert!(file_contains!(public, "posts/page/2/index.html", "Last: https://replace-this-with-your-url.com/posts/page/3"));
 }

 #[test]

@@ -347,10 +346,11 @@ fn test_can_build_site_with_pagination_for_index() {
     path.push("test_site");
     let mut site = Site::new(&path, "config.toml").unwrap();
     site.load().unwrap();
-    let mut index = site.index.unwrap();
-    index.meta.paginate_by = Some(2);
-    index.meta.template = Some("index_paginated.html".to_string());
-    site.index = Some(index);
+    {
+        let mut index = site.sections.get_mut(&path.join("content")).unwrap();
+        index.meta.paginate_by = Some(2);
+        index.meta.template = Some("index_paginated.html".to_string());
+    }
     let tmp_dir = TempDir::new("example").expect("create temp dir");
     let public = &tmp_dir.path().join("public");
     site.set_output_path(&public);

@@ -374,11 +374,11 @@ fn test_can_build_site_with_pagination_for_index() {
         "page/1/index.html",
         "http-equiv=\"refresh\" content=\"0;url=https://replace-this-with-your-url.com/\""
     ));
-    assert!(file_contains!(public, "index.html", "Num pages: 2"));
+    assert!(file_contains!(public, "index.html", "Num pages: 1"));
     assert!(file_contains!(public, "index.html", "Current index: 1"));
-    assert!(file_contains!(public, "index.html", "has_next"));
     assert!(file_contains!(public, "index.html", "First: https://replace-this-with-your-url.com/"));
-    assert!(file_contains!(public, "index.html", "Last: https://replace-this-with-your-url.com/page/2"));
+    assert!(file_contains!(public, "index.html", "Last: https://replace-this-with-your-url.com/"));
     assert_eq!(file_contains!(public, "index.html", "has_prev"), false);
+    assert_eq!(file_contains!(public, "index.html", "has_next"), false);

 }