Have a list of ancestors instead of only parent section

This commit is contained in:
Vincent Prouillet 2018-10-18 15:54:51 +02:00
parent f14dbcbdf4
commit 957c6bed9d
11 changed files with 120 additions and 67 deletions

View file

@ -27,8 +27,8 @@ Tera function
- RSS feed now takes all available articles by default instead of limiting to 10000
- `templates` directory is now optional
- Add Reason and F# syntax highlighting
- Add `parent_section` to pages and section pointing to the relative path of the parent
section if there is one to be used with the `get_section` Tera function
- Add `ancestors` to pages and sections pointing to the relative paths of all ancestor
sections up to the index, to be used with the `get_section` Tera function
## 0.4.2 (2018-09-03)

View file

@ -23,7 +23,7 @@ pub struct SerializingPage<'a> {
content: &'a str,
permalink: &'a str,
slug: &'a str,
parent_section: Option<String>,
ancestors: Vec<String>,
title: &'a Option<String>,
description: &'a Option<String>,
date: &'a Option<String>,
@ -58,14 +58,14 @@ impl<'a> SerializingPage<'a> {
day = Some(d.2);
}
let pages = library.pages();
let lighter = page.lighter.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));
let heavier = page.heavier.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));
let earlier = page.earlier.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));
let later = page.later.map(|k| Box::new(SerializingPage::from_page_basic(pages.get(k).unwrap())));
let parent_section = page.parent_section.map(|k| library.get_section_by_key(k).file.relative.clone());
let lighter = page.lighter.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let heavier = page.heavier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let earlier = page.earlier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let later = page.later.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
let ancestors = page.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
SerializingPage {
parent_section,
ancestors,
content: &page.content,
permalink: &page.permalink,
slug: &page.slug,
@ -93,7 +93,7 @@ impl<'a> SerializingPage<'a> {
}
/// Same as from_page but does not fill sibling pages
pub fn from_page_basic(page: &'a Page) -> Self {
pub fn from_page_basic(page: &'a Page, library: Option<&'a Library>) -> Self {
let mut year = None;
let mut month = None;
let mut day = None;
@ -102,9 +102,14 @@ impl<'a> SerializingPage<'a> {
month = Some(d.1);
day = Some(d.2);
}
let ancestors = if let Some(ref lib) = library {
page.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
} else {
vec![]
};
SerializingPage {
parent_section: None,
ancestors,
content: &page.content,
permalink: &page.permalink,
slug: &page.slug,
@ -138,8 +143,8 @@ pub struct Page {
pub file: FileInfo,
/// The front matter meta-data
pub meta: PageFrontMatter,
/// The parent section if there is one
pub parent_section: Option<Key>,
/// The list of parent sections
pub ancestors: Vec<Key>,
/// The actual content of the page, in markdown
pub raw_content: String,
/// All the non-md files we found next to the .md file
@ -184,7 +189,7 @@ impl Page {
Page {
file: FileInfo::new_page(file_path),
meta,
parent_section: None,
ancestors: vec![],
raw_content: "".to_string(),
assets: vec![],
content: "".to_string(),
@ -305,7 +310,7 @@ impl Page {
anchor_insert,
);
context.tera_context.insert("page", &SerializingPage::from_page_basic(self));
context.tera_context.insert("page", &SerializingPage::from_page_basic(self, None));
let res = render_content(&self.raw_content, &context)
.chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?;
@ -347,8 +352,8 @@ impl Page {
SerializingPage::from_page(self, library)
}
pub fn to_serialized_basic(&self) -> SerializingPage {
SerializingPage::from_page_basic(self)
pub fn to_serialized_basic<'a>(&'a self, library: &'a Library) -> SerializingPage<'a> {
SerializingPage::from_page_basic(self, Some(library))
}
}
@ -357,7 +362,7 @@ impl Default for Page {
Page {
file: FileInfo::default(),
meta: PageFrontMatter::default(),
parent_section: None,
ancestors: vec![],
raw_content: "".to_string(),
assets: vec![],
content: "".to_string(),

View file

@ -21,7 +21,7 @@ use library::Library;
pub struct SerializingSection<'a> {
content: &'a str,
permalink: &'a str,
parent_section: Option<String>,
ancestors: Vec<String>,
title: &'a Option<String>,
description: &'a Option<String>,
extra: &'a HashMap<String, Value>,
@ -48,10 +48,10 @@ impl<'a> SerializingSection<'a> {
subsections.push(library.get_section_path_by_key(*k));
}
let parent_section = section.parent_section.map(|k| library.get_section_by_key(k).file.relative.clone());
let ancestors = section.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
SerializingSection {
parent_section,
ancestors,
content: &section.content,
permalink: &section.permalink,
title: &section.meta.title,
@ -69,9 +69,15 @@ impl<'a> SerializingSection<'a> {
}
/// Same as from_section but doesn't fetch pages and sections
pub fn from_section_basic(section: &'a Section) -> Self {
pub fn from_section_basic(section: &'a Section, library: Option<&'a Library>) -> Self {
let ancestors = if let Some(ref lib) = library {
section.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
} else {
vec![]
};
SerializingSection {
parent_section: None,
ancestors,
content: &section.content,
permalink: &section.permalink,
title: &section.meta.title,
@ -111,8 +117,8 @@ pub struct Section {
pub pages: Vec<Key>,
/// All pages that cannot be sorted in this section
pub ignored_pages: Vec<Key>,
/// The relative path of the parent section if there is one
pub parent_section: Option<Key>,
/// The list of parent sections
pub ancestors: Vec<Key>,
/// All direct subsections
pub subsections: Vec<Key>,
/// Toc made from the headers of the markdown file
@ -131,7 +137,7 @@ impl Section {
Section {
file: FileInfo::new_section(file_path),
meta,
parent_section: None,
ancestors: vec![],
path: "".to_string(),
components: vec![],
permalink: "".to_string(),
@ -222,7 +228,7 @@ impl Section {
self.meta.insert_anchor_links,
);
context.tera_context.insert("section", &SerializingSection::from_section_basic(self));
context.tera_context.insert("section", &SerializingSection::from_section_basic(self, None));
let res = render_content(&self.raw_content, &context)
.chain_err(|| format!("Failed to render content of {}", self.file.path.display()))?;
@ -270,7 +276,7 @@ impl Default for Section {
Section {
file: FileInfo::default(),
meta: SectionFrontMatter::default(),
parent_section: None,
ancestors: vec![],
path: "".to_string(),
components: vec![],
permalink: "".to_string(),

View file

@ -25,7 +25,7 @@ pub struct Library {
/// A mapping path -> key for pages so we can easily get their key
paths_to_pages: HashMap<PathBuf, Key>,
/// A mapping path -> key for sections so we can easily get their key
paths_to_sections: HashMap<PathBuf, Key>,
pub paths_to_sections: HashMap<PathBuf, Key>,
}
impl Library {
@ -81,25 +81,58 @@ impl Library {
/// Find out the direct subsections of each subsection if there are some
/// as well as the pages for each section
pub fn populate_sections(&mut self) {
let mut grandparent_paths: HashMap<PathBuf, Vec<_>> = HashMap::new();
let (root_path, index_path) = self.sections
.values()
.find(|s| s.is_index())
.map(|s| (s.file.parent.clone(), s.file.path.clone()))
.unwrap();
let root_key = self.paths_to_sections[&index_path];
// We are going to get both the ancestors and grandparents for each section in one go
let mut ancestors: HashMap<PathBuf, Vec<_>> = HashMap::new();
let mut subsections: HashMap<PathBuf, Vec<_>> = HashMap::new();
for section in self.sections.values_mut() {
if let Some(ref grand_parent) = section.file.grand_parent {
grandparent_paths
.entry(grand_parent.to_path_buf())
.or_insert_with(|| vec![])
.push(section.file.path.clone());
}
// Make sure the pages of a section are empty since we can call that many times on `serve`
section.pages = vec![];
section.ignored_pages = vec![];
if let Some(ref grand_parent) = section.file.grand_parent {
subsections
.entry(grand_parent.join("_index.md"))
.or_insert_with(|| vec![])
.push(section.file.path.clone());
}
// Index has no ancestors, no need to go through it
if section.is_index() {
ancestors.insert(section.file.path.clone(), vec![]);
continue;
}
let mut path = root_path.clone();
// Index section is the first ancestor of every single section
let mut parents = vec![root_key.clone()];
for component in &section.file.components {
path = path.join(component);
// Skip itself
if path == section.file.parent {
continue;
}
if let Some(section_key) = self.paths_to_sections.get(&path.join("_index.md")) {
parents.push(*section_key);
}
}
ancestors.insert(section.file.path.clone(), parents);
}
for (key, page) in &mut self.pages {
let parent_section_path = page.file.parent.join("_index.md");
if let Some(section_key) = self.paths_to_sections.get(&parent_section_path) {
self.sections.get_mut(*section_key).unwrap().pages.push(key);
page.parent_section = Some(*section_key);
page.ancestors = ancestors.get(&parent_section_path).cloned().unwrap_or_else(|| vec![]);
// Don't forget to push the actual parent
page.ancestors.push(*section_key);
}
}
@ -111,22 +144,13 @@ impl Library {
sections_weight.insert(key, section.meta.weight);
}
for (grandparent, children) in &grandparent_paths {
let mut subsections = vec![];
let grandparent_path = grandparent.join("_index.md");
if let Some(ref mut section) = self.get_section_mut(&grandparent_path) {
subsections = children.iter().map(|p| sections[p]).collect();
subsections.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b]));
section.subsections = subsections.clone();
}
// Only there for subsections so we must have a parent section
for key in &subsections {
if let Some(ref mut subsection) = self.sections.get_mut(*key) {
subsection.parent_section = Some(sections[&grandparent_path]);
}
for section in self.sections.values_mut() {
if let Some(ref children) = subsections.get(&section.file.path) {
let mut children: Vec<_> = children.iter().map(|p| sections[p]).collect();
children.sort_by(|a, b| sections_weight[a].cmp(&sections_weight[b]));
section.subsections = children;
}
section.ancestors = ancestors.get(&section.file.path).cloned().unwrap_or_else(|| vec![]);
}
}

View file

@ -108,7 +108,7 @@ impl<'a> Paginator<'a> {
for key in self.all_pages {
let page = library.get_page_by_key(*key);
current_page.push(page.to_serialized_basic());
current_page.push(page.to_serialized_basic(library));
if current_page.len() == self.paginate_by {
pages.push(current_page);
@ -188,12 +188,12 @@ impl<'a> Paginator<'a> {
paginator
}
pub fn render_pager(&self, pager: &Pager, config: &Config, tera: &Tera) -> Result<String> {
pub fn render_pager(&self, pager: &Pager, config: &Config, tera: &Tera, library: &Library) -> Result<String> {
let mut context = Context::new();
context.insert("config", &config);
let template_name = match self.root {
PaginationRoot::Section(s) => {
context.insert("section", &SerializingSection::from_section_basic(s));
context.insert("section", &SerializingSection::from_section_basic(s, Some(library)));
s.get_template_name()
}
PaginationRoot::Taxonomy(t) => {

View file

@ -26,7 +26,7 @@ impl<'a> SerializedTaxonomyItem<'a> {
for key in &item.pages {
let page = library.get_page_by_key(*key);
pages.push(page.to_serialized_basic());
pages.push(page.to_serialized_basic(library));
}
SerializedTaxonomyItem {

View file

@ -125,6 +125,7 @@ fn handle_section_editing(site: &mut Site, path: &Path) -> Result<()> {
s.ignored_pages = prev.ignored_pages;
s.subsections = prev.subsections;
}
site.populate_sections();
if site.library.get_section(&pathbuf).unwrap().meta == prev.meta {
// Front matter didn't change, only content did

View file

@ -63,7 +63,7 @@ fn bench_render_paginated(b: &mut test::Bencher) {
let public = &tmp_dir.path().join("public");
site.set_output_path(&public);
let section = site.library.sections_values()[0];
let paginator = Paginator::from_section(&section, site.library.pages());
let paginator = Paginator::from_section(&section, &site.library);
b.iter(|| site.render_paginated(public, &paginator));
}

View file

@ -735,7 +735,7 @@ impl Site {
let p = pages
.iter()
.take(num_entries)
.map(|x| x.to_serialized_basic())
.map(|x| x.to_serialized_basic(&self.library))
.collect::<Vec<_>>();
context.insert("pages", &p);
@ -856,7 +856,7 @@ impl Site {
.map(|pager| {
let page_path = folder_path.join(&format!("{}", pager.index));
create_directory(&page_path)?;
let output = paginator.render_pager(pager, &self.config, &self.tera)?;
let output = paginator.render_pager(pager, &self.config, &self.tera, &self.library)?;
if pager.index > 1 {
create_file(&page_path.join("index.html"), &self.inject_livereload(output))?;
} else {

View file

@ -37,17 +37,23 @@ fn can_parse_site() {
let index_section = site.library.get_section(&path.join("content").join("_index.md")).unwrap();
assert_eq!(index_section.subsections.len(), 3);
assert_eq!(index_section.pages.len(), 1);
assert!(index_section.parent_section.is_none());
assert!(index_section.ancestors.is_empty());
let posts_section = site.library.get_section(&posts_path.join("_index.md")).unwrap();
assert_eq!(posts_section.subsections.len(), 1);
assert_eq!(posts_section.pages.len(), 7);
assert_eq!(posts_section.parent_section, Some(*site.library.get_section_key(&index_section.file.path).unwrap()));
assert_eq!(posts_section.ancestors, vec![*site.library.get_section_key(&index_section.file.path).unwrap()]);
// Make sure we remove all the pwd + content from the sections
let basic = site.library.get_page(&posts_path.join("simple.md")).unwrap();
assert_eq!(basic.file.components, vec!["posts".to_string()]);
assert_eq!(basic.parent_section, Some(*site.library.get_section_key(&posts_section.file.path).unwrap()));
assert_eq!(
basic.ancestors,
vec![
*site.library.get_section_key(&index_section.file.path).unwrap(),
*site.library.get_section_key(&posts_section.file.path).unwrap(),
]
);
let tutorials_section = site.library.get_section(&posts_path.join("tutorials").join("_index.md")).unwrap();
assert_eq!(tutorials_section.subsections.len(), 2);
@ -60,7 +66,14 @@ fn can_parse_site() {
let devops_section = site.library.get_section(&posts_path.join("tutorials").join("devops").join("_index.md")).unwrap();
assert_eq!(devops_section.subsections.len(), 0);
assert_eq!(devops_section.pages.len(), 2);
assert_eq!(devops_section.parent_section, Some(*site.library.get_section_key(&tutorials_section.file.path).unwrap()));
assert_eq!(
devops_section.ancestors,
vec![
*site.library.get_section_key(&index_section.file.path).unwrap(),
*site.library.get_section_key(&posts_section.file.path).unwrap(),
*site.library.get_section_key(&tutorials_section.file.path).unwrap(),
]
);
let prog_section = site.library.get_section(&posts_path.join("tutorials").join("programming").join("_index.md")).unwrap();
assert_eq!(prog_section.subsections.len(), 0);

View file

@ -45,8 +45,10 @@ month: Number?;
day: Number?;
// Paths of colocated assets, relative to the content directory
assets: Array<String>;
// The relative path of the parent section if existing, for use with the `get_section` Tera function
parent_section: String?;
// The relative paths of the parent sections up to the index one, for use with the `get_section` Tera function
// The first item is the index section and the last one is the parent section
// This is filled after rendering a page content so it will be empty in shortcodes
ancestors: Array<String>;
```
## Section variables
@ -83,8 +85,10 @@ reading_time: Number;
toc: Array<Header>;
// Paths of colocated assets, relative to the content directory
assets: Array<String>;
// The relative path of the parent section if existing, for use with the `get_section` Tera function
parent_section: String?;
// The relative paths of the parent sections up to the index one, for use with the `get_section` Tera function
// The first item is the index section and the last one is the parent section
// This is filled after rendering a page content so it will be empty in shortcodes
ancestors: Array<String>;
```
## Table of contents