Small refactor for serialized page/sections
parent dc94aa219b
commit 4e3d231ca9
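For orientation, a minimal usage sketch (not part of the commit): the serializing views now live in content/ser.rs, but mod.rs re-exports them from `content`, so callers keep the same import path. The helper name `render_views` below is hypothetical.

// Hypothetical sketch, assuming code inside the same crate as the `content`
// and `library` modules shown in the diff below.
use content::{Page, Section, SerializingPage, SerializingSection};
use library::Library;

// Builds the template-facing views: `from_page` also resolves the
// lighter/heavier/earlier/later sibling pages, and `from_section` pulls in
// serialized child pages and subsection paths.
fn render_views<'a>(
    page: &'a Page,
    section: &'a Section,
    library: &'a Library,
) -> (SerializingPage<'a>, SerializingSection<'a>) {
    (
        SerializingPage::from_page(page, library),
        SerializingSection::from_section(section, library),
    )
}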
@@ -1,7 +1,9 @@
 mod file_info;
 mod page;
 mod section;
+mod ser;
 
 pub use self::file_info::FileInfo;
-pub use self::page::{Page, SerializingPage};
-pub use self::section::{Section, SerializingSection};
+pub use self::page::Page;
+pub use self::section::Section;
+pub use self::ser::{SerializingPage, SerializingSection};
@@ -2,7 +2,7 @@
 use std::collections::HashMap;
 use std::path::{Path, PathBuf};
 
-use tera::{Tera, Context as TeraContext, Value, Map};
+use tera::{Tera, Context as TeraContext};
 use slug::slugify;
 use slotmap::{Key};
 
@@ -16,129 +16,8 @@ use rendering::{RenderContext, Header, render_content};
 use library::Library;
 
 use content::file_info::FileInfo;
+use content::ser::SerializingPage;
 
-/// What we are sending to the templates when rendering them
-#[derive(Clone, Debug, PartialEq, Serialize)]
-pub struct SerializingPage<'a> {
-    relative_path: &'a str,
-    content: &'a str,
-    permalink: &'a str,
-    slug: &'a str,
-    ancestors: Vec<String>,
-    title: &'a Option<String>,
-    description: &'a Option<String>,
-    date: &'a Option<String>,
-    year: Option<i32>,
-    month: Option<u32>,
-    day: Option<u32>,
-    taxonomies: &'a HashMap<String, Vec<String>>,
-    extra: &'a Map<String, Value>,
-    path: &'a str,
-    components: &'a [String],
-    summary: &'a Option<String>,
-    word_count: Option<usize>,
-    reading_time: Option<usize>,
-    toc: &'a [Header],
-    assets: Vec<String>,
-    draft: bool,
-    lighter: Option<Box<SerializingPage<'a>>>,
-    heavier: Option<Box<SerializingPage<'a>>>,
-    earlier: Option<Box<SerializingPage<'a>>>,
-    later: Option<Box<SerializingPage<'a>>>,
-}
-
-impl<'a> SerializingPage<'a> {
-    /// Grabs all the data from a page, including sibling pages
-    pub fn from_page(page: &'a Page, library: &'a Library) -> Self {
-        let mut year = None;
-        let mut month = None;
-        let mut day = None;
-        if let Some(d) = page.meta.datetime_tuple {
-            year = Some(d.0);
-            month = Some(d.1);
-            day = Some(d.2);
-        }
-        let pages = library.pages();
-        let lighter = page.lighter.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
-        let heavier = page.heavier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
-        let earlier = page.earlier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
-        let later = page.later.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
-        let ancestors = page.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
-
-        SerializingPage {
-            relative_path: &page.file.relative,
-            ancestors,
-            content: &page.content,
-            permalink: &page.permalink,
-            slug: &page.slug,
-            title: &page.meta.title,
-            description: &page.meta.description,
-            extra: &page.meta.extra,
-            date: &page.meta.date,
-            year,
-            month,
-            day,
-            taxonomies: &page.meta.taxonomies,
-            path: &page.path,
-            components: &page.components,
-            summary: &page.summary,
-            word_count: page.word_count,
-            reading_time: page.reading_time,
-            toc: &page.toc,
-            assets: page.serialize_assets(),
-            draft: page.is_draft(),
-            lighter,
-            heavier,
-            earlier,
-            later,
-        }
-    }
-
-    /// Same as from_page but does not fill sibling pages
-    pub fn from_page_basic(page: &'a Page, library: Option<&'a Library>) -> Self {
-        let mut year = None;
-        let mut month = None;
-        let mut day = None;
-        if let Some(d) = page.meta.datetime_tuple {
-            year = Some(d.0);
-            month = Some(d.1);
-            day = Some(d.2);
-        }
-        let ancestors = if let Some(ref lib) = library {
-            page.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
-        } else {
-            vec![]
-        };
-
-        SerializingPage {
-            relative_path: &page.file.relative,
-            ancestors,
-            content: &page.content,
-            permalink: &page.permalink,
-            slug: &page.slug,
-            title: &page.meta.title,
-            description: &page.meta.description,
-            extra: &page.meta.extra,
-            date: &page.meta.date,
-            year,
-            month,
-            day,
-            taxonomies: &page.meta.taxonomies,
-            path: &page.path,
-            components: &page.components,
-            summary: &page.summary,
-            word_count: page.word_count,
-            reading_time: page.reading_time,
-            toc: &page.toc,
-            assets: page.serialize_assets(),
-            draft: page.is_draft(),
-            lighter: None,
-            heavier: None,
-            earlier: None,
-            later: None,
-        }
-    }
-}
 
 #[derive(Clone, Debug, PartialEq)]
 pub struct Page {
@@ -343,7 +222,7 @@ impl Page {
     }
 
     /// Creates a vectors of asset URLs.
-    fn serialize_assets(&self) -> Vec<String> {
+    pub fn serialize_assets(&self) -> Vec<String> {
         self.assets.iter()
             .filter_map(|asset| asset.file_name())
             .filter_map(|filename| filename.to_str())
@@ -1,8 +1,8 @@
 use std::collections::HashMap;
 use std::path::{Path, PathBuf};
 
-use tera::{Tera, Context as TeraContext, Value};
-use slotmap::{Key};
+use tera::{Tera, Context as TeraContext};
+use slotmap::Key;
 
 use config::Config;
 use front_matter::{SectionFrontMatter, split_section_content};
@@ -13,91 +13,10 @@ use utils::site::get_reading_analytics;
 use rendering::{RenderContext, Header, render_content};
 
 use content::file_info::FileInfo;
-use content::SerializingPage;
+use content::ser::SerializingSection;
 use library::Library;
 
-
-#[derive(Clone, Debug, PartialEq, Serialize)]
-pub struct SerializingSection<'a> {
-    relative_path: &'a str,
-    content: &'a str,
-    permalink: &'a str,
-    ancestors: Vec<String>,
-    title: &'a Option<String>,
-    description: &'a Option<String>,
-    extra: &'a HashMap<String, Value>,
-    path: &'a str,
-    components: &'a [String],
-    word_count: Option<usize>,
-    reading_time: Option<usize>,
-    toc: &'a [Header],
-    assets: Vec<String>,
-    pages: Vec<SerializingPage<'a>>,
-    subsections: Vec<&'a str>,
-}
-
-impl<'a> SerializingSection<'a> {
-    pub fn from_section(section: &'a Section, library: &'a Library) -> Self {
-        let mut pages = Vec::with_capacity(section.pages.len());
-        let mut subsections = Vec::with_capacity(section.subsections.len());
-
-        for k in &section.pages {
-            pages.push(library.get_page_by_key(*k).to_serialized(library));
-        }
-
-        for k in &section.subsections {
-            subsections.push(library.get_section_path_by_key(*k));
-        }
-
-        let ancestors = section.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
-
-        SerializingSection {
-            relative_path: &section.file.relative,
-            ancestors,
-            content: &section.content,
-            permalink: &section.permalink,
-            title: &section.meta.title,
-            description: &section.meta.description,
-            extra: &section.meta.extra,
-            path: &section.path,
-            components: &section.components,
-            word_count: section.word_count,
-            reading_time: section.reading_time,
-            toc: &section.toc,
-            assets: section.serialize_assets(),
-            pages,
-            subsections,
-        }
-    }
-
-    /// Same as from_section but doesn't fetch pages and sections
-    pub fn from_section_basic(section: &'a Section, library: Option<&'a Library>) -> Self {
-        let ancestors = if let Some(ref lib) = library {
-            section.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
-        } else {
-            vec![]
-        };
-
-        SerializingSection {
-            relative_path: &section.file.relative,
-            ancestors,
-            content: &section.content,
-            permalink: &section.permalink,
-            title: &section.meta.title,
-            description: &section.meta.description,
-            extra: &section.meta.extra,
-            path: &section.path,
-            components: &section.components,
-            word_count: section.word_count,
-            reading_time: section.reading_time,
-            toc: &section.toc,
-            assets: section.serialize_assets(),
-            pages: vec![],
-            subsections: vec![],
-        }
-    }
-}
 
 #[derive(Clone, Debug, PartialEq)]
 pub struct Section {
     /// All info about the actual file
@@ -260,7 +179,7 @@ impl Section {
     }
 
     /// Creates a vectors of asset URLs.
-    fn serialize_assets(&self) -> Vec<String> {
+    pub fn serialize_assets(&self) -> Vec<String> {
         self.assets.iter()
            .filter_map(|asset| asset.file_name())
            .filter_map(|filename| filename.to_str())
components/library/src/content/ser.rs (new file, 213 lines)
@@ -0,0 +1,213 @@
+//! What we are sending to the templates when rendering them
+use std::collections::HashMap;
+
+use tera::{Value, Map};
+
+use library::Library;
+use content::{Page, Section};
+use rendering::Header;
+
+
+#[derive(Clone, Debug, PartialEq, Serialize)]
+pub struct SerializingPage<'a> {
+    relative_path: &'a str,
+    content: &'a str,
+    permalink: &'a str,
+    slug: &'a str,
+    ancestors: Vec<String>,
+    title: &'a Option<String>,
+    description: &'a Option<String>,
+    date: &'a Option<String>,
+    year: Option<i32>,
+    month: Option<u32>,
+    day: Option<u32>,
+    taxonomies: &'a HashMap<String, Vec<String>>,
+    extra: &'a Map<String, Value>,
+    path: &'a str,
+    components: &'a [String],
+    summary: &'a Option<String>,
+    word_count: Option<usize>,
+    reading_time: Option<usize>,
+    toc: &'a [Header],
+    assets: Vec<String>,
+    draft: bool,
+    lighter: Option<Box<SerializingPage<'a>>>,
+    heavier: Option<Box<SerializingPage<'a>>>,
+    earlier: Option<Box<SerializingPage<'a>>>,
+    later: Option<Box<SerializingPage<'a>>>,
+}
+
+impl<'a> SerializingPage<'a> {
+    /// Grabs all the data from a page, including sibling pages
+    pub fn from_page(page: &'a Page, library: &'a Library) -> Self {
+        let mut year = None;
+        let mut month = None;
+        let mut day = None;
+        if let Some(d) = page.meta.datetime_tuple {
+            year = Some(d.0);
+            month = Some(d.1);
+            day = Some(d.2);
+        }
+        let pages = library.pages();
+        let lighter = page.lighter.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let heavier = page.heavier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let earlier = page.earlier.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let later = page.later.map(|k| Box::new(Self::from_page_basic(pages.get(k).unwrap(), Some(library))));
+        let ancestors = page.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
+
+        SerializingPage {
+            relative_path: &page.file.relative,
+            ancestors,
+            content: &page.content,
+            permalink: &page.permalink,
+            slug: &page.slug,
+            title: &page.meta.title,
+            description: &page.meta.description,
+            extra: &page.meta.extra,
+            date: &page.meta.date,
+            year,
+            month,
+            day,
+            taxonomies: &page.meta.taxonomies,
+            path: &page.path,
+            components: &page.components,
+            summary: &page.summary,
+            word_count: page.word_count,
+            reading_time: page.reading_time,
+            toc: &page.toc,
+            assets: page.serialize_assets(),
+            draft: page.is_draft(),
+            lighter,
+            heavier,
+            earlier,
+            later,
+        }
+    }
+
+    /// Same as from_page but does not fill sibling pages
+    pub fn from_page_basic(page: &'a Page, library: Option<&'a Library>) -> Self {
+        let mut year = None;
+        let mut month = None;
+        let mut day = None;
+        if let Some(d) = page.meta.datetime_tuple {
+            year = Some(d.0);
+            month = Some(d.1);
+            day = Some(d.2);
+        }
+        let ancestors = if let Some(ref lib) = library {
+            page.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
+        } else {
+            vec![]
+        };
+
+        SerializingPage {
+            relative_path: &page.file.relative,
+            ancestors,
+            content: &page.content,
+            permalink: &page.permalink,
+            slug: &page.slug,
+            title: &page.meta.title,
+            description: &page.meta.description,
+            extra: &page.meta.extra,
+            date: &page.meta.date,
+            year,
+            month,
+            day,
+            taxonomies: &page.meta.taxonomies,
+            path: &page.path,
+            components: &page.components,
+            summary: &page.summary,
+            word_count: page.word_count,
+            reading_time: page.reading_time,
+            toc: &page.toc,
+            assets: page.serialize_assets(),
+            draft: page.is_draft(),
+            lighter: None,
+            heavier: None,
+            earlier: None,
+            later: None,
+        }
+    }
+}
+
+
+#[derive(Clone, Debug, PartialEq, Serialize)]
+pub struct SerializingSection<'a> {
+    relative_path: &'a str,
+    content: &'a str,
+    permalink: &'a str,
+    ancestors: Vec<String>,
+    title: &'a Option<String>,
+    description: &'a Option<String>,
+    extra: &'a HashMap<String, Value>,
+    path: &'a str,
+    components: &'a [String],
+    word_count: Option<usize>,
+    reading_time: Option<usize>,
+    toc: &'a [Header],
+    assets: Vec<String>,
+    pages: Vec<SerializingPage<'a>>,
+    subsections: Vec<&'a str>,
+}
+
+impl<'a> SerializingSection<'a> {
+    pub fn from_section(section: &'a Section, library: &'a Library) -> Self {
+        let mut pages = Vec::with_capacity(section.pages.len());
+        let mut subsections = Vec::with_capacity(section.subsections.len());
+
+        for k in &section.pages {
+            pages.push(library.get_page_by_key(*k).to_serialized(library));
+        }
+
+        for k in &section.subsections {
+            subsections.push(library.get_section_path_by_key(*k));
+        }
+
+        let ancestors = section.ancestors.iter().map(|k| library.get_section_by_key(*k).file.relative.clone()).collect();
+
+        SerializingSection {
+            relative_path: &section.file.relative,
+            ancestors,
+            content: &section.content,
+            permalink: &section.permalink,
+            title: &section.meta.title,
+            description: &section.meta.description,
+            extra: &section.meta.extra,
+            path: &section.path,
+            components: &section.components,
+            word_count: section.word_count,
+            reading_time: section.reading_time,
+            toc: &section.toc,
+            assets: section.serialize_assets(),
+            pages,
+            subsections,
+        }
+    }
+
+    /// Same as from_section but doesn't fetch pages and sections
+    pub fn from_section_basic(section: &'a Section, library: Option<&'a Library>) -> Self {
+        let ancestors = if let Some(ref lib) = library {
+            section.ancestors.iter().map(|k| lib.get_section_by_key(*k).file.relative.clone()).collect()
+        } else {
+            vec![]
+        };
+
+        SerializingSection {
+            relative_path: &section.file.relative,
+            ancestors,
+            content: &section.content,
+            permalink: &section.permalink,
+            title: &section.meta.title,
+            description: &section.meta.description,
+            extra: &section.meta.extra,
+            path: &section.path,
+            components: &section.components,
+            word_count: section.word_count,
+            reading_time: section.reading_time,
+            toc: &section.toc,
+            assets: section.serialize_assets(),
+            pages: vec![],
+            subsections: vec![],
+        }
+    }
+}
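A closing note on the constructors defined in ser.rs above (sketch only, not part of the diff): the `*_basic` variants take an `Option<&Library>`; they only resolve ancestors when a library is provided, and they never fill sibling pages (for pages) or child pages/subsections (for sections). The function name `section_views` below is hypothetical.

// Hypothetical sketch contrasting the two SerializingSection constructors.
use content::{Section, SerializingSection};
use library::Library;

fn section_views<'a>(section: &'a Section, library: &'a Library) {
    // Full view: child pages, subsection paths and ancestors are all
    // resolved through the library's keys.
    let full = SerializingSection::from_section(section, library);

    // Basic view: without a library, ancestors fall back to an empty Vec,
    // and `pages`/`subsections` are left empty.
    let basic = SerializingSection::from_section_basic(section, None);

    let _ = (full, basic);
}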