2017-03-03 08:12:40 +00:00
|
|
|
use std::collections::HashMap;
|
2017-03-07 06:01:20 +00:00
|
|
|
use std::iter::FromIterator;
|
2017-03-10 11:39:58 +00:00
|
|
|
use std::fs::{remove_dir_all, copy, remove_file};
|
2017-03-03 08:12:40 +00:00
|
|
|
use std::path::Path;
|
|
|
|
|
|
|
|
use glob::glob;
|
|
|
|
use tera::{Tera, Context};
|
2017-03-07 06:01:20 +00:00
|
|
|
use slug::slugify;
|
2017-03-09 07:34:12 +00:00
|
|
|
use walkdir::WalkDir;
|
2017-03-03 08:12:40 +00:00
|
|
|
|
|
|
|
use errors::{Result, ResultExt};
|
|
|
|
use config::{Config, get_config};
|
|
|
|
use page::Page;
|
2017-03-10 11:39:58 +00:00
|
|
|
use utils::{create_file, create_directory};
|
2017-03-03 08:12:40 +00:00
|
|
|
|
|
|
|
|
2017-03-09 07:46:38 +00:00
|
|
|
lazy_static! {
    /// Tera instance holding the templates shipped with the binary
    /// (RSS feed and sitemap). User templates are extended with these
    /// in `Site::new`.
    static ref GUTENBERG_TERA: Tera = {
        let mut tera = Tera::default();
        // The template sources are embedded at compile time via
        // `include_str!`, so `add_raw_templates` can only fail on a
        // syntax error in our own templates — unwrap is acceptable here.
        tera.add_raw_templates(vec![
            ("rss.xml", include_str!("templates/rss.xml")),
            ("sitemap.xml", include_str!("templates/sitemap.xml")),
        ]).unwrap();
        tera
    };
}
|
|
|
|
|
|
|
|
|
2017-03-07 06:01:20 +00:00
|
|
|
/// Which kind of listing is being rendered; used by
/// `render_categories_and_tags` to pick the output directory,
/// templates and context variable names.
#[derive(Debug, PartialEq)]
enum RenderList {
    Tags,
    Categories,
}
|
|
|
|
|
|
|
|
/// A tag or category
#[derive(Debug, Serialize, PartialEq)]
struct ListItem {
    // Display name as written in the page front-matter
    name: String,
    // URL-safe version of `name`, produced by `slug::slugify`
    slug: String,
    // Number of pages carrying this tag/category
    count: usize,
}
|
|
|
|
|
|
|
|
impl ListItem {
|
|
|
|
pub fn new(name: &str, count: usize) -> ListItem {
|
|
|
|
ListItem {
|
|
|
|
name: name.to_string(),
|
|
|
|
slug: slugify(name),
|
|
|
|
count: count,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2017-03-03 08:12:40 +00:00
|
|
|
#[derive(Debug)]
pub struct Site {
    config: Config,
    // Every parsed page, keyed by its slug
    pages: HashMap<String, Page>,
    // Maps a section name to the slugs of the pages in that section
    sections: HashMap<String, Vec<String>>,
    // Tera instance: user templates extended with the built-in GUTENBERG_TERA
    templates: Tera,
    // When true, a livereload <script> tag is injected into rendered HTML
    live_reload: bool,
}
|
|
|
|
|
|
|
|
impl Site {
|
2017-03-06 10:35:56 +00:00
|
|
|
pub fn new(livereload: bool) -> Result<Site> {
|
2017-03-09 07:46:38 +00:00
|
|
|
let mut tera = Tera::new("templates/**/*")
|
|
|
|
.chain_err(|| "Error parsing templates")?;
|
|
|
|
tera.extend(&GUTENBERG_TERA)?;
|
|
|
|
|
2017-03-03 08:12:40 +00:00
|
|
|
let mut site = Site {
|
|
|
|
config: get_config(),
|
|
|
|
pages: HashMap::new(),
|
|
|
|
sections: HashMap::new(),
|
|
|
|
templates: tera,
|
2017-03-06 10:35:56 +00:00
|
|
|
live_reload: livereload,
|
2017-03-03 08:12:40 +00:00
|
|
|
};
|
|
|
|
site.parse_site()?;
|
|
|
|
|
|
|
|
Ok(site)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Reads all .md files in the `content` directory and create pages
|
|
|
|
/// out of them
|
|
|
|
fn parse_site(&mut self) -> Result<()> {
|
|
|
|
// First step: do all the articles and group article by sections
|
|
|
|
// hardcoded pattern so can't error
|
|
|
|
for entry in glob("content/**/*.md").unwrap().filter_map(|e| e.ok()) {
|
2017-03-06 14:45:57 +00:00
|
|
|
let page = Page::from_file(&entry.as_path(), &self.config)?;
|
2017-03-03 08:12:40 +00:00
|
|
|
|
|
|
|
for section in &page.sections {
|
2017-03-10 13:19:36 +00:00
|
|
|
self.sections.entry(section.clone()).or_insert_with(|| vec![]).push(page.slug.clone());
|
2017-03-03 08:12:40 +00:00
|
|
|
}
|
|
|
|
|
2017-03-06 14:45:57 +00:00
|
|
|
self.pages.insert(page.slug.clone(), page);
|
2017-03-03 08:12:40 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2017-03-06 10:35:56 +00:00
|
|
|
// Inject live reload script tag if in live reload mode
|
|
|
|
fn inject_livereload(&self, html: String) -> String {
|
|
|
|
if self.live_reload {
|
|
|
|
return html.replace(
|
|
|
|
"</body>",
|
|
|
|
r#"<script src="/livereload.js?port=1112&mindelay=10"></script></body>"#
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
|
|
|
html
|
|
|
|
}
|
|
|
|
|
2017-03-08 04:21:45 +00:00
|
|
|
|
|
|
|
/// Copy the content of the `static` folder into the `public` folder
|
2017-03-09 07:34:12 +00:00
|
|
|
///
|
|
|
|
/// TODO: only copy one file if possible because that would be a waster
|
|
|
|
/// to do re-copy the whole thing
|
|
|
|
pub fn copy_static_directory(&self) -> Result<()> {
|
|
|
|
let from = Path::new("static");
|
|
|
|
let target = Path::new("public");
|
|
|
|
|
|
|
|
for entry in WalkDir::new(from).into_iter().filter_map(|e| e.ok()) {
|
|
|
|
let relative_path = entry.path().strip_prefix(&from).unwrap();
|
|
|
|
let target_path = {
|
|
|
|
let mut target_path = target.to_path_buf();
|
|
|
|
target_path.push(relative_path);
|
|
|
|
target_path
|
|
|
|
};
|
|
|
|
|
|
|
|
if entry.path().is_dir() {
|
|
|
|
if !target_path.exists() {
|
2017-03-10 11:39:58 +00:00
|
|
|
create_directory(&target_path)?;
|
2017-03-09 07:34:12 +00:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if target_path.exists() {
|
|
|
|
remove_file(&target_path)?;
|
|
|
|
}
|
|
|
|
copy(entry.path(), &target_path)?;
|
|
|
|
}
|
|
|
|
}
|
2017-03-08 04:21:45 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2017-03-10 11:39:58 +00:00
|
|
|
/// Deletes the `public` directory if it exists
|
|
|
|
pub fn clean(&self) -> Result<()> {
|
|
|
|
if Path::new("public").exists() {
|
|
|
|
// Delete current `public` directory so we can start fresh
|
|
|
|
remove_dir_all("public").chain_err(|| "Couldn't delete `public` directory")?;
|
|
|
|
}
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
|
2017-03-10 12:36:43 +00:00
|
|
|
/// Re-parses the whole `content` directory and rebuilds the entire site.
/// Intended to be called by a file watcher when a content file changes.
pub fn rebuild_after_content_change(&mut self) -> Result<()> {
    self.parse_site()?;
    self.build()
}
|
|
|
|
|
2017-03-10 11:39:58 +00:00
|
|
|
/// Reloads all templates from disk and re-renders the pages.
/// Note: only `build_pages` is re-run — static files, sitemap and RSS
/// are not regenerated by this path.
pub fn rebuild_after_template_change(&mut self) -> Result<()> {
    self.templates.full_reload()?;
    self.build_pages()
}
|
2017-03-03 08:12:40 +00:00
|
|
|
|
2017-03-10 11:39:58 +00:00
|
|
|
/// Renders every page into `public` (mirroring section nesting), then the
/// category/tag listings and finally the index page.
/// Creates the `public` directory if it doesn't exist yet.
pub fn build_pages(&self) -> Result<()> {
    let public = Path::new("public");
    if !public.exists() {
        create_directory(&public)?;
    }

    let mut pages = vec![];
    // Pages grouped by category / tag while rendering, consumed by the
    // second pass (render_categories_and_tags) below.
    let mut category_pages: HashMap<String, Vec<&Page>> = HashMap::new();
    let mut tag_pages: HashMap<String, Vec<&Page>> = HashMap::new();
    // First we render the pages themselves
    for page in self.pages.values() {
        // Copy the nesting of the content directory if we have sections for that page
        let mut current_path = public.to_path_buf();

        // This loop happens when the page doesn't have a set URL
        for section in &page.sections {
            current_path.push(section);

            if !current_path.exists() {
                create_directory(&current_path)?;
            }
        }

        // if we have a url already set, use that as base
        if let Some(ref url) = page.meta.url {
            current_path.push(url);
        }

        // Make sure the folder exists
        create_directory(&current_path)?;
        // Finally, create a index.html file there with the page rendered
        let output = page.render_html(&self.templates, &self.config)?;
        create_file(current_path.join("index.html"), &self.inject_livereload(output))?;
        // Copy any asset we found previously into the same directory as the index.html
        for asset in &page.assets {
            let asset_path = asset.as_path();
            copy(&asset_path, &current_path.join(asset_path.file_name().unwrap()))?;
        }
        pages.push(page);

        if let Some(ref category) = page.meta.category {
            category_pages.entry(category.to_string()).or_insert_with(|| vec![]).push(page);
        }
        if let Some(ref tags) = page.meta.tags {
            for tag in tags {
                tag_pages.entry(tag.to_string()).or_insert_with(|| vec![]).push(page);
            }
        }
    }

    // Outputting categories and pages
    self.render_categories_and_tags(RenderList::Categories, &category_pages)?;
    self.render_categories_and_tags(RenderList::Tags, &tag_pages)?;

    // And finally the index page
    let mut context = Context::new();
    pages.sort_by(|a, b| a.partial_cmp(b).unwrap());
    context.add("pages", &pages);
    context.add("config", &self.config);
    let index = self.templates.render("index.html", &context)?;
    create_file(public.join("index.html"), &self.inject_livereload(index))?;

    Ok(())
}
|
2017-03-10 11:39:58 +00:00
|
|
|
|
|
|
|
/// Builds the site to the `public` directory after deleting it
|
|
|
|
pub fn build(&self) -> Result<()> {
|
|
|
|
self.clean()?;
|
|
|
|
self.build_pages()?;
|
|
|
|
self.render_sitemap()?;
|
2017-03-12 03:59:28 +00:00
|
|
|
if self.config.generate_rss.unwrap() {
|
|
|
|
self.render_rss_feed()?;
|
|
|
|
}
|
2017-03-10 11:39:58 +00:00
|
|
|
self.copy_static_directory()
|
|
|
|
}
|
|
|
|
|
2017-03-07 06:01:20 +00:00
|
|
|
/// Render the /{categories, list} pages and each individual category/tag page
fn render_categories_and_tags(&self, kind: RenderList, container: &HashMap<String, Vec<&Page>>) -> Result<()> {
    // Nothing to render — don't even create the output directory.
    if container.is_empty() {
        return Ok(());
    }

    // Output directory, the two template names and the context variable
    // name depend on whether we render categories or tags.
    let (name, list_tpl_name, single_tpl_name, var_name) = if kind == RenderList::Categories {
        ("categories", "categories.html", "category.html", "category")
    } else {
        ("tags", "tags.html", "tag.html", "tag")
    };

    let public = Path::new("public");
    let mut output_path = public.to_path_buf();
    output_path.push(name);
    create_directory(&output_path)?;

    // First we render the list of categories/tags page
    let mut sorted_container = vec![];
    for (item, count) in Vec::from_iter(container).into_iter().map(|(a, b)| (a, b.len())) {
        sorted_container.push(ListItem::new(item, count));
    }
    // Most-used item first
    sorted_container.sort_by(|a, b| b.count.cmp(&a.count));

    let mut context = Context::new();
    context.add(name, &sorted_container);
    context.add("config", &self.config);

    let list_output = self.templates.render(list_tpl_name, &context)?;
    create_file(output_path.join("index.html"), &self.inject_livereload(list_output))?;

    // and then each individual item
    for (item_name, mut pages) in container.clone() {
        let mut context = Context::new();
        pages.sort_by(|a, b| a.partial_cmp(b).unwrap());
        let slug = slugify(&item_name);
        context.add(var_name, &item_name);
        // e.g. `category_slug` / `tag_slug`, for building links in templates
        context.add(&format!("{}_slug", var_name), &slug);
        context.add("pages", &pages);
        context.add("config", &self.config);
        let single_output = self.templates.render(single_tpl_name, &context)?;

        // Each item gets its own directory: public/{name}/{slug}/index.html
        create_directory(&output_path.join(&slug))?;
        create_file(
            output_path.join(&slug).join("index.html"),
            &self.inject_livereload(single_output)
        )?;
    }

    Ok(())
}
|
|
|
|
|
2017-03-07 07:43:27 +00:00
|
|
|
fn render_sitemap(&self) -> Result<()> {
|
2017-03-06 14:45:57 +00:00
|
|
|
let mut context = Context::new();
|
|
|
|
context.add("pages", &self.pages.values().collect::<Vec<&Page>>());
|
2017-03-09 07:46:38 +00:00
|
|
|
let sitemap = self.templates.render("sitemap.xml", &context)?;
|
2017-03-06 14:45:57 +00:00
|
|
|
|
|
|
|
let public = Path::new("public");
|
|
|
|
create_file(public.join("sitemap.xml"), &sitemap)?;
|
|
|
|
|
2017-03-03 08:12:40 +00:00
|
|
|
Ok(())
|
|
|
|
}
|
2017-03-07 07:43:27 +00:00
|
|
|
|
|
|
|
fn render_rss_feed(&self) -> Result<()> {
|
|
|
|
let mut context = Context::new();
|
|
|
|
let mut pages = self.pages.values()
|
|
|
|
.filter(|p| p.meta.date.is_some())
|
|
|
|
.take(15) // limit to the last 15 elements
|
|
|
|
.collect::<Vec<&Page>>();
|
|
|
|
|
|
|
|
// Don't generate a RSS feed if none of the pages has a date
|
|
|
|
if pages.is_empty() {
|
|
|
|
return Ok(());
|
|
|
|
}
|
|
|
|
pages.sort_by(|a, b| a.partial_cmp(b).unwrap());
|
|
|
|
context.add("pages", &pages);
|
|
|
|
context.add("last_build_date", &pages[0].meta.date);
|
|
|
|
context.add("config", &self.config);
|
2017-03-10 11:39:58 +00:00
|
|
|
|
2017-03-10 13:19:36 +00:00
|
|
|
let rss_feed_url = if self.config.base_url.ends_with('/') {
|
2017-03-10 11:39:58 +00:00
|
|
|
format!("{}{}", self.config.base_url, "feed.xml")
|
|
|
|
} else {
|
|
|
|
format!("{}/{}", self.config.base_url, "feed.xml")
|
|
|
|
};
|
|
|
|
context.add("feed_url", &rss_feed_url);
|
2017-03-07 07:43:27 +00:00
|
|
|
|
2017-03-09 07:46:38 +00:00
|
|
|
let sitemap = self.templates.render("rss.xml", &context)?;
|
2017-03-07 07:43:27 +00:00
|
|
|
|
|
|
|
let public = Path::new("public");
|
|
|
|
create_file(public.join("rss.xml"), &sitemap)?;
|
|
|
|
|
|
|
|
Ok(())
|
|
|
|
}
|
2017-03-03 08:12:40 +00:00
|
|
|
}
|