commit 9a7244b35f

@@ -4,11 +4,14 @@
 
 - Fix XML templates overriding and reloading
 - `title` and `description` are now optional in the front matter
-- Add GenericConfig, Vim syntax
+- Add GenericConfig, Vim, Jinja2 syntax
 - Add `_index.md` for homepage as well and make that into a normal section
 - Allow sorting by `none`, `date` and `order` for sections
 - Add pagination
 - Add a `get_page` global function to tera
+- Revamp index page, no more `pages` variables
+- Fix livereload stopping randomly
+- Smarter re-rendering in `serve` command
 
 ## 0.0.4 (2017-04-23)
 
========================================
Cargo.lock | 10 (generated)

@@ -68,7 +68,7 @@ name = "backtrace-sys"
 version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -173,7 +173,7 @@ name = "cmake"
 version = "0.1.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -257,7 +257,7 @@ dependencies = [
 
 [[package]]
 name = "gcc"
-version = "0.3.45"
+version = "0.3.46"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -408,7 +408,7 @@ name = "miniz-sys"
 version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -1096,7 +1096,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum fnv 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6cc484842f1e2884faf56f529f960cc12ad8c71ce96cc7abba0a067c98fee344"
 "checksum fsevent 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)" = "dfe593ebcfc76884138b25426999890b10da8e6a46d01b499d7c54c604672c38"
 "checksum fsevent-sys 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "1a772d36c338d07a032d5375a36f15f9a7043bf0cb8ce7cee658e037c6032874"
-"checksum gcc 0.3.45 (registry+https://github.com/rust-lang/crates.io-index)" = "40899336fb50db0c78710f53e87afc54d8c7266fb76262fecc78ca1a7f09deae"
+"checksum gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)" = "181e3cebba1d663bd92eb90e2da787e10597e027eb00de8d742b260a7850948f"
 "checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
 "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
 "checksum httparse 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77f756bed9ee3a83ce98774f4155b42a31b787029013f3a7d83eca714e500e21"
========================================

@@ -3,11 +3,12 @@ use std::env;
 use gutenberg::errors::Result;
 use gutenberg::Site;
 
+use console;
 
 pub fn build(config_file: &str) -> Result<()> {
     let mut site = Site::new(env::current_dir().unwrap(), config_file)?;
     site.load()?;
-    super::notify_site_size(&site);
-    super::warn_about_ignored_pages(&site);
+    console::notify_site_size(&site);
+    console::warn_about_ignored_pages(&site);
     site.build()
 }
========================================

@@ -4,7 +4,7 @@
 
   _ref = require('./protocol'), Parser = _ref.Parser, PROTOCOL_6 = _ref.PROTOCOL_6, PROTOCOL_7 = _ref.PROTOCOL_7;
 
-  Version = '2.2.2';
+  Version = '2.2.1';
 
   exports.Connector = Connector = (function() {
     function Connector(options, WebSocket, Timer, handlers) {
@@ -12,7 +12,7 @@
       this.WebSocket = WebSocket;
       this.Timer = Timer;
       this.handlers = handlers;
-      this._uri = "ws" + (this.options.https ? "s" : "") + "://" + this.options.host + ":" + this.options.port + "/livereload";
+      this._uri = "ws://" + this.options.host + ":" + this.options.port + "/livereload";
      this._nextDelay = this.options.mindelay;
      this._connectionDesired = false;
      this.protocol = 0;
@@ -278,8 +278,7 @@
 
 },{}],4:[function(require,module,exports){
 (function() {
-  var Connector, LiveReload, Options, Reloader, Timer,
-    __hasProp = {}.hasOwnProperty;
+  var Connector, LiveReload, Options, Reloader, Timer;
 
   Connector = require('./connector').Connector;
 
@@ -291,15 +290,11 @@
 
   exports.LiveReload = LiveReload = (function() {
     function LiveReload(window) {
-      var k, v, _ref;
       this.window = window;
       this.listeners = {};
       this.plugins = [];
       this.pluginIdentifiers = {};
-      this.console = this.window.console && this.window.console.log && this.window.console.error ? this.window.location.href.match(/LR-verbose/) ? this.window.console : {
-        log: function() {},
-        error: this.window.console.error.bind(this.window.console)
-      } : {
+      this.console = this.window.location.href.match(/LR-verbose/) && this.window.console && this.window.console.log && this.window.console.error ? this.window.console : {
         log: function() {},
         error: function() {}
       };
@@ -307,20 +302,9 @@
         this.console.error("LiveReload disabled because the browser does not seem to support web sockets");
         return;
       }
-      if ('LiveReloadOptions' in window) {
-        this.options = new Options();
-        _ref = window['LiveReloadOptions'];
-        for (k in _ref) {
-          if (!__hasProp.call(_ref, k)) continue;
-          v = _ref[k];
-          this.options.set(k, v);
-        }
-      } else {
-        this.options = Options.extract(this.window.document);
-        if (!this.options) {
-          this.console.error("LiveReload disabled because it could not find its own <SCRIPT> tag");
-          return;
-        }
+      if (!(this.options = Options.extract(this.window.document))) {
+        this.console.error("LiveReload disabled because it could not find its own <SCRIPT> tag");
+        return;
       }
       this.reloader = new Reloader(this.window, this.console, Timer);
       this.connector = new Connector(this.options, this.WebSocket, Timer, {
@@ -342,7 +326,16 @@
         })(this),
         error: (function(_this) {
           return function(e) {
-            return console.log("" + e.message + ".");
+            console.log(e);
+            // if (e instanceof ProtocolError) {
+            //   if (typeof console !== "undefined" && console !== null) {
+            //     return console.log("" + e.message + ".");
+            //   }
+            // } else {
+            //   if (typeof console !== "undefined" && console !== null) {
+            //     return console.log("LiveReload internal error: " + e.message);
+            //   }
+            // }
           };
         })(this),
         disconnected: (function(_this) {
@@ -380,7 +373,6 @@
         };
       })(this)
     });
-    this.initialized = true;
   }
 
   LiveReload.prototype.on = function(eventName, handler) {
@@ -409,9 +401,6 @@
 
   LiveReload.prototype.shutDown = function() {
     var _base;
-    if (!this.initialized) {
-      return;
-    }
    this.connector.disconnect();
    this.log("LiveReload disconnected.");
    return typeof (_base = this.listeners).shutdown === "function" ? _base.shutdown() : void 0;
@@ -423,9 +412,6 @@
 
   LiveReload.prototype.addPlugin = function(pluginClass) {
     var plugin;
-    if (!this.initialized) {
-      return;
-    }
     if (this.hasPlugin(pluginClass.identifier)) {
       return;
     }
@@ -448,9 +434,6 @@
 
   LiveReload.prototype.analyze = function() {
     var plugin, pluginData, pluginsData, _i, _len, _ref;
-    if (!this.initialized) {
-      return;
-    }
     if (!(this.connector.protocol >= 7)) {
       return;
     }
@@ -480,7 +463,6 @@
 
   exports.Options = Options = (function() {
     function Options() {
-      this.https = false;
      this.host = null;
      this.port = 35729;
      this.snipver = null;
@@ -512,7 +494,6 @@
       element = _ref[_i];
       if ((src = element.src) && (m = src.match(/^[^:]+:\/\/(.*)\/z?livereload\.js(?:\?(.*))?$/))) {
         options = new Options();
-        options.https = src.indexOf("https") === 0;
         if (mm = m[1].match(/^([^\/:]+)(?::(\d+))?$/)) {
           options.host = mm[1];
           if (mm[2]) {
@@ -561,7 +542,7 @@
   }
 
   Parser.prototype.reset = function() {
-    return this.protocol = 7;
+    return this.protocol = null;
   };
 
   Parser.prototype.process = function(data) {
========================================
src/bin/cmd/mod.rs | 7 (new file)

@@ -0,0 +1,7 @@
+mod init;
+mod build;
+mod serve;
+
+pub use self::init::create_new_project;
+pub use self::build::build;
+pub use self::serve::serve;
========================================

@@ -9,14 +9,12 @@ use iron::{Iron, Request, IronResult, Response, status};
 use mount::Mount;
 use staticfile::Static;
 use notify::{Watcher, RecursiveMode, watcher};
-use ws::{WebSocket, Sender};
+use ws::{WebSocket, Sender, Message};
 use gutenberg::Site;
 use gutenberg::errors::{Result, ResultExt};
 
-
-use ::{report_elapsed_time, unravel_errors};
 use console;
+use rebuild;
 
 #[derive(Debug, PartialEq)]
 enum ChangeKind {
@@ -47,7 +45,7 @@ fn rebuild_done_handling(broadcaster: &Sender, res: Result<()>, reload_path: &str) {
             }}"#, reload_path)
             ).unwrap();
         },
-        Err(e) => unravel_errors("Failed to build the site", &e, false)
+        Err(e) => console::unravel_errors("Failed to build the site", &e)
     }
 }
 
@@ -67,10 +65,10 @@ pub fn serve(interface: &str, port: &str, config_file: &str) -> Result<()> {
 
     site.load()?;
     site.enable_live_reload();
-    super::notify_site_size(&site);
-    super::warn_about_ignored_pages(&site);
+    console::notify_site_size(&site);
+    console::warn_about_ignored_pages(&site);
     site.build()?;
-    report_elapsed_time(start);
+    console::report_elapsed_time(start);
 
     // Setup watchers
     let (tx, rx) = channel();
@@ -93,8 +91,17 @@ pub fn serve(interface: &str, port: &str, config_file: &str) -> Result<()> {
     let _iron = Iron::new(mount).http(address.as_str()).unwrap();
 
     // The websocket for livereload
-    let ws_server = WebSocket::new(|_| {
-        |_| {
+    let ws_server = WebSocket::new(|output: Sender| {
+        move |msg: Message| {
+            if msg.into_text().unwrap().contains("\"hello\"") {
+                return output.send(Message::text(r#"
+                {
+                    "command": "hello",
+                    "protocols": [ "http://livereload.com/protocols/official-7" ],
+                    "serverName": "Gutenberg"
+                }
+                "#));
+            }
             Ok(())
         }
     }).unwrap();
@@ -131,12 +138,12 @@ pub fn serve(interface: &str, port: &str, config_file: &str) -> Result<()> {
                     (ChangeKind::Content, _) => {
                         console::info(&format!("-> Content changed {}", path.display()));
                         // Force refresh
-                        rebuild_done_handling(&broadcaster, site.rebuild_after_content_change(&path), "/x.js");
+                        rebuild_done_handling(&broadcaster, rebuild::after_content_change(&mut site, &path), "/x.js");
                     },
                     (ChangeKind::Templates, _) => {
                         console::info(&format!("-> Template changed {}", path.display()));
                         // Force refresh
-                        rebuild_done_handling(&broadcaster, site.rebuild_after_template_change(&path), "/x.js");
+                        rebuild_done_handling(&broadcaster, rebuild::after_template_change(&mut site, &path), "/x.js");
                     },
                     (ChangeKind::StaticFiles, p) => {
                         if path.is_file() {
@@ -145,7 +152,7 @@ pub fn serve(interface: &str, port: &str, config_file: &str) -> Result<()> {
                     }
                 },
             };
-            report_elapsed_time(start);
+            console::report_elapsed_time(start);
         }
         _ => {}
     }
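
Note on `rebuild_done_handling`: only the tail of its success branch (`}}"#, reload_path)`) survives in this capture. For orientation, a livereload-7 "reload" command is a small JSON message; the sketch below is a hypothetical reconstruction based on the livereload protocol rather than on this commit, and only `reload_path` comes from the code above:

    // Hypothetical sketch of the truncated broadcast body (livereload "reload" command).
    broadcaster.send(format!(r#"
    {{
        "command": "reload",
        "path": "{}"
    }}"#, reload_path)).unwrap();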
========================================
src/bin/console.rs | 70 (new file)

@@ -0,0 +1,70 @@
+use std::time::Instant;
+
+use chrono::Duration;
+use term_painter::ToStyle;
+use term_painter::Color::*;
+
+use gutenberg::errors::Error;
+use gutenberg::Site;
+
+
+pub fn info(message: &str) {
+    println!("{}", NotSet.bold().paint(message));
+}
+
+pub fn warn(message: &str) {
+    println!("{}", Yellow.bold().paint(message));
+}
+
+pub fn success(message: &str) {
+    println!("{}", Green.bold().paint(message));
+}
+
+pub fn error(message: &str) {
+    println!("{}", Red.bold().paint(message));
+}
+
+/// Display in the console the number of pages/sections in the site
+pub fn notify_site_size(site: &Site) {
+    println!(
+        "-> Creating {} pages ({} orphan) and {} sections",
+        site.pages.len(),
+        site.get_all_orphan_pages().len(),
+        site.sections.len() - 1, // -1 since we do not the index as a section
+    );
+}
+
+/// Display a warning in the console if there are ignored pages in the site
+pub fn warn_about_ignored_pages(site: &Site) {
+    let ignored_pages = site.get_ignored_pages();
+    if !ignored_pages.is_empty() {
+        warn(&format!(
+            "{} page(s) ignored (missing date or order in a sorted section):",
+            ignored_pages.len()
+        ));
+        for path in site.get_ignored_pages() {
+            warn(&format!("- {}", path.display()));
+        }
+    }
+}
+
+/// Print the time elapsed rounded to 1 decimal
+pub fn report_elapsed_time(instant: Instant) {
+    let duration_ms = Duration::from_std(instant.elapsed()).unwrap().num_milliseconds() as f64;
+
+    if duration_ms < 1000.0 {
+        success(&format!("Done in {}ms.\n", duration_ms));
+    } else {
+        let duration_sec = duration_ms / 1000.0;
+        success(&format!("Done in {:.1}s.\n", ((duration_sec * 10.0).round() / 10.0)));
+    }
+}
+
+/// Display an error message and the actual error(s)
+pub fn unravel_errors(message: &str, error: &Error) {
+    self::error(message);
+    self::error(&format!("Error: {}", error));
+    for e in error.iter().skip(1) {
+        self::error(&format!("Reason: {}", e));
+    }
+}
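
For context, a sketch of how these helpers are meant to be called from the binary, mirroring the `main.rs` changes below (the config path is illustrative):

    use std::time::Instant;

    console::info("Building site...");
    let start = Instant::now();
    match cmd::build("config.toml") {
        // prints e.g. "Done in 1.2s." in green
        Ok(()) => console::report_elapsed_time(start),
        Err(e) => {
            // prints the message plus each chained cause in red
            console::unravel_errors("Failed to build the site", &e);
            ::std::process::exit(1);
        },
    };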
========================================

@@ -12,39 +12,11 @@ extern crate mount;
 extern crate notify;
 extern crate ws;
 
 
 use std::time::Instant;
 
-use chrono::Duration;
-use gutenberg::errors::Error;
-
 mod cmd;
 mod console;
+mod rebuild;
 
-/// Print the time elapsed rounded to 1 decimal
-fn report_elapsed_time(instant: Instant) {
-    let duration_ms = Duration::from_std(instant.elapsed()).unwrap().num_milliseconds() as f64;
-
-    if duration_ms < 1000.0 {
-        console::success(&format!("Done in {}ms.\n", duration_ms));
-    } else {
-        let duration_sec = duration_ms / 1000.0;
-        console::success(&format!("Done in {:.1}s.\n", ((duration_sec * 10.0).round() / 10.0)));
-    }
-}
-
-////Display an error message, the actual error and then exits if requested
-fn unravel_errors(message: &str, error: &Error, exit: bool) {
-    console::error(message);
-    console::error(&format!("Error: {}", error));
-    for e in error.iter().skip(1) {
-        console::error(&format!("Reason: {}", e));
-    }
-    if exit {
-        ::std::process::exit(1);
-    }
-}
-
-
 fn main() {
@@ -74,15 +46,21 @@ fn main() {
         ("init", Some(matches)) => {
             match cmd::create_new_project(matches.value_of("name").unwrap()) {
                 Ok(()) => console::success("Project created"),
-                Err(e) => unravel_errors("Failed to create the project", &e, true),
+                Err(e) => {
+                    console::unravel_errors("Failed to create the project", &e);
+                    ::std::process::exit(1);
+                },
             };
         },
         ("build", Some(_)) => {
             console::info("Building site...");
             let start = Instant::now();
             match cmd::build(config_file) {
-                Ok(()) => report_elapsed_time(start),
-                Err(e) => unravel_errors("Failed to build the site", &e, true),
+                Ok(()) => console::report_elapsed_time(start),
+                Err(e) => {
+                    console::unravel_errors("Failed to build the site", &e);
+                    ::std::process::exit(1);
+                },
             };
         },
         ("serve", Some(matches)) => {
@@ -91,7 +69,10 @@ fn main() {
             console::info("Building site...");
             match cmd::serve(interface, port, config_file) {
                 Ok(()) => (),
-                Err(e) => unravel_errors("Failed to build the site", &e, true),
+                Err(e) => {
+                    console::unravel_errors("Failed to build the site", &e);
+                    ::std::process::exit(1);
+                },
             };
         },
         _ => unreachable!(),
========================================
src/bin/rebuild.rs | 227 (new file)

@@ -0,0 +1,227 @@
+use std::path::Path;
+
+use gutenberg::{Site, SectionFrontMatter, PageFrontMatter};
+use gutenberg::errors::Result;
+
+#[derive(Debug, Clone, Copy, PartialEq)]
+enum PageChangesNeeded {
+    /// Editing `tags`
+    Tags,
+    /// Editing `categories`
+    Categories,
+    /// Editing `date` or `order`
+    Sort,
+    /// Editing anything else
+    Render,
+}
+
+// TODO: seems like editing sort_by/render do weird stuff
+#[derive(Debug, Clone, Copy, PartialEq)]
+enum SectionChangesNeeded {
+    /// Editing `sort_by`
+    Sort,
+    /// Editing `title`, `description`, `extra`, `template` or setting `render` to true
+    Render,
+    /// Editing `paginate_by` or `paginate_path`
+    RenderWithPages,
+    /// Setting `render` to false
+    Delete,
+}
+
+/// Evaluates all the params in the front matter that changed so we can do the smallest
+/// delta in the serve command
+fn find_section_front_matter_changes(current: &SectionFrontMatter, other: &SectionFrontMatter) -> Vec<SectionChangesNeeded> {
+    let mut changes_needed = vec![];
+
+    if current.sort_by != other.sort_by {
+        changes_needed.push(SectionChangesNeeded::Sort);
+    }
+
+    if !current.should_render() && other.should_render() {
+        changes_needed.push(SectionChangesNeeded::Delete);
+        // Nothing else we can do
+        return changes_needed;
+    }
+
+    if current.paginate_by != other.paginate_by || current.paginate_path != other.paginate_path {
+        changes_needed.push(SectionChangesNeeded::RenderWithPages);
+        // Nothing else we can do
+        return changes_needed;
+    }
+
+    // Any other change will trigger a re-rendering of the section page only
+    changes_needed.push(SectionChangesNeeded::Render);
+    changes_needed
+}
+
+/// Evaluates all the params in the front matter that changed so we can do the smallest
+/// delta in the serve command
+fn find_page_front_matter_changes(current: &PageFrontMatter, other: &PageFrontMatter) -> Vec<PageChangesNeeded> {
+    let mut changes_needed = vec![];
+
+    if current.tags != other.tags {
+        changes_needed.push(PageChangesNeeded::Tags);
+    }
+
+    if current.category != other.category {
+        changes_needed.push(PageChangesNeeded::Categories);
+    }
+
+    if current.date != other.date || current.order != other.order {
+        changes_needed.push(PageChangesNeeded::Sort);
+    }
+
+    changes_needed.push(PageChangesNeeded::Render);
+    changes_needed
+}
+
+// What happens when a section or a page is changed
+pub fn after_content_change(site: &mut Site, path: &Path) -> Result<()> {
+    let is_section = path.file_name().unwrap() == "_index.md";
+
+    // A page or section got deleted
+    if !path.exists() {
+        if is_section {
+            // A section was deleted, many things can be impacted:
+            // - the pages of the section are becoming orphans
+            // - any page that was referencing the section (index, etc)
+            let relative_path = site.sections[path].relative_path.clone();
+            // Remove the link to it and the section itself from the Site
+            site.permalinks.remove(&relative_path);
+            site.sections.remove(path);
+            site.populate_sections();
+        } else {
+            // A page was deleted, many things can be impacted:
+            // - the section the page is in
+            // - any page that was referencing the section (index, etc)
+            let relative_path = site.pages[path].relative_path.clone();
+            site.permalinks.remove(&relative_path);
+            if let Some(p) = site.pages.remove(path) {
+                if p.meta.has_tags() || p.meta.category.is_some() {
+                    site.populate_tags_and_categories();
+                }
+
+                if site.find_parent_section(&p).is_some() {
+                    site.populate_sections();
+                }
+            };
+        }
+        // Deletion is something that doesn't happen all the time so we
+        // don't need to optimise it too much
+        return site.build();
+    }
+
+    // A section was edited
+    if is_section {
+        match site.add_section(path, true)? {
+            Some(prev) => {
+                // Updating a section
+                let current_meta = site.sections[path].meta.clone();
+                // Front matter didn't change, only content did
+                // so we render only the section page, not its pages
+                if current_meta == prev.meta {
+                    return site.render_section(&site.sections[path], false);
+                }
+
+                // Front matter changed
+                for changes in find_section_front_matter_changes(&current_meta, &prev.meta) {
+                    // Sort always comes first if present so the rendering will be fine
+                    match changes {
+                        SectionChangesNeeded::Sort => site.sort_sections_pages(Some(path)),
+                        SectionChangesNeeded::Render => site.render_section(&site.sections[path], false)?,
+                        SectionChangesNeeded::RenderWithPages => site.render_section(&site.sections[path], true)?,
+                        // can't be arsed to make the Delete efficient, it's not a common enough operation
+                        SectionChangesNeeded::Delete => {
+                            site.populate_sections();
+                            site.build()?;
+                        },
+                    };
+                }
+                return Ok(());
+            },
+            None => {
+                // New section, only render that one
+                site.populate_sections();
+                return site.render_section(&site.sections[path], true);
+            }
+        };
+    }
+
+    // A page was edited
+    match site.add_page(path, true)? {
+        Some(prev) => {
+            // Updating a page
+            let current = site.pages[path].clone();
+            // Front matter didn't change, only content did
+            // so we render only the section page, not its pages
+            if current.meta == prev.meta {
+                return site.render_page(&site.pages[path]);
+            }
+
+            // Front matter changed
+            for changes in find_page_front_matter_changes(&current.meta, &prev.meta) {
+                // Sort always comes first if present so the rendering will be fine
+                match changes {
+                    PageChangesNeeded::Tags => {
+                        site.populate_tags_and_categories();
+                        site.render_tags()?;
+                    },
+                    PageChangesNeeded::Categories => {
+                        site.populate_tags_and_categories();
+                        site.render_categories()?;
+                    },
+                    PageChangesNeeded::Sort => {
+                        let section_path = match site.find_parent_section(&site.pages[path]) {
+                            Some(s) => s.file_path.clone(),
+                            None => continue // Do nothing if it's an orphan page
+                        };
+                        site.populate_sections();
+                        site.sort_sections_pages(Some(&section_path));
+                        site.render_index()?;
+                    },
+                    PageChangesNeeded::Render => {
+                        site.render_page(&site.pages[path])?;
+                    },
+                };
+            }
+            return Ok(());
+
+        },
+        None => {
+            // It's a new page!
+            site.populate_sections();
+            site.populate_tags_and_categories();
+            // No need to optimise that yet, we can revisit if it becomes an issue
+            site.build()?;
+        }
+    }
+
+    Ok(())
+}
+
+/// What happens when a template is changed
+pub fn after_template_change(site: &mut Site, path: &Path) -> Result<()> {
+    site.tera.full_reload()?;
+
+    match path.file_name().unwrap().to_str().unwrap() {
+        "sitemap.xml" => site.render_sitemap(),
+        "rss.xml" => site.render_rss_feed(),
+        "robots.txt" => site.render_robots(),
+        "categories.html" | "category.html" => site.render_categories(),
+        "tags.html" | "tag.html" => site.render_tags(),
+        "page.html" => {
+            site.render_sections()?;
+            site.render_orphan_pages()
+        },
+        "section.html" => site.render_sections(),
+        // Either the index or some unknown template changed
+        // We can't really know what this change affects so rebuild all
+        // the things
+        _ => {
+            site.render_sections()?;
+            site.render_orphan_pages()?;
+            site.render_categories()?;
+            site.render_tags()
+        },
+    }
+}
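
A sketch of what the front-matter diffing returns, written as a unit test in the style of the rest of the codebase. It is not part of the commit and assumes `PageFrontMatter` derives `Default` and exposes `date` as an `Option<String>`:

    #[test]
    fn editing_date_triggers_sort_then_render() {
        let old = PageFrontMatter::default();
        let mut new = PageFrontMatter::default();
        new.date = Some("2017-04-24".to_string()); // hypothetical edit

        // Sort is pushed before Render, so callers re-sort before re-rendering
        let changes = find_page_front_matter_changes(&new, &old);
        assert_eq!(changes, vec![PageChangesNeeded::Sort, PageChangesNeeded::Render]);
    }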
========================================

@@ -1,33 +0,0 @@
-mod init;
-mod build;
-mod serve;
-
-pub use self::init::create_new_project;
-pub use self::build::build;
-pub use self::serve::serve;
-
-use gutenberg::Site;
-
-use console::warn;
-
-fn notify_site_size(site: &Site) {
-    println!(
-        "-> Creating {} pages ({} orphan) and {} sections",
-        site.pages.len(),
-        site.get_all_orphan_pages().len(),
-        site.sections.len()
-    );
-}
-
-fn warn_about_ignored_pages(site: &Site) {
-    let ignored_pages = site.get_ignored_pages();
-    if !ignored_pages.is_empty() {
-        warn(&format!(
-            "{} page(s) ignored (missing date or order in a sorted section):",
-            ignored_pages.len()
-        ));
-        for path in site.get_ignored_pages() {
-            warn(&format!("- {}", path.display()));
-        }
-    }
-}
========================================

@@ -58,6 +58,10 @@ impl Config {
 
         set_default!(config.language_code, "en".to_string());
         set_default!(config.highlight_code, false);
+        set_default!(config.generate_rss, false);
+        set_default!(config.generate_tags_pages, false);
+        set_default!(config.generate_categories_pages, false);
+        set_default!(config.insert_anchor_links, false);
 
         match config.highlight_theme {
             Some(ref t) => {
@@ -68,11 +72,6 @@ impl Config {
             None => config.highlight_theme = Some("base16-ocean-dark".to_string())
         };
 
-        set_default!(config.generate_rss, false);
-        set_default!(config.generate_tags_pages, true);
-        set_default!(config.generate_categories_pages, true);
-        set_default!(config.insert_anchor_links, false);
-
         Ok(config)
     }
 
@@ -135,7 +134,7 @@ mod tests {
     use super::{Config};
 
     #[test]
-    fn test_can_import_valid_config() {
+    fn can_import_valid_config() {
         let config = r#"
title = "My site"
base_url = "https://replace-this-with-your-url.com"
@@ -146,7 +145,7 @@ base_url = "https://replace-this-with-your-url.com"
     }
 
     #[test]
-    fn test_errors_when_invalid_type() {
+    fn errors_when_invalid_type() {
         let config = r#"
title = 1
base_url = "https://replace-this-with-your-url.com"
@@ -157,7 +156,8 @@ base_url = "https://replace-this-with-your-url.com"
     }
 
     #[test]
-    fn test_errors_when_missing_required_field() {
+    fn errors_when_missing_required_field() {
+        // base_url is required
         let config = r#"
title = ""
"#;
@@ -167,7 +167,7 @@ title = ""
     }
 
     #[test]
-    fn test_can_add_extra_values() {
+    fn can_add_extra_values() {
         let config = r#"
title = "My site"
base_url = "https://replace-this-with-your-url.com"
@@ -181,15 +181,4 @@ hello = "world"
         assert_eq!(config.unwrap().extra.unwrap().get("hello").unwrap().as_str().unwrap(), "world");
     }
 
-    #[test]
-    fn test_language_defaults_to_en() {
-        let config = r#"
-title = "My site"
-base_url = "https://replace-this-with-your-url.com""#;
-
-        let config = Config::parse(config);
-        assert!(config.is_ok());
-        let config = config.unwrap();
-        assert_eq!(config.language_code.unwrap(), "en");
-    }
 }
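
Beyond the reordering, two defaults flip here: `generate_tags_pages` and `generate_categories_pages` now default to `false` instead of `true`. A hedged test sketch in the style of the existing tests; it assumes the fields are `Option<bool>`, which is what the `set_default!` pattern suggests:

    #[test]
    fn tags_and_categories_pages_are_off_by_default() {
        let config = r#"
    title = "My site"
    base_url = "https://replace-this-with-your-url.com"
    "#;
        let config = Config::parse(config).unwrap();
        assert_eq!(config.generate_tags_pages, Some(false));
        assert_eq!(config.generate_categories_pages, Some(false));
    }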
========================================

@@ -1,19 +0,0 @@
-use term_painter::ToStyle;
-use term_painter::Color::*;
-
-
-pub fn info(message: &str) {
-    println!("{}", NotSet.bold().paint(message));
-}
-
-pub fn warn(message: &str) {
-    println!("{}", Yellow.bold().paint(message));
-}
-
-pub fn success(message: &str) {
-    println!("{}", Green.bold().paint(message));
-}
-
-pub fn error(message: &str) {
-    println!("{}", Red.bold().paint(message));
-}
========================================
src/content/mod.rs | 14 (new file)

@@ -0,0 +1,14 @@
+// TODO: move section/page and maybe pagination in this mod
+// Not sure where pagination stands if I add a render mod
+
+mod page;
+mod pagination;
+mod section;
+mod sorting;
+mod utils;
+
+pub use self::page::{Page};
+pub use self::section::{Section};
+pub use self::pagination::{Paginator, Pager};
+pub use self::sorting::{SortBy, sort_pages, populate_previous_and_next_pages};
+
========================================
src/content/page.rs | 228 (new file)

@@ -0,0 +1,228 @@
+/// A page, can be a blog post or a basic page
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+use std::result::Result as StdResult;
+
+
+use tera::{Tera, Context};
+use serde::ser::{SerializeStruct, self};
+use slug::slugify;
+
+use errors::{Result, ResultExt};
+use config::Config;
+use front_matter::{PageFrontMatter, split_page_content};
+use markdown::markdown_to_html;
+use utils::{read_file, find_content_components};
+use content::utils::{find_related_assets, get_reading_analytics};
+
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct Page {
+    /// The front matter meta-data
+    pub meta: PageFrontMatter,
+    /// The .md path
+    pub file_path: PathBuf,
+    /// The .md path, starting from the content directory, with / slashes
+    pub relative_path: String,
+    /// The parent directory of the file. Is actually the grand parent directory
+    /// if it's an asset folder
+    pub parent_path: PathBuf,
+    /// The name of the .md file
+    pub file_name: String,
+    /// The directories above our .md file
+    /// for example a file at content/kb/solutions/blabla.md will have 2 components:
+    /// `kb` and `solutions`
+    pub components: Vec<String>,
+    /// The actual content of the page, in markdown
+    pub raw_content: String,
+    /// All the non-md files we found next to the .md file
+    pub assets: Vec<PathBuf>,
+    /// The HTML rendered of the page
+    pub content: String,
+
+    /// The slug of that page.
+    /// First tries to find the slug in the meta and defaults to filename otherwise
+    pub slug: String,
+    /// The URL path of the page
+    pub path: String,
+    /// The full URL for that page
+    pub permalink: String,
+    /// The summary for the article, defaults to None
+    /// When <!-- more --> is found in the text, will take the content up to that part
+    /// as summary
+    pub summary: Option<String>,
+
+    /// The previous page, by whatever sorting is used for the index/section
+    pub previous: Option<Box<Page>>,
+    /// The next page, by whatever sorting is used for the index/section
+    pub next: Option<Box<Page>>,
+}
+
+
+impl Page {
+    pub fn new(meta: PageFrontMatter) -> Page {
+        Page {
+            meta: meta,
+            file_path: PathBuf::new(),
+            relative_path: String::new(),
+            parent_path: PathBuf::new(),
+            file_name: "".to_string(),
+            components: vec![],
+            raw_content: "".to_string(),
+            assets: vec![],
+            content: "".to_string(),
+            slug: "".to_string(),
+            path: "".to_string(),
+            permalink: "".to_string(),
+            summary: None,
+            previous: None,
+            next: None,
+        }
+    }
+
+    /// Parse a page given the content of the .md file
+    /// Files without front matter or with invalid front matter are considered
+    /// erroneous
+    pub fn parse(file_path: &Path, content: &str, config: &Config) -> Result<Page> {
+        // 1. separate front matter from content
+        let (meta, content) = split_page_content(file_path, content)?;
+        let mut page = Page::new(meta);
+        page.file_path = file_path.to_path_buf();
+        page.parent_path = page.file_path.parent().unwrap().to_path_buf();
+        page.raw_content = content;
+
+        let path = Path::new(file_path);
+        page.file_name = path.file_stem().unwrap().to_string_lossy().to_string();
+
+        page.slug = {
+            if let Some(ref slug) = page.meta.slug {
+                slug.trim().to_string()
+            } else {
+                slugify(page.file_name.clone())
+            }
+        };
+        page.components = find_content_components(&page.file_path);
+        page.relative_path = format!("{}/{}.md", page.components.join("/"), page.file_name);
+
+        // 4. Find sections
+        // Pages with custom urls exists outside of sections
+        let mut path_set = false;
+        if let Some(ref u) = page.meta.url {
+            page.path = u.trim().to_string();
+            path_set = true;
+        }
+
+        if !page.components.is_empty() {
+            // If we have a folder with an asset, don't consider it as a component
+            if page.file_name == "index" {
+                page.components.pop();
+                // also set parent_path to grandparent instead
+                page.parent_path = page.parent_path.parent().unwrap().to_path_buf();
+            }
+            if !path_set {
+                // Don't add a trailing slash to sections
+                page.path = format!("{}/{}", page.components.join("/"), page.slug);
+            }
+        } else if !path_set {
+            page.path = page.slug.clone();
+        }
+
+        page.permalink = config.make_permalink(&page.path);
+
+        Ok(page)
+    }
+
+    /// Read and parse a .md file into a Page struct
+    pub fn from_file<P: AsRef<Path>>(path: P, config: &Config) -> Result<Page> {
+        let path = path.as_ref();
+        let content = read_file(path)?;
+        let mut page = Page::parse(path, &content, config)?;
+        page.assets = find_related_assets(path.parent().unwrap());
+
+        if !page.assets.is_empty() && page.file_name != "index" {
+            bail!("Page `{}` has assets ({:?}) but is not named index.md", path.display(), page.assets);
+        }
+
+        Ok(page)
+
+    }
+
+    /// We need access to all pages url to render links relative to content
+    /// so that can't happen at the same time as parsing
+    pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config) -> Result<()> {
+        self.content = markdown_to_html(&self.raw_content, permalinks, tera, config)?;
+
+        if self.raw_content.contains("<!-- more -->") {
+            self.summary = Some({
+                let summary = self.raw_content.splitn(2, "<!-- more -->").collect::<Vec<&str>>()[0];
+                markdown_to_html(summary, permalinks, tera, config)?
+            })
+        }
+
+        Ok(())
+    }
+
+    /// Renders the page using the default layout, unless specified in front-matter
+    pub fn render_html(&self, tera: &Tera, config: &Config) -> Result<String> {
+        let tpl_name = match self.meta.template {
+            Some(ref l) => l.to_string(),
+            None => "page.html".to_string()
+        };
+
+        let mut context = Context::new();
+        context.add("config", config);
+        context.add("page", self);
+        context.add("current_url", &self.permalink);
+        context.add("current_path", &self.path);
+
+        tera.render(&tpl_name, &context)
+            .chain_err(|| format!("Failed to render page '{}'", self.file_path.display()))
+    }
+}
+
+impl Default for Page {
+    fn default() -> Page {
+        Page {
+            meta: PageFrontMatter::default(),
+            file_path: PathBuf::new(),
+            relative_path: String::new(),
+            parent_path: PathBuf::new(),
+            file_name: "".to_string(),
+            components: vec![],
+            raw_content: "".to_string(),
+            assets: vec![],
+            content: "".to_string(),
+            slug: "".to_string(),
+            path: "".to_string(),
+            permalink: "".to_string(),
+            summary: None,
+            previous: None,
+            next: None,
+        }
+    }
+}
+
+impl ser::Serialize for Page {
+    fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: ser::Serializer {
+        let mut state = serializer.serialize_struct("page", 16)?;
+        state.serialize_field("content", &self.content)?;
+        state.serialize_field("title", &self.meta.title)?;
+        state.serialize_field("description", &self.meta.description)?;
+        state.serialize_field("date", &self.meta.date)?;
+        state.serialize_field("slug", &self.slug)?;
+        state.serialize_field("path", &format!("/{}", self.path))?;
+        state.serialize_field("permalink", &self.permalink)?;
+        state.serialize_field("summary", &self.summary)?;
+        state.serialize_field("tags", &self.meta.tags)?;
+        state.serialize_field("draft", &self.meta.draft)?;
+        state.serialize_field("category", &self.meta.category)?;
+        state.serialize_field("extra", &self.meta.extra)?;
+        let (word_count, reading_time) = get_reading_analytics(&self.raw_content);
+        state.serialize_field("word_count", &word_count)?;
+        state.serialize_field("reading_time", &reading_time)?;
+        state.serialize_field("previous", &self.previous)?;
+        state.serialize_field("next", &self.next)?;
+        state.end()
+    }
+}
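
A sketch of the parsing rules encoded above: the slug comes from the front matter when present, otherwise from the slugified file name, and `path` is built from the content-relative components. Not part of the commit; it assumes Gutenberg's `+++`-delimited TOML front matter and a `config` in scope:

    // Front matter delimiters must sit on their own lines in a real file.
    let raw = "+++\ntitle = \"Hello\"\nslug = \"hello-world\"\n+++\nSome content";
    let page = Page::parse(Path::new("content/posts/hello.md"), raw, &config)?;
    assert_eq!(page.slug, "hello-world");       // taken from the front matter
    assert_eq!(page.path, "posts/hello-world"); // components + slug
    assert_eq!(page.permalink, config.make_permalink(&page.path));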
========================================

@@ -2,8 +2,7 @@ use std::collections::HashMap;
 use tera::{Context, to_value, Value};
 
 use errors::{Result, ResultExt};
-use page::Page;
-use section::Section;
+use content::{Page, Section};
 use site::Site;
 
 
@@ -23,10 +22,10 @@ pub struct Pager<'a> {
 impl<'a> Pager<'a> {
     fn new(index: usize, pages: Vec<&'a Page>, permalink: String, path: String) -> Pager<'a> {
         Pager {
-            index: index,
-            permalink: permalink,
-            path: path,
-            pages: pages,
+            index,
+            permalink,
+            path,
+            pages,
         }
     }
 }
@@ -44,6 +43,8 @@ pub struct Paginator<'a> {
 }
 
 impl<'a> Paginator<'a> {
+    /// Create a new paginator
+    /// It will always at least create one pager (the first) even if there are no pages to paginate
     pub fn new(all_pages: &'a [Page], section: &'a Section) -> Paginator<'a> {
         let paginate_by = section.meta.paginate_by.unwrap();
         let paginate_path = match section.meta.paginate_path {
@@ -87,6 +88,11 @@ impl<'a> Paginator<'a> {
             ));
         }
 
+        // We always have the index one at least
+        if pagers.is_empty() {
+            pagers.push(Pager::new(1, vec![], section.permalink.clone(), section.path.clone()));
+        }
+
         Paginator {
             all_pages: all_pages,
             pagers: pagers,
@@ -147,14 +153,13 @@ impl<'a> Paginator<'a> {
 mod tests {
     use tera::{to_value};
 
-    use front_matter::FrontMatter;
-    use page::Page;
-    use section::Section;
+    use front_matter::SectionFrontMatter;
+    use content::{Page, Section};
 
     use super::{Paginator};
 
     fn create_section(is_index: bool) -> Section {
-        let mut f = FrontMatter::default();
+        let mut f = SectionFrontMatter::default();
         f.paginate_by = Some(2);
         f.paginate_path = Some("page".to_string());
         let mut s = Section::new("content/_index.md", f);
@@ -171,9 +176,9 @@ mod tests {
     #[test]
     fn test_can_create_paginator() {
         let pages = vec![
-            Page::new(FrontMatter::default()),
-            Page::new(FrontMatter::default()),
-            Page::new(FrontMatter::default()),
+            Page::default(),
+            Page::default(),
+            Page::default(),
         ];
         let section = create_section(false);
         let paginator = Paginator::new(pages.as_slice(), &section);
@@ -193,9 +198,9 @@ mod tests {
     #[test]
     fn test_can_create_paginator_for_index() {
         let pages = vec![
-            Page::new(FrontMatter::default()),
-            Page::new(FrontMatter::default()),
-            Page::new(FrontMatter::default()),
+            Page::default(),
+            Page::default(),
+            Page::default(),
        ];
         let section = create_section(true);
         let paginator = Paginator::new(pages.as_slice(), &section);
@@ -215,9 +220,9 @@ mod tests {
     #[test]
     fn test_can_build_paginator_context() {
         let pages = vec![
-            Page::new(FrontMatter::default()),
-            Page::new(FrontMatter::default()),
-            Page::new(FrontMatter::default()),
+            Page::default(),
+            Page::default(),
+            Page::default(),
         ];
         let section = create_section(false);
         let paginator = Paginator::new(pages.as_slice(), &section);
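
The guaranteed-first-pager behaviour added above can be pinned down with a test in the same style as the existing ones (a sketch, not part of the commit; it assumes the `pagers` and `pages` fields are visible to the test module):

    #[test]
    fn creates_one_pager_even_without_pages() {
        let pages: Vec<Page> = vec![];
        let section = create_section(false);
        let paginator = Paginator::new(pages.as_slice(), &section);
        // the first pager always exists and points at the section itself
        assert_eq!(paginator.pagers.len(), 1);
        assert!(paginator.pagers[0].pages.is_empty());
    }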
@ -6,14 +6,17 @@ use tera::{Tera, Context};
|
||||||
use serde::ser::{SerializeStruct, self};
|
use serde::ser::{SerializeStruct, self};
|
||||||
|
|
||||||
use config::Config;
|
use config::Config;
|
||||||
use front_matter::{FrontMatter, split_content};
|
use front_matter::{SectionFrontMatter, split_section_content};
|
||||||
use errors::{Result, ResultExt};
|
use errors::{Result, ResultExt};
|
||||||
use utils::{read_file, find_content_components};
|
use utils::{read_file, find_content_components};
|
||||||
use page::{Page};
|
use markdown::markdown_to_html;
|
||||||
|
use content::Page;
|
||||||
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, PartialEq)]
|
#[derive(Clone, Debug, PartialEq)]
|
||||||
pub struct Section {
|
pub struct Section {
|
||||||
|
/// The front matter meta-data
|
||||||
|
pub meta: SectionFrontMatter,
|
||||||
/// The _index.md full path
|
/// The _index.md full path
|
||||||
pub file_path: PathBuf,
|
pub file_path: PathBuf,
|
||||||
/// The .md path, starting from the content directory, with / slashes
|
/// The .md path, starting from the content directory, with / slashes
|
||||||
|
@ -26,8 +29,10 @@ pub struct Section {
|
||||||
pub path: String,
|
pub path: String,
|
||||||
/// The full URL for that page
|
/// The full URL for that page
|
||||||
pub permalink: String,
|
pub permalink: String,
|
||||||
/// The front matter meta-data
|
/// The actual content of the page, in markdown
|
||||||
pub meta: FrontMatter,
|
pub raw_content: String,
|
||||||
|
/// The HTML rendered of the page
|
||||||
|
pub content: String,
|
||||||
/// All direct pages of that section
|
/// All direct pages of that section
|
||||||
pub pages: Vec<Page>,
|
pub pages: Vec<Page>,
|
||||||
/// All pages that cannot be sorted in this section
|
/// All pages that cannot be sorted in this section
|
||||||
|
@ -37,17 +42,19 @@ pub struct Section {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Section {
|
impl Section {
|
||||||
pub fn new<P: AsRef<Path>>(file_path: P, meta: FrontMatter) -> Section {
|
pub fn new<P: AsRef<Path>>(file_path: P, meta: SectionFrontMatter) -> Section {
|
||||||
let file_path = file_path.as_ref();
|
let file_path = file_path.as_ref();
|
||||||
|
|
||||||
Section {
|
Section {
|
||||||
|
meta: meta,
|
||||||
file_path: file_path.to_path_buf(),
|
file_path: file_path.to_path_buf(),
|
||||||
relative_path: "".to_string(),
|
relative_path: "".to_string(),
|
||||||
parent_path: file_path.parent().unwrap().to_path_buf(),
|
parent_path: file_path.parent().unwrap().to_path_buf(),
|
||||||
components: vec![],
|
components: vec![],
|
||||||
path: "".to_string(),
|
path: "".to_string(),
|
||||||
permalink: "".to_string(),
|
permalink: "".to_string(),
|
||||||
meta: meta,
|
raw_content: "".to_string(),
|
||||||
|
content: "".to_string(),
|
||||||
pages: vec![],
|
pages: vec![],
|
||||||
ignored_pages: vec![],
|
ignored_pages: vec![],
|
||||||
subsections: vec![],
|
subsections: vec![],
|
||||||
|
@@ -55,12 +62,14 @@ impl Section {
     }
 
     pub fn parse(file_path: &Path, content: &str, config: &Config) -> Result<Section> {
-        let (meta, _) = split_content(file_path, content)?;
+        let (meta, content) = split_section_content(file_path, content)?;
         let mut section = Section::new(file_path, meta);
+        section.raw_content = content.clone();
         section.components = find_content_components(&section.file_path);
         section.path = section.components.join("/");
         section.permalink = config.make_permalink(&section.path);
         if section.components.is_empty() {
+            // the index one
             section.relative_path = "_index.md".to_string();
         } else {
             section.relative_path = format!("{}/_index.md", section.components.join("/"));
@@ -89,8 +98,15 @@ impl Section {
         }
     }
 
+    /// We need access to all pages url to render links relative to content
+    /// so that can't happen at the same time as parsing
+    pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config) -> Result<()> {
+        self.content = markdown_to_html(&self.raw_content, permalinks, tera, config)?;
+        Ok(())
+    }
+
     /// Renders the page using the default layout, unless specified in front-matter
-    pub fn render_html(&self, sections: &HashMap<String, Section>, tera: &Tera, config: &Config) -> Result<String> {
+    pub fn render_html(&self, sections: HashMap<String, Section>, tera: &Tera, config: &Config) -> Result<String> {
         let tpl_name = self.get_template_name();
 
         let mut context = Context::new();
@@ -99,7 +115,7 @@ impl Section {
         context.add("current_url", &self.permalink);
         context.add("current_path", &self.path);
         if self.is_index() {
-            context.add("sections", sections);
+            context.add("sections", &sections);
         }
 
         tera.render(&tpl_name, &context)
@@ -111,17 +127,36 @@ impl Section {
         self.components.is_empty()
     }
 
+    /// Returns all the paths for the pages belonging to that section
     pub fn all_pages_path(&self) -> Vec<PathBuf> {
         let mut paths = vec![];
         paths.extend(self.pages.iter().map(|p| p.file_path.clone()));
         paths.extend(self.ignored_pages.iter().map(|p| p.file_path.clone()));
         paths
     }
 
+    /// Whether the page given belongs to that section
+    pub fn is_child_page(&self, page: &Page) -> bool {
+        for p in &self.pages {
+            if p.file_path == page.file_path {
+                return true;
+            }
+        }
+
+        for p in &self.ignored_pages {
+            if p.file_path == page.file_path {
+                return true;
+            }
+        }
+
+        false
+    }
 }
 
 impl ser::Serialize for Section {
     fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: ser::Serializer {
-        let mut state = serializer.serialize_struct("section", 6)?;
+        let mut state = serializer.serialize_struct("section", 7)?;
+        state.serialize_field("content", &self.content)?;
         state.serialize_field("title", &self.meta.title)?;
         state.serialize_field("description", &self.meta.description)?;
         state.serialize_field("path", &format!("/{}", self.path))?;
@@ -131,3 +166,23 @@ impl ser::Serialize for Section {
         state.end()
     }
 }
+
+impl Default for Section {
+    /// Used to create a default index section if there is no _index.md in the root content directory
+    fn default() -> Section {
+        Section {
+            meta: SectionFrontMatter::default(),
+            file_path: PathBuf::new(),
+            relative_path: "".to_string(),
+            parent_path: PathBuf::new(),
+            components: vec![],
+            path: "".to_string(),
+            permalink: "".to_string(),
+            raw_content: "".to_string(),
+            content: "".to_string(),
+            pages: vec![],
+            ignored_pages: vec![],
+            subsections: vec![],
+        }
+    }
+}
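
Note on the two-phase design above: `parse` only splits the front matter off and stores `raw_content`, while `render_markdown` turns that markdown into HTML later, once every permalink in the site is known. A minimal sketch of the intended call order, assuming `config`, `tera` and a populated `permalinks` map already exist (the variable names are illustrative, not from this commit):

// Phase 1: read the _index.md file and parse front matter + raw markdown.
let raw = read_file(&path)?;
let mut section = Section::parse(&path, &raw, &config)?;
// Phase 2: only after all permalinks are collected can relative links resolve.
section.render_markdown(&permalinks, &tera, &config)?;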

src/content/sorting.rs (new file, 169 lines)
@@ -0,0 +1,169 @@
+use content::Page;
+
+
+#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase")]
+pub enum SortBy {
+    Date,
+    Order,
+    None,
+}
+
+/// Sort pages using the method for the given section
+///
+/// Any pages that doesn't have a date when the sorting method is date or order
+/// when the sorting method is order will be ignored.
+pub fn sort_pages(pages: Vec<Page>, sort_by: SortBy) -> (Vec<Page>, Vec<Page>) {
+    match sort_by {
+        SortBy::Date => {
+            let mut can_be_sorted = vec![];
+            let mut cannot_be_sorted = vec![];
+            for page in pages {
+                if page.meta.date.is_some() {
+                    can_be_sorted.push(page);
+                } else {
+                    cannot_be_sorted.push(page);
+                }
+            }
+            can_be_sorted.sort_by(|a, b| b.meta.date().unwrap().cmp(&a.meta.date().unwrap()));
+
+            (can_be_sorted, cannot_be_sorted)
+        },
+        SortBy::Order => {
+            let mut can_be_sorted = vec![];
+            let mut cannot_be_sorted = vec![];
+            for page in pages {
+                if page.meta.order.is_some() {
+                    can_be_sorted.push(page);
+                } else {
+                    cannot_be_sorted.push(page);
+                }
+            }
+            can_be_sorted.sort_by(|a, b| b.meta.order().cmp(&a.meta.order()));
+
+            (can_be_sorted, cannot_be_sorted)
+        },
+        SortBy::None => (pages, vec![])
+    }
+}
+
+/// Horribly inefficient way to set previous and next on each pages
+/// So many clones
+pub fn populate_previous_and_next_pages(input: &[Page]) -> Vec<Page> {
+    let pages = input.to_vec();
+    let mut res = Vec::new();
+
+    // the input is already sorted
+    // We might put prev/next randomly if a page is missing date/order, probably fine
+    for (i, page) in input.iter().enumerate() {
+        let mut new_page = page.clone();
+
+        if i > 0 {
+            let next = &pages[i - 1];
+            new_page.next = Some(Box::new(next.clone()));
+        }
+
+        if i < input.len() - 1 {
+            let previous = &pages[i + 1];
+            new_page.previous = Some(Box::new(previous.clone()));
+        }
+        res.push(new_page);
+    }
+
+    res
+}
+
+#[cfg(test)]
+mod tests {
+    use front_matter::{PageFrontMatter};
+    use content::Page;
+    use super::{SortBy, sort_pages, populate_previous_and_next_pages};
+
+    fn create_page_with_date(date: &str) -> Page {
+        let mut front_matter = PageFrontMatter::default();
+        front_matter.date = Some(date.to_string());
+        Page::new(front_matter)
+    }
+
+    fn create_page_with_order(order: usize) -> Page {
+        let mut front_matter = PageFrontMatter::default();
+        front_matter.order = Some(order);
+        Page::new(front_matter)
+    }
+
+    #[test]
+    fn can_sort_by_dates() {
+        let input = vec![
+            create_page_with_date("2018-01-01"),
+            create_page_with_date("2017-01-01"),
+            create_page_with_date("2019-01-01"),
+        ];
+        let (pages, _) = sort_pages(input, SortBy::Date);
+        // Should be sorted by date
+        assert_eq!(pages[0].clone().meta.date.unwrap(), "2019-01-01");
+        assert_eq!(pages[1].clone().meta.date.unwrap(), "2018-01-01");
+        assert_eq!(pages[2].clone().meta.date.unwrap(), "2017-01-01");
+    }
+
+    #[test]
+    fn can_sort_by_order() {
+        let input = vec![
+            create_page_with_order(2),
+            create_page_with_order(3),
+            create_page_with_order(1),
+        ];
+        let (pages, _) = sort_pages(input, SortBy::Order);
+        // Should be sorted by date
+        assert_eq!(pages[0].clone().meta.order.unwrap(), 3);
+        assert_eq!(pages[1].clone().meta.order.unwrap(), 2);
+        assert_eq!(pages[2].clone().meta.order.unwrap(), 1);
+    }
+
+    #[test]
+    fn can_sort_by_none() {
+        let input = vec![
+            create_page_with_order(2),
+            create_page_with_order(3),
+            create_page_with_order(1),
+        ];
+        let (pages, _) = sort_pages(input, SortBy::None);
+        // Should be sorted by date
+        assert_eq!(pages[0].clone().meta.order.unwrap(), 2);
+        assert_eq!(pages[1].clone().meta.order.unwrap(), 3);
+        assert_eq!(pages[2].clone().meta.order.unwrap(), 1);
+    }
+
+    #[test]
+    fn ignore_page_with_missing_field() {
+        let input = vec![
+            create_page_with_order(2),
+            create_page_with_order(3),
+            create_page_with_date("2019-01-01"),
+        ];
+        let (pages, unsorted) = sort_pages(input, SortBy::Order);
+        assert_eq!(pages.len(), 2);
+        assert_eq!(unsorted.len(), 1);
+    }
+
+    #[test]
+    fn can_populate_previous_and_next_pages() {
+        let input = vec![
+            create_page_with_order(3),
+            create_page_with_order(2),
+            create_page_with_order(1),
+        ];
+        let pages = populate_previous_and_next_pages(input.as_slice());
+
+        assert!(pages[0].clone().next.is_none());
+        assert!(pages[0].clone().previous.is_some());
+        assert_eq!(pages[0].clone().previous.unwrap().meta.order.unwrap(), 2);
+
+        assert!(pages[1].clone().next.is_some());
+        assert!(pages[1].clone().previous.is_some());
+        assert_eq!(pages[1].clone().next.unwrap().meta.order.unwrap(), 3);
+        assert_eq!(pages[1].clone().previous.unwrap().meta.order.unwrap(), 1);
+
+        assert!(pages[2].clone().next.is_some());
+        assert!(pages[2].clone().previous.is_none());
+        assert_eq!(pages[2].clone().next.unwrap().meta.order.unwrap(), 2);
+    }
+}
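
The contract of `sort_pages` is worth spelling out: it consumes the input vector and hands back `(sorted, cannot_be_sorted)`, where the second vector holds pages missing the field the chosen method needs. A hedged usage sketch mirroring the tests above (`pages` is an assumed, already-parsed `Vec<Page>`):

// Illustrative: sort by date (newest first), then wire up prev/next links.
let (sorted, cannot_be_sorted) = sort_pages(pages, SortBy::Date);
// Pages without a `date` in their front matter end up in `cannot_be_sorted`.
let linked = populate_previous_and_next_pages(sorted.as_slice());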

src/content/utils.rs (new file, 77 lines)
@@ -0,0 +1,77 @@
+use std::fs::read_dir;
+use std::path::{Path, PathBuf};
+
+/// Looks into the current folder for the path and see if there's anything that is not a .md
+/// file. Those will be copied next to the rendered .html file
+pub fn find_related_assets(path: &Path) -> Vec<PathBuf> {
+    let mut assets = vec![];
+
+    for entry in read_dir(path).unwrap().filter_map(|e| e.ok()) {
+        let entry_path = entry.path();
+        if entry_path.is_file() {
+            match entry_path.extension() {
+                Some(e) => match e.to_str() {
+                    Some("md") => continue,
+                    _ => assets.push(entry_path.to_path_buf()),
+                },
+                None => continue,
+            }
+        }
+    }
+
+    assets
+}
+
+/// Get word count and estimated reading time
+pub fn get_reading_analytics(content: &str) -> (usize, usize) {
+    // Only works for latin language but good enough for a start
+    let word_count: usize = content.split_whitespace().count();
+
+    // https://help.medium.com/hc/en-us/articles/214991667-Read-time
+    // 275 seems a bit too high though
+    (word_count, (word_count / 200))
+}
+
+
+#[cfg(test)]
+mod tests {
+    use std::fs::File;
+
+    use tempdir::TempDir;
+
+    use super::{find_related_assets, get_reading_analytics};
+
+    #[test]
+    fn can_find_related_assets() {
+        let tmp_dir = TempDir::new("example").expect("create temp dir");
+        File::create(tmp_dir.path().join("index.md")).unwrap();
+        File::create(tmp_dir.path().join("example.js")).unwrap();
+        File::create(tmp_dir.path().join("graph.jpg")).unwrap();
+        File::create(tmp_dir.path().join("fail.png")).unwrap();
+
+        let assets = find_related_assets(tmp_dir.path());
+        assert_eq!(assets.len(), 3);
+        assert_eq!(assets.iter().filter(|p| p.extension().unwrap() != "md").count(), 3);
+        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "example.js").count(), 1);
+        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "graph.jpg").count(), 1);
+        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "fail.png").count(), 1);
+    }
+
+    #[test]
+    fn reading_analytics_short_text() {
+        let (word_count, reading_time) = get_reading_analytics("Hello World");
+        assert_eq!(word_count, 2);
+        assert_eq!(reading_time, 0);
+    }
+
+    #[test]
+    fn reading_analytics_long_text() {
+        let mut content = String::new();
+        for _ in 0..1000 {
+            content.push_str(" Hello world");
+        }
+        let (word_count, reading_time) = get_reading_analytics(&content);
+        assert_eq!(word_count, 2000);
+        assert_eq!(reading_time, 10);
+    }
+}
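
The reading-time estimate above is plain integer division at 200 words per minute, which is why the tests expect 0 minutes for two words and 10 for 2000 (the linked Medium article uses 275, which the comment judges too high). An illustrative check:

let (words, minutes) = get_reading_analytics("Hello World");
assert_eq!((words, minutes), (2, 0)); // 2 / 200 rounds down to 0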

src/front_matter.rs (deleted, 177 lines)

@@ -1,177 +0,0 @@
-use std::collections::HashMap;
-use std::path::Path;
-
-use toml;
-use tera::Value;
-use chrono::prelude::*;
-use regex::Regex;
-
-
-use errors::{Result, ResultExt};
-
-
-lazy_static! {
-    static ref PAGE_RE: Regex = Regex::new(r"^\r?\n?\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
-}
-
-#[derive(Debug, Copy, Clone, PartialEq, Serialize, Deserialize)]
-#[serde(rename_all = "lowercase")]
-pub enum SortBy {
-    Date,
-    Order,
-    None,
-}
-
-/// The front matter of every page
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
-pub struct FrontMatter {
-    /// <title> of the page
-    pub title: Option<String>,
-    /// Description in <meta> that appears when linked, e.g. on twitter
-    pub description: Option<String>,
-    /// Date if we want to order pages (ie blog post)
-    pub date: Option<String>,
-    /// The page slug. Will be used instead of the filename if present
-    /// Can't be an empty string if present
-    pub slug: Option<String>,
-    /// The url the page appears at, overrides the slug if set in the front-matter
-    /// otherwise is set after parsing front matter and sections
-    /// Can't be an empty string if present
-    pub url: Option<String>,
-    /// Tags, not to be confused with categories
-    pub tags: Option<Vec<String>>,
-    /// Whether this page is a draft and should be published or not
-    pub draft: Option<bool>,
-    /// Only one category allowed
-    pub category: Option<String>,
-    /// Whether to sort by "date", "order" or "none". Defaults to `none`.
-    #[serde(skip_serializing)]
-    pub sort_by: Option<SortBy>,
-    /// Integer to use to order content. Lowest is at the bottom, highest first
-    pub order: Option<usize>,
-    /// Optional template, if we want to specify which template to render for that page
-    #[serde(skip_serializing)]
-    pub template: Option<String>,
-    /// How many pages to be displayed per paginated page. No pagination will happen if this isn't set
-    #[serde(skip_serializing)]
-    pub paginate_by: Option<usize>,
-    /// Path to be used by pagination: the page number will be appended after it. Defaults to `page`.
-    #[serde(skip_serializing)]
-    pub paginate_path: Option<String>,
-    /// Whether to render that page/section or not. Defaults to `true`.
-    #[serde(skip_serializing)]
-    pub render: Option<bool>,
-    /// Any extra parameter present in the front matter
-    pub extra: Option<HashMap<String, Value>>,
-}
-
-impl FrontMatter {
-    pub fn parse(toml: &str) -> Result<FrontMatter> {
-        let mut f: FrontMatter = match toml::from_str(toml) {
-            Ok(d) => d,
-            Err(e) => bail!(e),
-        };
-
-        if let Some(ref slug) = f.slug {
-            if slug == "" {
-                bail!("`slug` can't be empty if present")
-            }
-        }
-
-        if let Some(ref url) = f.url {
-            if url == "" {
-                bail!("`url` can't be empty if present")
-            }
-        }
-
-        if f.paginate_path.is_none() {
-            f.paginate_path = Some("page".to_string());
-        }
-
-        if f.render.is_none() {
-            f.render = Some(true);
-        }
-
-        Ok(f)
-    }
-
-    /// Converts the date in the front matter, which can be in 2 formats, into a NaiveDateTime
-    pub fn date(&self) -> Option<NaiveDateTime> {
-        match self.date {
-            Some(ref d) => {
-                if d.contains('T') {
-                    DateTime::parse_from_rfc3339(d).ok().and_then(|s| Some(s.naive_local()))
-                } else {
-                    NaiveDate::parse_from_str(d, "%Y-%m-%d").ok().and_then(|s| Some(s.and_hms(0,0,0)))
-                }
-            },
-            None => None,
-        }
-    }
-
-    pub fn order(&self) -> usize {
-        self.order.unwrap()
-    }
-
-    /// Returns the current sorting method, defaults to `None` (== no sorting)
-    pub fn sort_by(&self) -> SortBy {
-        match self.sort_by {
-            Some(ref s) => *s,
-            None => SortBy::None,
-        }
-    }
-
-    /// Only applies to section, whether it is paginated or not.
-    pub fn is_paginated(&self) -> bool {
-        match self.paginate_by {
-            Some(v) => v > 0,
-            None => false
-        }
-    }
-
-    pub fn should_render(&self) -> bool {
-        self.render.unwrap()
-    }
-}
-
-impl Default for FrontMatter {
-    fn default() -> FrontMatter {
-        FrontMatter {
-            title: None,
-            description: None,
-            date: None,
-            slug: None,
-            url: None,
-            tags: None,
-            draft: None,
-            category: None,
-            sort_by: None,
-            order: None,
-            template: None,
-            paginate_by: None,
-            paginate_path: None,
-            render: None,
-            extra: None,
-        }
-    }
-}
-
-/// Split a file between the front matter and its content
-/// It will parse the front matter as well and returns any error encountered
-pub fn split_content(file_path: &Path, content: &str) -> Result<(FrontMatter, String)> {
-    if !PAGE_RE.is_match(content) {
-        bail!("Couldn't find front matter in `{}`. Did you forget to add `+++`?", file_path.to_string_lossy());
-    }
-
-    // 2. extract the front matter and the content
-    let caps = PAGE_RE.captures(content).unwrap();
-    // caps[0] is the full match
-    let front_matter = &caps[1];
-    let content = &caps[2];
-
-    // 3. create our page, parse front matter and assign all of that
-    let meta = FrontMatter::parse(front_matter)
-        .chain_err(|| format!("Error when parsing front matter of file `{}`", file_path.to_string_lossy()))?;
-
-    Ok((meta, content.to_string()))
-}

src/front_matter/mod.rs (new file, 122 lines)
@@ -0,0 +1,122 @@
+use std::path::Path;
+
+use regex::Regex;
+
+use errors::{Result, ResultExt};
+
+mod page;
+mod section;
+
+pub use self::page::PageFrontMatter;
+pub use self::section::{SectionFrontMatter};
+
+lazy_static! {
+    static ref PAGE_RE: Regex = Regex::new(r"^[[:space:]]*\+\+\+\r?\n((?s).*?(?-s))\+\+\+\r?\n?((?s).*(?-s))$").unwrap();
+}
+
+/// Split a file between the front matter and its content
+/// Will return an error if the front matter wasn't found
+fn split_content(file_path: &Path, content: &str) -> Result<(String, String)> {
+    if !PAGE_RE.is_match(content) {
+        bail!("Couldn't find front matter in `{}`. Did you forget to add `+++`?", file_path.to_string_lossy());
+    }
+
+    // 2. extract the front matter and the content
+    let caps = PAGE_RE.captures(content).unwrap();
+    // caps[0] is the full match
+    // caps[1] => front matter
+    // caps[2] => content
+    Ok((caps[1].to_string(), caps[2].to_string()))
+}
+
+/// Split a file between the front matter and its content.
+/// Returns a parsed `SectionFrontMatter` and the rest of the content
+pub fn split_section_content(file_path: &Path, content: &str) -> Result<(SectionFrontMatter, String)> {
+    let (front_matter, content) = split_content(file_path, content)?;
+    let meta = SectionFrontMatter::parse(&front_matter)
+        .chain_err(|| format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()))?;
+    Ok((meta, content))
+}
+
+/// Split a file between the front matter and its content
+/// Returns a parsed `PageFrontMatter` and the rest of the content
+pub fn split_page_content(file_path: &Path, content: &str) -> Result<(PageFrontMatter, String)> {
+    let (front_matter, content) = split_content(file_path, content)?;
+    let meta = PageFrontMatter::parse(&front_matter)
+        .chain_err(|| format!("Error when parsing front matter of section `{}`", file_path.to_string_lossy()))?;
+    Ok((meta, content))
+}
+
+#[cfg(test)]
+mod tests {
+    use std::path::Path;
+
+    use super::{split_section_content, split_page_content};
+
+    #[test]
+    fn can_split_page_content_valid() {
+        let content = r#"
++++
+title = "Title"
+description = "hey there"
+date = "2002/10/12"
++++
+Hello
+"#;
+        let (front_matter, content) = split_page_content(Path::new(""), content).unwrap();
+        assert_eq!(content, "Hello\n");
+        assert_eq!(front_matter.title.unwrap(), "Title");
+    }
+
+    #[test]
+    fn can_split_section_content_valid() {
+        let content = r#"
++++
+paginate_by = 10
++++
+Hello
+"#;
+        let (front_matter, content) = split_section_content(Path::new(""), content).unwrap();
+        assert_eq!(content, "Hello\n");
+        assert!(front_matter.is_paginated());
+    }
+
+    #[test]
+    fn can_split_content_with_only_frontmatter_valid() {
+        let content = r#"
++++
+title = "Title"
+description = "hey there"
+date = "2002/10/12"
++++"#;
+        let (front_matter, content) = split_page_content(Path::new(""), content).unwrap();
+        assert_eq!(content, "");
+        assert_eq!(front_matter.title.unwrap(), "Title");
+    }
+
+    #[test]
+    fn can_split_content_lazily() {
+        let content = r#"
++++
+title = "Title"
+description = "hey there"
+date = "2002-10-02T15:00:00Z"
++++
++++"#;
+        let (front_matter, content) = split_page_content(Path::new(""), content).unwrap();
+        assert_eq!(content, "+++");
+        assert_eq!(front_matter.title.unwrap(), "Title");
+    }
+
+    #[test]
+    fn errors_if_cannot_locate_frontmatter() {
+        let content = r#"
++++
+title = "Title"
+description = "hey there"
+date = "2002/10/12""#;
+        let res = split_page_content(Path::new(""), content);
+        assert!(res.is_err());
+    }
+
+}
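
The `PAGE_RE` regex above anchors a `+++ ... +++` block at the top of the file; the leading `[[:space:]]*` is what lets the test inputs begin with a newline. A sketch of the accepted input shape, modelled directly on the tests:

// Illustrative: the document shape split_page_content() accepts.
let content = r#"
+++
title = "Title"
+++
Hello
"#;
let (front_matter, body) = split_page_content(Path::new(""), content).unwrap();
assert_eq!(body, "Hello\n"); // everything after the closing +++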

src/front_matter/page.rs (new file, 219 lines)
@@ -0,0 +1,219 @@
+use std::collections::HashMap;
+
+use chrono::prelude::*;
+use tera::Value;
+use toml;
+
+use errors::{Result};
+
+/// The front matter of every page
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct PageFrontMatter {
+    /// <title> of the page
+    pub title: Option<String>,
+    /// Description in <meta> that appears when linked, e.g. on twitter
+    pub description: Option<String>,
+    /// Date if we want to order pages (ie blog post)
+    pub date: Option<String>,
+    /// The page slug. Will be used instead of the filename if present
+    /// Can't be an empty string if present
+    pub slug: Option<String>,
+    /// The url the page appears at, overrides the slug if set in the front-matter
+    /// otherwise is set after parsing front matter and sections
+    /// Can't be an empty string if present
+    pub url: Option<String>,
+    /// Tags, not to be confused with categories
+    pub tags: Option<Vec<String>>,
+    /// Whether this page is a draft and should be published or not
+    pub draft: Option<bool>,
+    /// Only one category allowed. Can't be an empty string if present
+    pub category: Option<String>,
+    /// Integer to use to order content. Lowest is at the bottom, highest first
+    pub order: Option<usize>,
+    /// Specify a template different from `page.html` to use for that page
+    #[serde(skip_serializing)]
+    pub template: Option<String>,
+    /// Any extra parameter present in the front matter
+    pub extra: Option<HashMap<String, Value>>,
+}
+
+impl PageFrontMatter {
+    pub fn parse(toml: &str) -> Result<PageFrontMatter> {
+        let f: PageFrontMatter = match toml::from_str(toml) {
+            Ok(d) => d,
+            Err(e) => bail!(e),
+        };
+
+        if let Some(ref slug) = f.slug {
+            if slug == "" {
+                bail!("`slug` can't be empty if present")
+            }
+        }
+
+        if let Some(ref url) = f.url {
+            if url == "" {
+                bail!("`url` can't be empty if present")
+            }
+        }
+
+        if let Some(ref category) = f.category {
+            if category == "" {
+                bail!("`category` can't be empty if present")
+            }
+        }
+
+        Ok(f)
+    }
+
+    /// Converts the date in the front matter, which can be in 2 formats, into a NaiveDateTime
+    pub fn date(&self) -> Option<NaiveDateTime> {
+        match self.date {
+            Some(ref d) => {
+                if d.contains('T') {
+                    DateTime::parse_from_rfc3339(d).ok().and_then(|s| Some(s.naive_local()))
+                } else {
+                    NaiveDate::parse_from_str(d, "%Y-%m-%d").ok().and_then(|s| Some(s.and_hms(0,0,0)))
+                }
+            },
+            None => None,
+        }
+    }
+
+    pub fn order(&self) -> usize {
+        self.order.unwrap()
+    }
+
+    pub fn has_tags(&self) -> bool {
+        match self.tags {
+            Some(ref t) => !t.is_empty(),
+            None => false
+        }
+    }
+}
+
+impl Default for PageFrontMatter {
+    fn default() -> PageFrontMatter {
+        PageFrontMatter {
+            title: None,
+            description: None,
+            date: None,
+            slug: None,
+            url: None,
+            tags: None,
+            draft: None,
+            category: None,
+            order: None,
+            template: None,
+            extra: None,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::PageFrontMatter;
+
+    #[test]
+    fn can_have_empty_front_matter() {
+        let content = r#" "#;
+        let res = PageFrontMatter::parse(content);
+        assert!(res.is_ok());
+    }
+
+    #[test]
+    fn can_parse_valid_front_matter() {
+        let content = r#"
+title = "Hello"
+description = "hey there""#;
+        let res = PageFrontMatter::parse(content);
+        assert!(res.is_ok());
+        let res = res.unwrap();
+        assert_eq!(res.title.unwrap(), "Hello".to_string());
+        assert_eq!(res.description.unwrap(), "hey there".to_string())
+    }
+
+    #[test]
+    fn can_parse_tags() {
+        let content = r#"
+title = "Hello"
+description = "hey there"
+slug = "hello-world"
+tags = ["rust", "html"]"#;
+        let res = PageFrontMatter::parse(content);
+        assert!(res.is_ok());
+        let res = res.unwrap();
+
+        assert_eq!(res.title.unwrap(), "Hello".to_string());
+        assert_eq!(res.slug.unwrap(), "hello-world".to_string());
+        assert_eq!(res.tags.unwrap(), ["rust".to_string(), "html".to_string()]);
+    }
+
+    #[test]
+    fn errors_with_invalid_front_matter() {
+        let content = r#"title = 1\n"#;
+        let res = PageFrontMatter::parse(content);
+        assert!(res.is_err());
+    }
+
+    #[test]
+    fn errors_on_non_string_tag() {
+        let content = r#"
+title = "Hello"
+description = "hey there"
+slug = "hello-world"
+tags = ["rust", 1]"#;
+        let res = PageFrontMatter::parse(content);
+        assert!(res.is_err());
+    }
+
+    #[test]
+    fn errors_on_present_but_empty_slug() {
+        let content = r#"
+title = "Hello"
+description = "hey there"
+slug = """#;
+        let res = PageFrontMatter::parse(content);
+        assert!(res.is_err());
+    }
+
+    #[test]
+    fn errors_on_present_but_empty_url() {
+        let content = r#"
+title = "Hello"
+description = "hey there"
+url = """#;
+        let res = PageFrontMatter::parse(content);
+        assert!(res.is_err());
+    }
+
+    #[test]
+    fn can_parse_date_yyyy_mm_dd() {
+        let content = r#"
+title = "Hello"
+description = "hey there"
+date = "2016-10-10""#;
+        let res = PageFrontMatter::parse(content).unwrap();
+        assert!(res.date().is_some());
+    }
+
+    #[test]
+    fn can_parse_date_rfc3339() {
+        let content = r#"
+title = "Hello"
+description = "hey there"
+date = "2002-10-02T15:00:00Z""#;
+        let res = PageFrontMatter::parse(content).unwrap();
+        assert!(res.date().is_some());
+    }
+
+    #[test]
+    fn cannot_parse_random_date_format() {
+        let content = r#"
+title = "Hello"
+description = "hey there"
+date = "2002/10/12""#;
+        let res = PageFrontMatter::parse(content).unwrap();
+        assert!(res.date().is_none());
+    }
+
+}
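
Per the branch in `date()` above, exactly two formats are accepted: RFC 3339 when the string contains a `T`, otherwise `YYYY-MM-DD`; anything else yields `None`. Illustrative:

let mut fm = PageFrontMatter::default();
fm.date = Some("2016-10-10".to_string());           // YYYY-MM-DD -> Some
assert!(fm.date().is_some());
fm.date = Some("2002-10-02T15:00:00Z".to_string()); // RFC 3339 -> Some
assert!(fm.date().is_some());
fm.date = Some("2002/10/12".to_string());           // anything else -> None
assert!(fm.date().is_none());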

src/front_matter/section.rs (new file, 93 lines)
@@ -0,0 +1,93 @@
+use std::collections::HashMap;
+
+use tera::Value;
+use toml;
+
+use errors::{Result};
+use content::SortBy;
+
+static DEFAULT_PAGINATE_PATH: &'static str = "page";
+
+
+/// The front matter of every section
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct SectionFrontMatter {
+    /// <title> of the page
+    pub title: Option<String>,
+    /// Description in <meta> that appears when linked, e.g. on twitter
+    pub description: Option<String>,
+    /// Whether to sort by "date", "order" or "none". Defaults to `none`.
+    #[serde(skip_serializing)]
+    pub sort_by: Option<SortBy>,
+    /// Optional template, if we want to specify which template to render for that page
+    #[serde(skip_serializing)]
+    pub template: Option<String>,
+    /// How many pages to be displayed per paginated page. No pagination will happen if this isn't set
+    #[serde(skip_serializing)]
+    pub paginate_by: Option<usize>,
+    /// Path to be used by pagination: the page number will be appended after it. Defaults to `page`.
+    #[serde(skip_serializing)]
+    pub paginate_path: Option<String>,
+    /// Whether to render that section or not. Defaults to `true`.
+    /// Useful when the section is only there to organize things but is not meant
+    /// to be used directly, like a posts section in a personal site
+    #[serde(skip_serializing)]
+    pub render: Option<bool>,
+    /// Any extra parameter present in the front matter
+    pub extra: Option<HashMap<String, Value>>,
+}
+
+impl SectionFrontMatter {
+    pub fn parse(toml: &str) -> Result<SectionFrontMatter> {
+        let mut f: SectionFrontMatter = match toml::from_str(toml) {
+            Ok(d) => d,
+            Err(e) => bail!(e),
+        };
+
+        if f.paginate_path.is_none() {
+            f.paginate_path = Some(DEFAULT_PAGINATE_PATH.to_string());
+        }
+
+        if f.render.is_none() {
+            f.render = Some(true);
+        }
+
+        if f.sort_by.is_none() {
+            f.sort_by = Some(SortBy::None);
+        }
+
+        Ok(f)
+    }
+
+    /// Returns the current sorting method, defaults to `None` (== no sorting)
+    pub fn sort_by(&self) -> SortBy {
+        self.sort_by.unwrap()
+    }
+
+    /// Only applies to section, whether it is paginated or not.
+    pub fn is_paginated(&self) -> bool {
+        match self.paginate_by {
+            Some(v) => v > 0,
+            None => false
+        }
+    }
+
+    pub fn should_render(&self) -> bool {
+        self.render.unwrap()
+    }
+}
+
+impl Default for SectionFrontMatter {
+    fn default() -> SectionFrontMatter {
+        SectionFrontMatter {
+            title: None,
+            description: None,
+            sort_by: None,
+            template: None,
+            paginate_by: None,
+            paginate_path: Some(DEFAULT_PAGINATE_PATH.to_string()),
+            render: Some(true),
+            extra: None,
+        }
+    }
+}
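
Note that `parse` backfills `paginate_path`, `render` and `sort_by`, which is what makes the bare `unwrap()` calls in `sort_by()` and `should_render()` safe afterwards. An illustrative check:

let meta = SectionFrontMatter::parse(r#"paginate_by = 5"#).unwrap();
assert_eq!(meta.sort_by(), SortBy::None); // defaulted by parse()
assert!(meta.should_render());            // defaulted to true
assert!(meta.is_paginated());             // Some(5) > 0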

src/lib.rs (17 changed lines)
@@ -22,21 +22,16 @@ extern crate tempdir;
 mod utils;
 mod config;
 pub mod errors;
-mod page;
 mod front_matter;
+mod content;
 mod site;
 mod markdown;
-mod section;
-mod pagination;
-/// Additional filters for Tera
-mod filters;
-/// Global fns for Tera
-mod global_fns;
+// Filters, Global Fns and default instance of Tera
+mod templates;
 
-pub use site::{Site, GUTENBERG_TERA};
+pub use site::{Site};
 pub use config::{Config, get_config};
-pub use front_matter::{FrontMatter, split_content, SortBy};
-pub use page::{Page, populate_previous_and_next_pages};
-pub use section::{Section};
+pub use front_matter::{PageFrontMatter, SectionFrontMatter, split_page_content, split_section_content};
+pub use content::{Page, Section, SortBy, sort_pages, populate_previous_and_next_pages};
 pub use utils::{create_file};
 pub use markdown::markdown_to_html;
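
From a consumer of the crate (e.g. the CLI binary), the reorganised public surface now looks roughly like this; a hedged sketch, assuming the crate name `gutenberg`:

extern crate gutenberg;

// Content types and sorting now come from one re-exported module tree.
use gutenberg::{Page, Section, SortBy, sort_pages, populate_previous_and_next_pages};
use gutenberg::{PageFrontMatter, SectionFrontMatter, split_page_content};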
src/markdown.rs

@@ -340,7 +340,7 @@ pub fn markdown_to_html(content: &str, permalinks: &HashMap<String, String>, ter
 mod tests {
     use std::collections::HashMap;
 
-    use site::GUTENBERG_TERA;
+    use templates::GUTENBERG_TERA;
     use tera::Tera;
 
     use config::Config;

src/page.rs (deleted, 428 lines)
@@ -1,428 +0,0 @@
-/// A page, can be a blog post or a basic page
-use std::collections::HashMap;
-use std::fs::{read_dir};
-use std::path::{Path, PathBuf};
-use std::result::Result as StdResult;
-
-
-use tera::{Tera, Context};
-use serde::ser::{SerializeStruct, self};
-use slug::slugify;
-
-use errors::{Result, ResultExt};
-use config::Config;
-use front_matter::{FrontMatter, SortBy, split_content};
-use markdown::markdown_to_html;
-use utils::{read_file, find_content_components};
-
-
-
-/// Looks into the current folder for the path and see if there's anything that is not a .md
-/// file. Those will be copied next to the rendered .html file
-fn find_related_assets(path: &Path) -> Vec<PathBuf> {
-    let mut assets = vec![];
-
-    for entry in read_dir(path).unwrap().filter_map(|e| e.ok()) {
-        let entry_path = entry.path();
-        if entry_path.is_file() {
-            match entry_path.extension() {
-                Some(e) => match e.to_str() {
-                    Some("md") => continue,
-                    _ => assets.push(entry_path.to_path_buf()),
-                },
-                None => continue,
-            }
-        }
-    }
-
-    assets
-}
-
-
-#[derive(Clone, Debug, PartialEq)]
-pub struct Page {
-    /// The .md path
-    pub file_path: PathBuf,
-    /// The .md path, starting from the content directory, with / slashes
-    pub relative_path: String,
-    /// The parent directory of the file. Is actually the grand parent directory
-    /// if it's an asset folder
-    pub parent_path: PathBuf,
-    /// The name of the .md file
-    pub file_name: String,
-    /// The directories above our .md file
-    /// for example a file at content/kb/solutions/blabla.md will have 2 components:
-    /// `kb` and `solutions`
-    pub components: Vec<String>,
-    /// The actual content of the page, in markdown
-    pub raw_content: String,
-    /// All the non-md files we found next to the .md file
-    pub assets: Vec<PathBuf>,
-    /// The HTML rendered of the page
-    pub content: String,
-    /// The front matter meta-data
-    pub meta: FrontMatter,
-
-    /// The slug of that page.
-    /// First tries to find the slug in the meta and defaults to filename otherwise
-    pub slug: String,
-    /// The URL path of the page
-    pub path: String,
-    /// The full URL for that page
-    pub permalink: String,
-    /// The summary for the article, defaults to None
-    /// When <!-- more --> is found in the text, will take the content up to that part
-    /// as summary
-    pub summary: Option<String>,
-
-    /// The previous page, by whatever sorting is used for the index/section
-    pub previous: Option<Box<Page>>,
-    /// The next page, by whatever sorting is used for the index/section
-    pub next: Option<Box<Page>>,
-}
-
-
-impl Page {
-    pub fn new(meta: FrontMatter) -> Page {
-        Page {
-            file_path: PathBuf::new(),
-            relative_path: String::new(),
-            parent_path: PathBuf::new(),
-            file_name: "".to_string(),
-            components: vec![],
-            raw_content: "".to_string(),
-            assets: vec![],
-            content: "".to_string(),
-            slug: "".to_string(),
-            path: "".to_string(),
-            permalink: "".to_string(),
-            summary: None,
-            meta: meta,
-            previous: None,
-            next: None,
-        }
-    }
-
-    pub fn has_date(&self) -> bool {
-        self.meta.date.is_some()
-    }
-
-    /// Get word count and estimated reading time
-    pub fn get_reading_analytics(&self) -> (usize, usize) {
-        // Only works for latin language but good enough for a start
-        let word_count: usize = self.raw_content.split_whitespace().count();
-
-        // https://help.medium.com/hc/en-us/articles/214991667-Read-time
-        // 275 seems a bit too high though
-        (word_count, (word_count / 200))
-    }
-
-    /// Parse a page given the content of the .md file
-    /// Files without front matter or with invalid front matter are considered
-    /// erroneous
-    pub fn parse(file_path: &Path, content: &str, config: &Config) -> Result<Page> {
-        // 1. separate front matter from content
-        let (meta, content) = split_content(file_path, content)?;
-        let mut page = Page::new(meta);
-        page.file_path = file_path.to_path_buf();
-        page.parent_path = page.file_path.parent().unwrap().to_path_buf();
-        page.raw_content = content;
-
-        let path = Path::new(file_path);
-        page.file_name = path.file_stem().unwrap().to_string_lossy().to_string();
-
-        page.slug = {
-            if let Some(ref slug) = page.meta.slug {
-                slug.trim().to_string()
-            } else {
-                slugify(page.file_name.clone())
-            }
-        };
-        page.components = find_content_components(&page.file_path);
-        page.relative_path = format!("{}/{}.md", page.components.join("/"), page.file_name);
-
-        // 4. Find sections
-        // Pages with custom urls exists outside of sections
-        let mut path_set = false;
-        if let Some(ref u) = page.meta.url {
-            page.path = u.trim().to_string();
-            path_set = true;
-        }
-
-        if !page.components.is_empty() {
-            // If we have a folder with an asset, don't consider it as a component
-            if page.file_name == "index" {
-                page.components.pop();
-                // also set parent_path to grandparent instead
-                page.parent_path = page.parent_path.parent().unwrap().to_path_buf();
-            }
-            if !path_set {
-                // Don't add a trailing slash to sections
-                page.path = format!("{}/{}", page.components.join("/"), page.slug);
-            }
-        } else if !path_set {
-            page.path = page.slug.clone();
-        }
-
-        page.permalink = config.make_permalink(&page.path);
-
-        Ok(page)
-    }
-
-    /// Read and parse a .md file into a Page struct
-    pub fn from_file<P: AsRef<Path>>(path: P, config: &Config) -> Result<Page> {
-        let path = path.as_ref();
-        let content = read_file(path)?;
-        let mut page = Page::parse(path, &content, config)?;
-        page.assets = find_related_assets(path.parent().unwrap());
-
-        if !page.assets.is_empty() && page.file_name != "index" {
-            bail!("Page `{}` has assets ({:?}) but is not named index.md", path.display(), page.assets);
-        }
-
-        Ok(page)
-
-    }
-
-    /// We need access to all pages url to render links relative to content
-    /// so that can't happen at the same time as parsing
-    pub fn render_markdown(&mut self, permalinks: &HashMap<String, String>, tera: &Tera, config: &Config) -> Result<()> {
-        self.content = markdown_to_html(&self.raw_content, permalinks, tera, config)?;
-
-        if self.raw_content.contains("<!-- more -->") {
-            self.summary = Some({
-                let summary = self.raw_content.splitn(2, "<!-- more -->").collect::<Vec<&str>>()[0];
-                markdown_to_html(summary, permalinks, tera, config)?
-            })
-        }
-
-        Ok(())
-    }
-
-    /// Renders the page using the default layout, unless specified in front-matter
-    pub fn render_html(&self, tera: &Tera, config: &Config) -> Result<String> {
-        let tpl_name = match self.meta.template {
-            Some(ref l) => l.to_string(),
-            None => "page.html".to_string()
-        };
-
-        let mut context = Context::new();
-        context.add("config", config);
-        context.add("page", self);
-        context.add("current_url", &self.permalink);
-        context.add("current_path", &self.path);
-
-        tera.render(&tpl_name, &context)
-            .chain_err(|| format!("Failed to render page '{}'", self.file_path.display()))
-    }
-}
-
-impl ser::Serialize for Page {
-    fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: ser::Serializer {
-        let mut state = serializer.serialize_struct("page", 16)?;
-        state.serialize_field("content", &self.content)?;
-        state.serialize_field("title", &self.meta.title)?;
-        state.serialize_field("description", &self.meta.description)?;
-        state.serialize_field("date", &self.meta.date)?;
-        state.serialize_field("slug", &self.slug)?;
-        state.serialize_field("path", &format!("/{}", self.path))?;
-        state.serialize_field("permalink", &self.permalink)?;
-        state.serialize_field("summary", &self.summary)?;
-        state.serialize_field("tags", &self.meta.tags)?;
-        state.serialize_field("draft", &self.meta.draft)?;
-        state.serialize_field("category", &self.meta.category)?;
-        state.serialize_field("extra", &self.meta.extra)?;
-        let (word_count, reading_time) = self.get_reading_analytics();
-        state.serialize_field("word_count", &word_count)?;
-        state.serialize_field("reading_time", &reading_time)?;
-        state.serialize_field("previous", &self.previous)?;
-        state.serialize_field("next", &self.next)?;
-        state.end()
-    }
-}
-
-/// Sort pages using the method for the given section
-///
-/// Any pages that doesn't have a date when the sorting method is date or order
-/// when the sorting method is order will be ignored.
-pub fn sort_pages(pages: Vec<Page>, sort_by: SortBy) -> (Vec<Page>, Vec<Page>) {
-    match sort_by {
-        SortBy::Date => {
-            let mut can_be_sorted = vec![];
-            let mut cannot_be_sorted = vec![];
-            for page in pages {
-                if page.meta.date.is_some() {
-                    can_be_sorted.push(page);
-                } else {
-                    cannot_be_sorted.push(page);
-                }
-            }
-            can_be_sorted.sort_by(|a, b| b.meta.date().unwrap().cmp(&a.meta.date().unwrap()));
-
-            (can_be_sorted, cannot_be_sorted)
-        },
-        SortBy::Order => {
-            let mut can_be_sorted = vec![];
-            let mut cannot_be_sorted = vec![];
-            for page in pages {
-                if page.meta.order.is_some() {
-                    can_be_sorted.push(page);
-                } else {
-                    cannot_be_sorted.push(page);
-                }
-            }
-            can_be_sorted.sort_by(|a, b| b.meta.order().cmp(&a.meta.order()));
-
-            (can_be_sorted, cannot_be_sorted)
-        },
-        SortBy::None => {
-            let mut p = vec![];
-            for page in pages {
-                p.push(page);
-            }
-
-            (p, vec![])
-        },
-    }
-}
-
-/// Horribly inefficient way to set previous and next on each pages
-/// So many clones
-pub fn populate_previous_and_next_pages(input: &[Page]) -> Vec<Page> {
-    let pages = input.to_vec();
-    let mut res = Vec::new();
-
-    // the input is already sorted
-    // We might put prev/next randomly if a page is missing date/order, probably fine
-    for (i, page) in input.iter().enumerate() {
-        let mut new_page = page.clone();
-
-        if i > 0 {
-            let next = &pages[i - 1];
-            new_page.next = Some(Box::new(next.clone()));
-        }
-
-        if i < input.len() - 1 {
-            let previous = &pages[i + 1];
-            new_page.previous = Some(Box::new(previous.clone()));
-        }
-        res.push(new_page);
-    }
-
-    res
-}
-
-#[cfg(test)]
-mod tests {
-    use tempdir::TempDir;
-
-    use std::fs::File;
-
-    use front_matter::{FrontMatter, SortBy};
-    use super::{Page, find_related_assets, sort_pages, populate_previous_and_next_pages};
-
-    fn create_page_with_date(date: &str) -> Page {
-        let mut front_matter = FrontMatter::default();
-        front_matter.date = Some(date.to_string());
-        Page::new(front_matter)
-    }
-
-    fn create_page_with_order(order: usize) -> Page {
-        let mut front_matter = FrontMatter::default();
-        front_matter.order = Some(order);
-        Page::new(front_matter)
-    }
-
-    #[test]
-    fn test_find_related_assets() {
-        let tmp_dir = TempDir::new("example").expect("create temp dir");
-        File::create(tmp_dir.path().join("index.md")).unwrap();
-        File::create(tmp_dir.path().join("example.js")).unwrap();
-        File::create(tmp_dir.path().join("graph.jpg")).unwrap();
-        File::create(tmp_dir.path().join("fail.png")).unwrap();
-
-        let assets = find_related_assets(tmp_dir.path());
-        assert_eq!(assets.len(), 3);
-        assert_eq!(assets.iter().filter(|p| p.extension().unwrap() != "md").count(), 3);
-        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "example.js").count(), 1);
-        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "graph.jpg").count(), 1);
-        assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "fail.png").count(), 1);
-    }
-
-    #[test]
-    fn test_can_sort_dates() {
-        let input = vec![
-            create_page_with_date("2018-01-01"),
-            create_page_with_date("2017-01-01"),
-            create_page_with_date("2019-01-01"),
-        ];
-        let (pages, _) = sort_pages(input, SortBy::Date);
-        // Should be sorted by date
-        assert_eq!(pages[0].clone().meta.date.unwrap(), "2019-01-01");
-        assert_eq!(pages[1].clone().meta.date.unwrap(), "2018-01-01");
-        assert_eq!(pages[2].clone().meta.date.unwrap(), "2017-01-01");
-    }
-
-    #[test]
-    fn test_can_sort_order() {
-        let input = vec![
-            create_page_with_order(2),
-            create_page_with_order(3),
-            create_page_with_order(1),
-        ];
-        let (pages, _) = sort_pages(input, SortBy::Order);
-        // Should be sorted by date
-        assert_eq!(pages[0].clone().meta.order.unwrap(), 3);
-        assert_eq!(pages[1].clone().meta.order.unwrap(), 2);
-        assert_eq!(pages[2].clone().meta.order.unwrap(), 1);
-    }
-
-    #[test]
-    fn test_can_sort_none() {
-        let input = vec![
-            create_page_with_order(2),
-            create_page_with_order(3),
-            create_page_with_order(1),
-        ];
-        let (pages, _) = sort_pages(input, SortBy::None);
-        // Should be sorted by date
-        assert_eq!(pages[0].clone().meta.order.unwrap(), 2);
-        assert_eq!(pages[1].clone().meta.order.unwrap(), 3);
-        assert_eq!(pages[2].clone().meta.order.unwrap(), 1);
-    }
-
-    #[test]
-    fn test_ignore_page_with_missing_field() {
-        let input = vec![
-            create_page_with_order(2),
-            create_page_with_order(3),
-            create_page_with_date("2019-01-01"),
-        ];
-        let (pages, unsorted) = sort_pages(input, SortBy::Order);
-        assert_eq!(pages.len(), 2);
-        assert_eq!(unsorted.len(), 1);
-    }
-
-    #[test]
-    fn test_populate_previous_and_next_pages() {
-        let input = vec![
-            create_page_with_order(3),
-            create_page_with_order(2),
-            create_page_with_order(1),
-        ];
-        let pages = populate_previous_and_next_pages(input.as_slice());
-
-        assert!(pages[0].clone().next.is_none());
-        assert!(pages[0].clone().previous.is_some());
-        assert_eq!(pages[0].clone().previous.unwrap().meta.order.unwrap(), 2);
-
-        assert!(pages[1].clone().next.is_some());
-        assert!(pages[1].clone().previous.is_some());
-        assert_eq!(pages[1].clone().next.unwrap().meta.order.unwrap(), 3);
-        assert_eq!(pages[1].clone().previous.unwrap().meta.order.unwrap(), 1);
-
-        assert!(pages[2].clone().next.is_some());
-        assert!(pages[2].clone().previous.is_none());
-        assert_eq!(pages[2].clone().next.unwrap().meta.order.unwrap(), 2);
-    }
-}
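
Nothing above is lost: the `Page` struct and its parsing move into the new `content` module, the front matter handling into `src/front_matter/`, and `find_related_assets` plus the reading analytics into `src/content/utils.rs`, with the sorting helpers in `src/content/sorting.rs`. One behaviour worth remembering from the deleted `render_markdown`: everything before the first `<!-- more -->` marker becomes the summary. Illustrative:

let raw = "Intro paragraph.\n<!-- more -->\nRest of the post.";
let summary_md = raw.splitn(2, "<!-- more -->").collect::<Vec<&str>>()[0];
assert_eq!(summary_md, "Intro paragraph.\n"); // rendered to HTML afterwards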
348
src/site.rs
348
src/site.rs
|
@@ -10,43 +10,9 @@ use walkdir::WalkDir;

 use errors::{Result, ResultExt};
 use config::{Config, get_config};
-use page::{Page, populate_previous_and_next_pages, sort_pages};
-use pagination::Paginator;
 use utils::{create_file, create_directory};
-use section::{Section};
-use front_matter::{SortBy};
-use filters;
-use global_fns;
+use content::{Page, Section, Paginator, SortBy, populate_previous_and_next_pages, sort_pages};
+use templates::{GUTENBERG_TERA, global_fns, render_redirect_template};
-
-
-lazy_static! {
-    pub static ref GUTENBERG_TERA: Tera = {
-        let mut tera = Tera::default();
-        tera.add_raw_templates(vec![
-            ("rss.xml", include_str!("templates/rss.xml")),
-            ("sitemap.xml", include_str!("templates/sitemap.xml")),
-            ("robots.txt", include_str!("templates/robots.txt")),
-            ("anchor-link.html", include_str!("templates/anchor-link.html")),
-
-            ("shortcodes/youtube.html", include_str!("templates/shortcodes/youtube.html")),
-            ("shortcodes/vimeo.html", include_str!("templates/shortcodes/vimeo.html")),
-            ("shortcodes/gist.html", include_str!("templates/shortcodes/gist.html")),
-
-            ("internal/alias.html", include_str!("templates/internal/alias.html")),
-        ]).unwrap();
-        tera
-    };
-}
-
-/// Renders the `internal/alias.html` template that will redirect
-/// via refresh to the url given
-fn render_alias(url: &str, tera: &Tera) -> Result<String> {
-    let mut context = Context::new();
-    context.add("url", &url);
-
-    tera.render("internal/alias.html", &context)
-        .chain_err(|| format!("Failed to render alias for '{}'", url))
-}
-
-
 #[derive(Debug, PartialEq)]
@@ -85,6 +51,8 @@ pub struct Site {
     static_path: PathBuf,
     pub tags: HashMap<String, Vec<PathBuf>>,
     pub categories: HashMap<String, Vec<PathBuf>>,
+    /// A map of all .md files (sections and pages) and their permalink
+    /// We need that if there are relative links in the content that need to be resolved
     pub permalinks: HashMap<String, String>,
 }
@@ -97,9 +65,6 @@ impl Site {
         let tpl_glob = format!("{}/{}", path.to_string_lossy().replace("\\", "/"), "templates/**/*.*ml");
         let mut tera = Tera::new(&tpl_glob).chain_err(|| "Error parsing templates")?;
         tera.extend(&GUTENBERG_TERA)?;
-        tera.register_filter("markdown", filters::markdown);
-        tera.register_filter("base64_encode", filters::base64_encode);
-        tera.register_filter("base64_decode", filters::base64_decode);
-
         let site = Site {
             base_path: path.to_path_buf(),
@@ -124,6 +89,7 @@ impl Site {
     }

     /// Gets the path of all ignored pages in the site
+    /// Used for reporting them in the CLI
     pub fn get_ignored_pages(&self) -> Vec<PathBuf> {
         self.sections
             .values()
@@ -149,6 +115,17 @@ impl Site {
         orphans
     }

+    /// Finds the section that contains the given page, if there is one
+    pub fn find_parent_section(&self, page: &Page) -> Option<&Section> {
+        for section in self.sections.values() {
+            if section.is_child_page(page) {
+                return Some(section);
+            }
+        }
+
+        None
+    }
+
     /// Used by tests to change the output path to a tmp dir
     #[doc(hidden)]
     pub fn set_output_path<P: AsRef<Path>>(&mut self, path: P) {
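Note: an illustrative call site for the new helper (not from this diff):

    // Find the section whose directory owns a freshly reloaded page, so only
    // that section needs re-populating.
    if let Some(section) = site.find_parent_section(&page) {
        println!("{} belongs to {}", page.file_path.display(), section.permalink);
    }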
@@ -161,34 +138,32 @@ impl Site {
         let base_path = self.base_path.to_string_lossy().replace("\\", "/");
         let content_glob = format!("{}/{}", base_path, "content/**/*.md");

-        // TODO: make that parallel, that's the main bottleneck
-        // `add_section` and `add_page` can't be used in the parallel version afaik
         for entry in glob(&content_glob).unwrap().filter_map(|e| e.ok()) {
             let path = entry.as_path();
             if path.file_name().unwrap() == "_index.md" {
-                self.add_section(path)?;
+                self.add_section(path, false)?;
             } else {
-                self.add_page(path)?;
+                self.add_page(path, false)?;
             }
         }

-        // A map of all .md files (section and pages) and their permalink
-        // We need that if there are relative links in the content that need to be resolved
-        let mut permalinks = HashMap::new();
-        for page in self.pages.values() {
-            permalinks.insert(page.relative_path.clone(), page.permalink.clone());
-        }
-        for section in self.sections.values() {
-            permalinks.insert(section.relative_path.clone(), section.permalink.clone());
-        }
+        // Insert a default index section so we don't need to create a _index.md to render
+        // the index page
+        let index_path = self.base_path.join("content").join("_index.md");
+        if !self.sections.contains_key(&index_path) {
+            let mut index_section = Section::default();
+            index_section.permalink = self.config.make_permalink("");
+            self.sections.insert(index_path, index_section);
+        }

+        // TODO: make that parallel
         for page in self.pages.values_mut() {
-            page.render_markdown(&permalinks, &self.tera, &self.config)?;
+            page.render_markdown(&self.permalinks, &self.tera, &self.config)?;
         }
+        // TODO: make that parallel
+        for section in self.sections.values_mut() {
+            section.render_markdown(&self.permalinks, &self.tera, &self.config)?;
+        }

-        self.permalinks = permalinks;
         self.populate_sections();
         self.populate_tags_and_categories();
@@ -197,59 +172,82 @@ impl Site {
         Ok(())
     }

-    /// Simple wrapper fn to avoid repeating that code in several places
-    fn add_page(&mut self, path: &Path) -> Result<()> {
+    /// Add a page to the site
+    /// The `render` parameter is used in the serve command, when rebuilding a page.
+    /// If `true`, it will also render the markdown for that page
+    /// Returns the previous page struct if there was one
+    pub fn add_page(&mut self, path: &Path, render: bool) -> Result<Option<Page>> {
         let page = Page::from_file(&path, &self.config)?;
-        self.pages.insert(page.file_path.clone(), page);
-        Ok(())
-    }
-
-    /// Simple wrapper fn to avoid repeating that code in several places
-    fn add_section(&mut self, path: &Path) -> Result<()> {
-        let section = Section::from_file(path, &self.config)?;
-        self.sections.insert(section.parent_path.clone(), section);
-        Ok(())
-    }
-
-    /// Called in serve, add a page again updating permalinks and its content
-    /// The bool in the result is whether the front matter has been updated or not
-    fn add_page_and_render(&mut self, path: &Path) -> Result<(bool, Page)> {
-        let existing_page = self.pages.get(path).expect("Page was supposed to exist in add_page_and_render").clone();
-        self.add_page(path)?;
-        let mut page = self.pages.get_mut(path).unwrap();
         self.permalinks.insert(page.relative_path.clone(), page.permalink.clone());
-        page.render_markdown(&self.permalinks, &self.tera, &self.config)?;
+        let prev = self.pages.insert(page.file_path.clone(), page);

-        Ok((existing_page.meta != page.meta, page.clone()))
+        if render {
+            let mut page = self.pages.get_mut(path).unwrap();
+            page.render_markdown(&self.permalinks, &self.tera, &self.config)?;
+        }
+
+        Ok(prev)
+    }
+
+    /// Add a section to the site
+    /// The `render` parameter is used in the serve command, when rebuilding a section.
+    /// If `true`, it will also render the markdown for that section
+    /// Returns the previous section struct if there was one
+    pub fn add_section(&mut self, path: &Path, render: bool) -> Result<Option<Section>> {
+        let section = Section::from_file(path, &self.config)?;
+        self.permalinks.insert(section.relative_path.clone(), section.permalink.clone());
+        let prev = self.sections.insert(section.file_path.clone(), section);
+
+        if render {
+            let mut section = self.sections.get_mut(path).unwrap();
+            section.render_markdown(&self.permalinks, &self.tera, &self.config)?;
+        }
+
+        Ok(prev)
     }
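Note: a hedged sketch of how a serve-style caller might use the new return value (the surrounding rebuild loop and field access are assumed, not part of this diff):

    // Re-add the changed page, rendering its markdown right away, then compare
    // front matter against the previous version to decide how much to rebuild.
    if let Some(prev) = site.add_page(path, true)? {
        if prev.meta != site.pages[path].meta {
            site.populate_sections();
        }
    }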

     /// Find out the direct subsections of each subsection if there are some
     /// as well as the pages for each section
-    fn populate_sections(&mut self) {
+    pub fn populate_sections(&mut self) {
+        let mut grandparent_paths = HashMap::new();
+        for section in self.sections.values_mut() {
+            if let Some(grand_parent) = section.parent_path.parent() {
+                grandparent_paths.entry(grand_parent.to_path_buf()).or_insert_with(|| vec![]).push(section.clone());
+            }
+            // Make sure the pages of a section are empty since we can call that many times on `serve`
+            section.pages = vec![];
+            section.ignored_pages = vec![];
+        }
+
         for page in self.pages.values() {
-            if self.sections.contains_key(&page.parent_path) {
-                self.sections.get_mut(&page.parent_path).unwrap().pages.push(page.clone());
+            if self.sections.contains_key(&page.parent_path.join("_index.md")) {
+                self.sections.get_mut(&page.parent_path.join("_index.md")).unwrap().pages.push(page.clone());
             }
         }

-        let mut grandparent_paths = HashMap::new();
-        for section in self.sections.values() {
-            let grand_parent = section.parent_path.parent().unwrap().to_path_buf();
-            grandparent_paths.entry(grand_parent).or_insert_with(|| vec![]).push(section.clone());
-        }
-
-        for (parent_path, section) in &mut self.sections {
-            // TODO: avoid this clone
-            let (mut sorted_pages, cannot_be_sorted_pages) = sort_pages(section.pages.clone(), section.meta.sort_by());
-            sorted_pages = populate_previous_and_next_pages(&sorted_pages);
-            section.pages = sorted_pages;
-            section.ignored_pages = cannot_be_sorted_pages;
-
-            match grandparent_paths.get(parent_path) {
+        for section in self.sections.values_mut() {
+            match grandparent_paths.get(&section.parent_path) {
                 Some(paths) => section.subsections.extend(paths.clone()),
                 None => continue,
             };
         }
+
+        self.sort_sections_pages(None);
+    }
+
+    /// Sorts the pages of the sections
+    /// By default it sorts the pages of every section, but it can be restricted
+    /// to a single section by providing a path
+    pub fn sort_sections_pages(&mut self, only: Option<&Path>) {
+        for (path, section) in &mut self.sections {
+            if let Some(p) = only {
+                if p != path {
+                    continue;
+                }
+            }
+            let (sorted_pages, cannot_be_sorted_pages) = sort_pages(section.pages.clone(), section.meta.sort_by());
+            section.pages = populate_previous_and_next_pages(&sorted_pages);
+            section.ignored_pages = cannot_be_sorted_pages;
+        }
     }

     /// Separated from `parse` for easier testing
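Note: the `only` parameter lets a rebuild re-sort a single section instead of all of them; an illustrative call (not part of this diff):

    // Re-sort just the section that owns the changed page.
    if let Some(parent) = site.find_parent_section(&page).map(|s| s.file_path.clone()) {
        site.sort_sections_pages(Some(&parent));
    }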
@@ -285,7 +283,7 @@ impl Site {
         html
     }

-    pub fn ensure_public_directory_exists(&self) -> Result<()> {
+    fn ensure_public_directory_exists(&self) -> Result<()> {
         let public = self.output_path.clone();
         if !public.exists() {
             create_directory(&public)?;
@@ -332,57 +330,6 @@ impl Site {
         Ok(())
     }

-    pub fn rebuild_after_content_change(&mut self, path: &Path) -> Result<()> {
-        let is_section = path.ends_with("_index.md");
-
-        if path.exists() {
-            // file exists, either a new one or updating content
-            if is_section {
-                self.add_section(path)?;
-            } else {
-                // probably just an update so just re-parse that page
-                let (frontmatter_changed, page) = self.add_page_and_render(path)?;
-                // TODO: can probably be smarter and check what changed
-                if frontmatter_changed {
-                    self.populate_sections();
-                    self.populate_tags_and_categories();
-                    self.build()?;
-                } else {
-                    self.render_page(&page)?;
-                }
-            }
-        } else {
-            // File doesn't exist -> a deletion so we remove it from everything
-            let relative_path = if is_section {
-                self.sections[path].relative_path.clone()
-            } else {
-                self.pages[path].relative_path.clone()
-            };
-            self.permalinks.remove(&relative_path);
-
-            if is_section {
-                self.sections.remove(path);
-            } else {
-                self.pages.remove(path);
-            }
-            // TODO: probably no need to do that, we should be able to only re-render a page or a section.
-            self.populate_sections();
-            self.populate_tags_and_categories();
-            self.build()?;
-        }
-
-        Ok(())
-    }
-
-    pub fn rebuild_after_template_change(&mut self, path: &Path) -> Result<()> {
-        self.tera.full_reload()?;
-        match path.file_name().unwrap().to_str().unwrap() {
-            "sitemap.xml" => self.render_sitemap(),
-            "rss.xml" => self.render_rss_feed(),
-            _ => self.build() // TODO: change that
-        }
-    }
-
     /// Renders a single content page
     pub fn render_page(&self, page: &Page) -> Result<()> {
         self.ensure_public_directory_exists()?;
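Note: the two rebuild entry points removed above are not replaced inside Site; the render_* methods made public elsewhere in this diff let the caller own that dispatch. A hedged sketch of what a template-change handler might now look like (assumed shape, not part of this diff):

    site.tera.full_reload()?;
    match path.file_name().unwrap().to_str().unwrap() {
        "sitemap.xml" => site.render_sitemap()?,
        "rss.xml" => site.render_rss_feed()?,
        "robots.txt" => site.render_robots()?,
        _ => site.build()?,
    };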
@@ -424,18 +371,16 @@ impl Site {
             self.render_rss_feed()?;
         }
         self.render_robots()?;
-        if self.config.generate_categories_pages.unwrap() {
-            self.render_categories_and_tags(RenderList::Categories)?;
-        }
-        if self.config.generate_tags_pages.unwrap() {
-            self.render_categories_and_tags(RenderList::Tags)?;
-        }
+        // `render_categories` and `render_tags` will check whether the config allows
+        // them to render or not
+        self.render_categories()?;
+        self.render_tags()?;

         self.copy_static_directory()
     }
     /// Renders robots.txt
-    fn render_robots(&self) -> Result<()> {
+    pub fn render_robots(&self) -> Result<()> {
         self.ensure_public_directory_exists()?;
         create_file(
             self.output_path.join("robots.txt"),
@@ -443,8 +388,27 @@ impl Site {
         )
     }

+    /// Renders all categories if the config allows it
+    pub fn render_categories(&self) -> Result<()> {
+        if self.config.generate_categories_pages.unwrap() {
+            self.render_categories_and_tags(RenderList::Categories)
+        } else {
+            Ok(())
+        }
+    }
+
+    /// Renders all tags if the config allows it
+    pub fn render_tags(&self) -> Result<()> {
+        if self.config.generate_tags_pages.unwrap() {
+            self.render_categories_and_tags(RenderList::Tags)
+        } else {
+            Ok(())
+        }
+    }
+
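Note: both helpers fold the config check inside and fall back to Ok(()), so callers no longer need to consult the config first:

    // Safe to call even when the config has categories/tags pages disabled.
    site.render_categories()?;
    site.render_tags()?;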
     /// Render the /{categories, tags} pages and each individual category/tag page
     /// They are fundamentally the same thing: a list of pages with something in common
+    /// TODO: revisit this function, lots of things have changed since then
     fn render_categories_and_tags(&self, kind: RenderList) -> Result<()> {
         let items = match kind {
             RenderList::Categories => &self.categories,

@@ -516,7 +480,8 @@ impl Site {
         Ok(())
     }

-    fn render_sitemap(&self) -> Result<()> {
+    /// What it says on the tin
+    pub fn render_sitemap(&self) -> Result<()> {
         self.ensure_public_directory_exists()?;
         let mut context = Context::new();
         context.add("pages", &self.pages.values().collect::<Vec<&Page>>());

@@ -551,7 +516,7 @@ impl Site {
         Ok(())
     }

-    fn render_rss_feed(&self) -> Result<()> {
+    pub fn render_rss_feed(&self) -> Result<()> {
         self.ensure_public_directory_exists()?;

         let mut context = Context::new();
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn render_sections(&self) -> Result<()> {
|
/// Create a hashmap of paths to section
|
||||||
self.ensure_public_directory_exists()?;
|
/// For example `content/posts/_index.md` key will be `posts`
|
||||||
let public = self.output_path.clone();
|
fn get_sections_map(&self) -> HashMap<String, Section> {
|
||||||
let sections: HashMap<String, Section> = self.sections
|
self.sections
|
||||||
.values()
|
.values()
|
||||||
.map(|s| (s.components.join("/"), s.clone()))
|
.map(|s| (s.components.join("/"), s.clone()))
|
||||||
.collect();
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
for section in self.sections.values() {
|
/// Renders a single section
|
||||||
let mut output_path = public.to_path_buf();
|
pub fn render_section(&self, section: &Section, render_pages: bool) -> Result<()> {
|
||||||
for component in §ion.components {
|
self.ensure_public_directory_exists()?;
|
||||||
output_path.push(component);
|
let public = self.output_path.clone();
|
||||||
|
|
||||||
if !output_path.exists() {
|
let mut output_path = public.to_path_buf();
|
||||||
create_directory(&output_path)?;
|
for component in §ion.components {
|
||||||
}
|
output_path.push(component);
|
||||||
|
|
||||||
|
if !output_path.exists() {
|
||||||
|
create_directory(&output_path)?;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if render_pages {
|
||||||
for page in §ion.pages {
|
for page in §ion.pages {
|
||||||
self.render_page(page)?;
|
self.render_page(page)?;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if !section.meta.should_render() {
|
if !section.meta.should_render() {
|
||||||
continue;
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
if section.meta.is_paginated() {
|
if section.meta.is_paginated() {
|
||||||
self.render_paginated(&output_path, section)?;
|
self.render_paginated(&output_path, section)?;
|
||||||
} else {
|
} else {
|
||||||
let output = section.render_html(
|
let output = section.render_html(
|
||||||
§ions,
|
if section.is_index() { self.get_sections_map() } else { HashMap::new() },
|
||||||
&self.tera,
|
&self.tera,
|
||||||
&self.config,
|
&self.config,
|
||||||
)?;
|
)?;
|
||||||
create_file(output_path.join("index.html"), &self.inject_livereload(output))?;
|
create_file(output_path.join("index.html"), &self.inject_livereload(output))?;
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn render_index(&self) -> Result<()> {
|
||||||
|
self.render_section(&self.sections[&self.base_path.join("content").join("_index.md")], false)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Renders all sections
|
||||||
|
pub fn render_sections(&self) -> Result<()> {
|
||||||
|
for section in self.sections.values() {
|
||||||
|
self.render_section(section, true)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
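Note: render_index leans on the fallback inserted in load() above — a default section is stored under content/_index.md whenever the file is absent, so the map lookup here cannot miss on a loaded site. For the sections map handed to render_html, the keys are each section's components joined with "/", e.g. (paths hypothetical, and the root key inferred from Section::default() having no components):

    // content/posts/_index.md           -> "posts"
    // content/posts/tutorials/_index.md -> "posts/tutorials"
    // content/_index.md                 -> ""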
     /// Renders all pages that do not belong to any sections
-    fn render_orphan_pages(&self) -> Result<()> {
+    pub fn render_orphan_pages(&self) -> Result<()> {
         self.ensure_public_directory_exists()?;

         for page in self.get_all_orphan_pages() {
@@ -646,7 +629,6 @@ impl Site {
         };
-
         let paginator = Paginator::new(&section.pages, section);

         for (i, pager) in paginator.pagers.iter().enumerate() {
             let folder_path = output_path.join(&paginate_path);
             let page_path = folder_path.join(&format!("{}", i + 1));

@@ -657,7 +639,7 @@ impl Site {
                 create_file(page_path.join("index.html"), &self.inject_livereload(output))?;
             } else {
                 create_file(output_path.join("index.html"), &self.inject_livereload(output))?;
-                create_file(page_path.join("index.html"), &render_alias(&section.permalink, &self.tera)?)?;
+                create_file(page_path.join("index.html"), &render_redirect_template(&section.permalink, &self.tera)?)?;
             }
         }
@@ -3,7 +3,7 @@ use std::path::{PathBuf};

 use tera::{GlobalFn, Value, from_value, to_value, Result};

-use page::Page;
+use content::Page;


 pub fn make_get_page(all_pages: &HashMap<PathBuf, Page>) -> GlobalFn {
src/templates/mod.rs (new file, 39 lines)

@@ -0,0 +1,39 @@
+use tera::{Tera, Context};
+
+use errors::{Result, ResultExt};
+
+pub mod filters;
+pub mod global_fns;
+
+lazy_static! {
+    pub static ref GUTENBERG_TERA: Tera = {
+        let mut tera = Tera::default();
+        tera.add_raw_templates(vec![
+            ("rss.xml", include_str!("builtins/rss.xml")),
+            ("sitemap.xml", include_str!("builtins/sitemap.xml")),
+            ("robots.txt", include_str!("builtins/robots.txt")),
+            ("anchor-link.html", include_str!("builtins/anchor-link.html")),
+
+            ("shortcodes/youtube.html", include_str!("builtins/shortcodes/youtube.html")),
+            ("shortcodes/vimeo.html", include_str!("builtins/shortcodes/vimeo.html")),
+            ("shortcodes/gist.html", include_str!("builtins/shortcodes/gist.html")),
+
+            ("internal/alias.html", include_str!("builtins/internal/alias.html")),
+        ]).unwrap();
+        tera.register_filter("markdown", filters::markdown);
+        tera.register_filter("base64_encode", filters::base64_encode);
+        tera.register_filter("base64_decode", filters::base64_decode);
+        tera
+    };
+}
+
+/// Renders the `internal/alias.html` template that will redirect
+/// via refresh to the url given
+pub fn render_redirect_template(url: &str, tera: &Tera) -> Result<String> {
+    let mut context = Context::new();
+    context.add("url", &url);
+
+    tera.render("internal/alias.html", &context)
+        .chain_err(|| format!("Failed to render alias for '{}'", url))
+}
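Note: an illustrative use of the new helper, mirroring the call render_paginated makes earlier in this diff (page_path is assumed; GUTENBERG_TERA works here because the alias template is registered on it):

    let html = render_redirect_template(&section.permalink, &GUTENBERG_TERA)?;
    create_file(page_path.join("index.html"), &html)?;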
@@ -21,7 +21,6 @@ pub fn create_directory<P: AsRef<Path>>(path: P) -> Result<()> {
     Ok(())
 }

-
 /// Return the content of a file, with error handling added
 pub fn read_file<P: AsRef<Path>>(path: P) -> Result<String> {
     let path = path.as_ref();

@@ -35,7 +34,7 @@ pub fn read_file<P: AsRef<Path>>(path: P) -> Result<String> {
 }

-/// Takes a full path to a .md and returns only the components after the `content` directory
+/// Takes a full path to a .md and returns only the components after the first `content` directory
 /// Will not return the filename as last component
 pub fn find_content_components<P: AsRef<Path>>(path: P) -> Vec<String> {
     let path = path.as_ref();
sublime_syntaxes/Jinj2.sublime-syntax (new file, 180 lines)

@@ -0,0 +1,180 @@
+%YAML 1.2
+---
+# http://www.sublimetext.com/docs/3/syntax.html
+name: Jinja2
+file_extensions:
+  - j2
+  - jinja2
+scope: source.jinja2
+contexts:
+  main:
+    - match: '({%)\s*(raw)\s*(%})'
+      captures:
+        1: entity.other.jinja2.delimiter.tag
+        2: keyword.control.jinja2
+        3: entity.other.jinja2.delimiter.tag
+      push:
+        - meta_scope: comment.block.jinja2.raw
+        - match: '({%)\s*(endraw)\s*(%})'
+          captures:
+            1: entity.other.jinja2.delimiter.tag
+            2: keyword.control.jinja2
+            3: entity.other.jinja2.delimiter.tag
+          pop: true
+    - match: "{#-?"
+      captures:
+        0: entity.other.jinja2.delimiter.comment
+      push:
+        - meta_scope: comment.block.jinja2
+        - match: "-?#}"
+          captures:
+            0: entity.other.jinja2.delimiter.comment
+          pop: true
+    - match: "{{-?"
+      captures:
+        0: entity.other.jinja2.delimiter.variable
+      push:
+        - meta_scope: meta.scope.jinja2.variable
+        - match: "-?}}"
+          captures:
+            0: entity.other.jinja2.delimiter.variable
+          pop: true
+        - include: expression
+    - match: "{%-?"
+      captures:
+        0: entity.other.jinja2.delimiter.tag
+      push:
+        - meta_scope: meta.scope.jinja2.tag
+        - match: "-?%}"
+          captures:
+            0: entity.other.jinja2.delimiter.tag
+          pop: true
+        - include: expression
+  escaped_char:
+    - match: '\\x[0-9A-F]{2}'
+      scope: constant.character.escape.hex.jinja2
+  escaped_unicode_char:
+    - match: '(\\U[0-9A-Fa-f]{8})|(\\u[0-9A-Fa-f]{4})|(\\N\{[a-zA-Z ]+\})'
+      captures:
+        1: constant.character.escape.unicode.16-bit-hex.jinja2
+        2: constant.character.escape.unicode.32-bit-hex.jinja2
+        3: constant.character.escape.unicode.name.jinja2
+  expression:
+    - match: '\s*\b(macro)\s+([a-zA-Z_][a-zA-Z0-9_]*)\b'
+      captures:
+        1: keyword.control.jinja2
+        2: variable.other.jinja2.macro
+    - match: '\s*\b(block)\s+([a-zA-Z_][a-zA-Z0-9_]*)\b'
+      captures:
+        1: keyword.control.jinja2
+        2: variable.other.jinja2.block
+    - match: '\s*\b(filter)\s+([a-zA-Z_][a-zA-Z0-9_]*)\b'
+      captures:
+        1: keyword.control.jinja2
+        2: variable.other.jinja2.filter
+    - match: '\s*\b(is)\s+([a-zA-Z_][a-zA-Z0-9_]*)\b'
+      captures:
+        1: keyword.control.jinja2
+        2: variable.other.jinja2.test
+    - match: '(?<=\{\%-|\{\%)\s*\b([a-zA-Z_][a-zA-Z0-9_]*)\b(?!\s*[,=])'
+      captures:
+        1: keyword.control.jinja2
+    - match: \b(and|else|if|in|import|not|or|recursive|with(out)?\s+context)\b
+      scope: keyword.control.jinja2
+    - match: '\b([Tt]rue|[Ff]alse|[Nn]one)\b'
+      scope: constant.language.jinja2
+    - match: \b(loop|super|self|varargs|kwargs)\b
+      scope: variable.language.jinja2
+    - match: "[a-zA-Z_][a-zA-Z0-9_]*"
+      scope: variable.other.jinja2
+    - match: (\+|\-|\*\*|\*|//|/|%)
+      scope: keyword.operator.arithmetic.jinja2
+    - match: '(\|)([a-zA-Z_][a-zA-Z0-9_]*)'
+      captures:
+        1: punctuation.other.jinja2
+        2: variable.other.jinja2.filter
+    - match: '(\.)([a-zA-Z_][a-zA-Z0-9_]*)'
+      captures:
+        1: punctuation.other.jinja2
+        2: variable.other.jinja2.attribute
+    - match: '\['
+      captures:
+        0: punctuation.other.jinja2
+      push:
+        - match: '\]'
+          captures:
+            0: punctuation.other.jinja2
+          pop: true
+        - include: expression
+    - match: \(
+      captures:
+        0: punctuation.other.jinja2
+      push:
+        - match: \)
+          captures:
+            0: punctuation.other.jinja2
+          pop: true
+        - include: expression
+    - match: '\{'
+      captures:
+        0: punctuation.other.jinja2
+      push:
+        - match: '\}'
+          captures:
+            0: punctuation.other.jinja2
+          pop: true
+        - include: expression
+    - match: (\.|:|\||,)
+      scope: punctuation.other.jinja2
+    - match: (==|<=|=>|<|>|!=)
+      scope: keyword.operator.comparison.jinja2
+    - match: "="
+      scope: keyword.operator.assignment.jinja2
+    - match: '"'
+      captures:
+        0: punctuation.definition.string.begin.jinja2
+      push:
+        - meta_scope: string.quoted.double.jinja2
+        - match: '"'
+          captures:
+            0: punctuation.definition.string.end.jinja2
+          pop: true
+        - include: string
+    - match: "'"
+      captures:
+        0: punctuation.definition.string.begin.jinja2
+      push:
+        - meta_scope: string.quoted.single.jinja2
+        - match: "'"
+          captures:
+            0: punctuation.definition.string.end.jinja2
+          pop: true
+        - include: string
+    - match: "@/"
+      captures:
+        0: punctuation.definition.regexp.begin.jinja2
+      push:
+        - meta_scope: string.regexp.jinja2
+        - match: /
+          captures:
+            0: punctuation.definition.regexp.end.jinja2
+          pop: true
+        - include: simple_escapes
+  simple_escapes:
+    - match: (\\\n)|(\\\\)|(\\\")|(\\')|(\\a)|(\\b)|(\\f)|(\\n)|(\\r)|(\\t)|(\\v)
+      captures:
+        1: constant.character.escape.newline.jinja2
+        2: constant.character.escape.backlash.jinja2
+        3: constant.character.escape.double-quote.jinja2
+        4: constant.character.escape.single-quote.jinja2
+        5: constant.character.escape.bell.jinja2
+        6: constant.character.escape.backspace.jinja2
+        7: constant.character.escape.formfeed.jinja2
+        8: constant.character.escape.linefeed.jinja2
+        9: constant.character.escape.return.jinja2
+        10: constant.character.escape.tab.jinja2
+        11: constant.character.escape.vertical-tab.jinja2
+  string:
+    - include: simple_escapes
+    - include: escaped_char
+    - include: escaped_unicode_char
Binary file not shown.
test_site/content/paginated/_index.md (new file, 4 lines)

@@ -0,0 +1,4 @@
++++
+paginate_by = 10
+template = "section_paginated.html"
++++
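Note: this fixture sets paginate_by = 10 on a section with no pages at all. The assertions added to the site tests below imply that the paginator still emits a first pager in that case (an inference from those assertions, not verified here):

    let paginator = Paginator::new(&section.pages, section);
    assert_eq!(paginator.pagers.len(), 1); // one pager even with zero pages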
@@ -1,4 +1,5 @@
 +++
 title = "Posts"
-description = ""
+paginate_by = 2
+template = "section_paginated.html"
 +++
@@ -1,236 +0,0 @@
-extern crate gutenberg;
-extern crate tera;
-
-use std::path::Path;
-
-use gutenberg::{FrontMatter, split_content, SortBy};
-use tera::to_value;
-
-
-#[test]
-fn test_can_parse_a_valid_front_matter() {
-    let content = r#"
-title = "Hello"
-description = "hey there""#;
-    let res = FrontMatter::parse(content);
-    println!("{:?}", res);
-    assert!(res.is_ok());
-    let res = res.unwrap();
-    assert_eq!(res.title.unwrap(), "Hello".to_string());
-    assert_eq!(res.description.unwrap(), "hey there".to_string());
-}
-
-#[test]
-fn test_can_parse_tags() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-slug = "hello-world"
-tags = ["rust", "html"]"#;
-    let res = FrontMatter::parse(content);
-    assert!(res.is_ok());
-    let res = res.unwrap();
-
-    assert_eq!(res.title.unwrap(), "Hello".to_string());
-    assert_eq!(res.slug.unwrap(), "hello-world".to_string());
-    assert_eq!(res.tags.unwrap(), ["rust".to_string(), "html".to_string()]);
-}
-
-#[test]
-fn test_can_parse_extra_attributes_in_frontmatter() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-slug = "hello-world"
-
-[extra]
-language = "en"
-authors = ["Bob", "Alice"]"#;
-    let res = FrontMatter::parse(content);
-    assert!(res.is_ok());
-    let res = res.unwrap();
-
-    assert_eq!(res.title.unwrap(), "Hello".to_string());
-    assert_eq!(res.slug.unwrap(), "hello-world".to_string());
-    let extra = res.extra.unwrap();
-    assert_eq!(extra["language"], to_value("en").unwrap());
-    assert_eq!(
-        extra["authors"],
-        to_value(["Bob".to_string(), "Alice".to_string()]).unwrap()
-    );
-}
-
-#[test]
-fn test_is_ok_with_url_instead_of_slug() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-url = "hello-world""#;
-    let res = FrontMatter::parse(content);
-    assert!(res.is_ok());
-    let res = res.unwrap();
-    assert!(res.slug.is_none());
-    assert_eq!(res.url.unwrap(), "hello-world".to_string());
-}
-
-#[test]
-fn test_is_ok_with_empty_front_matter() {
-    let content = r#"  "#;
-    let res = FrontMatter::parse(content);
-    assert!(res.is_ok());
-}
-
-#[test]
-fn test_errors_with_invalid_front_matter() {
-    let content = r#"title = 1\n"#;
-    let res = FrontMatter::parse(content);
-    assert!(res.is_err());
-}
-
-#[test]
-fn test_errors_on_non_string_tag() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-slug = "hello-world"
-tags = ["rust", 1]"#;
-    let res = FrontMatter::parse(content);
-    assert!(res.is_err());
-}
-
-#[test]
-fn test_errors_on_present_but_empty_slug() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-slug = """#;
-    let res = FrontMatter::parse(content);
-    assert!(res.is_err());
-}
-
-#[test]
-fn test_errors_on_present_but_empty_url() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-url = """#;
-    let res = FrontMatter::parse(content);
-    assert!(res.is_err());
-}
-
-#[test]
-fn test_parse_date_yyyy_mm_dd() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-date = "2016-10-10""#;
-    let res = FrontMatter::parse(content).unwrap();
-    assert!(res.date().is_some());
-}
-
-#[test]
-fn test_parse_date_rfc3339() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-date = "2002-10-02T15:00:00Z""#;
-    let res = FrontMatter::parse(content).unwrap();
-    assert!(res.date().is_some());
-}
-
-#[test]
-fn test_cant_parse_random_date_format() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-date = "2002/10/12""#;
-    let res = FrontMatter::parse(content).unwrap();
-    assert!(res.date().is_none());
-}
-
-#[test]
-fn test_can_parse_sort_by_date() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-sort_by = "date""#;
-    let res = FrontMatter::parse(content).unwrap();
-    assert!(res.sort_by.is_some());
-    assert_eq!(res.sort_by.unwrap(), SortBy::Date);
-}
-
-#[test]
-fn test_can_parse_sort_by_order() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-sort_by = "order""#;
-    let res = FrontMatter::parse(content).unwrap();
-    assert!(res.sort_by.is_some());
-    assert_eq!(res.sort_by.unwrap(), SortBy::Order);
-}
-
-#[test]
-fn test_can_parse_sort_by_none() {
-    let content = r#"
-title = "Hello"
-description = "hey there"
-sort_by = "none""#;
-    let res = FrontMatter::parse(content).unwrap();
-    assert!(res.sort_by.is_some());
-    assert_eq!(res.sort_by.unwrap(), SortBy::None);
-}
-
-#[test]
-fn test_can_split_content_valid() {
-    let content = r#"
-+++
-title = "Title"
-description = "hey there"
-date = "2002/10/12"
-+++
-Hello
-"#;
-    let (front_matter, content) = split_content(Path::new(""), content).unwrap();
-    assert_eq!(content, "Hello\n");
-    assert_eq!(front_matter.title.unwrap(), "Title");
-}
-
-#[test]
-fn test_can_split_content_with_only_frontmatter_valid() {
-    let content = r#"
-+++
-title = "Title"
-description = "hey there"
-date = "2002/10/12"
-+++"#;
-    let (front_matter, content) = split_content(Path::new(""), content).unwrap();
-    assert_eq!(content, "");
-    assert_eq!(front_matter.title.unwrap(), "Title");
-}
-
-#[test]
-fn test_can_split_content_lazily() {
-    let content = r#"
-+++
-title = "Title"
-description = "hey there"
-date = "2002-10-02T15:00:00Z"
-+++
-+++"#;
-    let (front_matter, content) = split_content(Path::new(""), content).unwrap();
-    assert_eq!(content, "+++");
-    assert_eq!(front_matter.title.unwrap(), "Title");
-}
-
-#[test]
-fn test_error_if_cannot_locate_frontmatter() {
-    let content = r#"
-+++
-title = "Title"
-description = "hey there"
-date = "2002/10/12"
-"#;
-    let res = split_content(Path::new(""), content);
-    assert!(res.is_err());
-}
@@ -163,43 +163,6 @@ Hello world"#;
     assert_eq!(page.permalink, format!("{}{}", Config::default().base_url, "file-with-space"));
 }

-#[test]
-fn test_reading_analytics_short() {
-    let content = r#"
-+++
-title = "Hello"
-description = "hey there"
-+++
-Hello world"#;
-    let res = Page::parse(Path::new("hello.md"), content, &Config::default());
-    assert!(res.is_ok());
-    let mut page = res.unwrap();
-    page.render_markdown(&HashMap::default(), &Tera::default(), &Config::default()).unwrap();
-    let (word_count, reading_time) = page.get_reading_analytics();
-    assert_eq!(word_count, 2);
-    assert_eq!(reading_time, 0);
-}
-
-#[test]
-fn test_reading_analytics_long() {
-    let mut content = r#"
-+++
-title = "Hello"
-description = "hey there"
-+++
-Hello world"#.to_string();
-    for _ in 0..1000 {
-        content.push_str(" Hello world");
-    }
-    let res = Page::parse(Path::new("hello.md"), &content, &Config::default());
-    assert!(res.is_ok());
-    let mut page = res.unwrap();
-    page.render_markdown(&HashMap::default(), &Tera::default(), &Config::default()).unwrap();
-    let (word_count, reading_time) = page.get_reading_analytics();
-    assert_eq!(word_count, 2002);
-    assert_eq!(reading_time, 10);
-}
-
 #[test]
 fn test_automatic_summary_is_empty_string() {
     let content = r#"
@@ -35,26 +35,26 @@ fn test_can_parse_site() {
     assert_eq!(asset_folder_post.components, vec!["posts".to_string()]);

     // That we have the right number of sections
-    assert_eq!(site.sections.len(), 5);
+    assert_eq!(site.sections.len(), 6);

     // And that the sections are correct
-    let index_section = &site.sections[&path.join("content")];
-    assert_eq!(index_section.subsections.len(), 1);
+    let index_section = &site.sections[&path.join("content").join("_index.md")];
+    assert_eq!(index_section.subsections.len(), 2);
     assert_eq!(index_section.pages.len(), 1);

-    let posts_section = &site.sections[&posts_path];
+    let posts_section = &site.sections[&posts_path.join("_index.md")];
     assert_eq!(posts_section.subsections.len(), 1);
     assert_eq!(posts_section.pages.len(), 5);

-    let tutorials_section = &site.sections[&posts_path.join("tutorials")];
+    let tutorials_section = &site.sections[&posts_path.join("tutorials").join("_index.md")];
     assert_eq!(tutorials_section.subsections.len(), 2);
     assert_eq!(tutorials_section.pages.len(), 0);

-    let devops_section = &site.sections[&posts_path.join("tutorials").join("devops")];
+    let devops_section = &site.sections[&posts_path.join("tutorials").join("devops").join("_index.md")];
     assert_eq!(devops_section.subsections.len(), 0);
     assert_eq!(devops_section.pages.len(), 2);

-    let prog_section = &site.sections[&posts_path.join("tutorials").join("programming")];
+    let prog_section = &site.sections[&posts_path.join("tutorials").join("programming").join("_index.md")];
     assert_eq!(prog_section.subsections.len(), 0);
     assert_eq!(prog_section.pages.len(), 2);
 }

@@ -173,6 +173,7 @@ fn test_can_build_site_with_categories() {
     let mut path = env::current_dir().unwrap().to_path_buf();
     path.push("test_site");
     let mut site = Site::new(&path, "config.toml").unwrap();
+    site.config.generate_categories_pages = Some(true);
     site.load().unwrap();

     for (i, page) in site.pages.values_mut().enumerate() {

@@ -224,6 +225,7 @@ fn test_can_build_site_with_tags() {
     let mut path = env::current_dir().unwrap().to_path_buf();
     path.push("test_site");
     let mut site = Site::new(&path, "config.toml").unwrap();
+    site.config.generate_tags_pages = Some(true);
     site.load().unwrap();

     for (i, page) in site.pages.values_mut().enumerate() {
@@ -294,6 +296,9 @@ fn test_can_build_site_with_pagination_for_section() {
     let mut site = Site::new(&path, "config.toml").unwrap();
     site.load().unwrap();
     for section in site.sections.values_mut() {
+        if section.is_index() {
+            continue;
+        }
         section.meta.paginate_by = Some(2);
         section.meta.template = Some("section_paginated.html".to_string());
     }

@@ -316,6 +321,9 @@ fn test_can_build_site_with_pagination_for_section() {
     assert!(file_exists!(public, "posts/index.html"));
     // And pagination!
     assert!(file_exists!(public, "posts/page/1/index.html"));
+    // even if there are no pages, only the section!
+    assert!(file_exists!(public, "paginated/page/1/index.html"));
+    assert!(file_exists!(public, "paginated/index.html"));
     // should redirect to posts/
     assert!(file_contains!(
         public,

@@ -347,7 +355,7 @@ fn test_can_build_site_with_pagination_for_index() {
     let mut site = Site::new(&path, "config.toml").unwrap();
     site.load().unwrap();
     {
-        let mut index = site.sections.get_mut(&path.join("content")).unwrap();
+        let mut index = site.sections.get_mut(&path.join("content").join("_index.md")).unwrap();
         index.meta.paginate_by = Some(2);
         index.meta.template = Some("index_paginated.html".to_string());
     }

@@ -368,6 +376,9 @@ fn test_can_build_site_with_pagination_for_index() {

     // And pagination!
     assert!(file_exists!(public, "page/1/index.html"));
+    // even if there are no pages, only the section!
+    assert!(file_exists!(public, "paginated/page/1/index.html"));
+    assert!(file_exists!(public, "paginated/index.html"));
     // should redirect to index
     assert!(file_contains!(
         public,