Remove all draft specific code

Vincent Prouillet 2019-07-19 11:10:28 +02:00
parent 99abbb87a1
commit 0e4e0c35b3
15 changed files with 53 additions and 99 deletions


@@ -1,12 +1,16 @@
 # Changelog

-## 0.8.1 (unreleased)
+## 0.9.0 (unreleased)
+
+### Breaking
+
+- Pages with draft=true are now only loaded/rendered in `zola serve`
+
+### Other

 - Add `--open` flag to open server URL in default browser
 - Fix sitemaps namespace
 - Update livereload
 - Add `hard_link_static` config option to hard link things in the static directory instead of copying
-- Pages with draft=true are not longer rendered in `zola build`
 - Add warning for old style internal links since they would still function silently
 - Add some counts to `zola check`
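The breaking change above replaces per-consumer draft checks (sorting, search index, sitemap, RSS, taxonomies) with a single gate applied when pages are loaded. A minimal self-contained sketch of that rule with stand-in types; only the behaviour and the draft flag come from this commit, everything else is assumed:

```rust
/// Sketch only: Zola's real gate lives in `Site` (see the hunk further down).
/// `PageMeta` is a simplified stand-in, not Zola's front-matter type.
struct PageMeta {
    draft: bool,
}

/// A page is kept unless it is a draft outside `zola serve`.
fn keep_page(meta: &PageMeta, in_serve_mode: bool) -> bool {
    !meta.draft || in_serve_mode
}

fn main() {
    let draft = PageMeta { draft: true };
    assert!(keep_page(&draft, true)); // `zola serve` still renders drafts
    assert!(!keep_page(&draft, false)); // `zola build` and `zola check` skip them
}
```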

Cargo.lock (generated)

@@ -3202,7 +3202,7 @@ dependencies = [

 [[package]]
 name = "zola"
-version = "0.8.1"
+version = "0.9.0"
 dependencies = [
  "actix-files 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "actix-web 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",


@@ -1,6 +1,6 @@
 [package]
 name = "zola"
-version = "0.8.1"
+version = "0.9.0"
 authors = ["Vincent Prouillet <hello@vincentprouillet.com>"]
 license = "MIT"
 readme = "README.md"


@@ -236,18 +236,7 @@ impl Library {
         for (key, (sorted, cannot_be_sorted, sort_by)) in updates {
             // Find sibling between sorted pages first
-            let with_siblings = find_siblings(
-                sorted
-                    .iter()
-                    .map(|k| {
-                        if let Some(page) = self.pages.get(*k) {
-                            (k, page.is_draft())
-                        } else {
-                            unreachable!("Sorting got an unknown page")
-                        }
-                    })
-                    .collect(),
-            );
+            let with_siblings = find_siblings(&sorted);

             for (k2, val1, val2) in with_siblings {
                 if let Some(page) = self.pages.get_mut(k2) {


@@ -57,53 +57,21 @@ pub fn sort_pages_by_weight(pages: Vec<(&Key, Option<usize>, &str)>) -> (Vec<Key
 }

 /// Find the lighter/heavier and earlier/later pages for all pages having a date/weight
-/// and that are not drafts.
-pub fn find_siblings(sorted: Vec<(&Key, bool)>) -> Vec<(Key, Option<Key>, Option<Key>)> {
+pub fn find_siblings(sorted: &[Key]) -> Vec<(Key, Option<Key>, Option<Key>)> {
     let mut res = Vec::with_capacity(sorted.len());
     let length = sorted.len();

-    for (i, (key, is_draft)) in sorted.iter().enumerate() {
-        if *is_draft {
-            res.push((**key, None, None));
-            continue;
-        }
-        let mut with_siblings = (**key, None, None);
+    for (i, key) in sorted.iter().enumerate() {
+        let mut with_siblings = (*key, None, None);

         if i > 0 {
-            let mut j = i;
-            loop {
-                if j == 0 {
-                    break;
-                }
-                j -= 1;
-                if sorted[j].1 {
-                    continue;
-                }
-                // lighter / later
-                with_siblings.1 = Some(*sorted[j].0);
-                break;
-            }
+            // lighter / later
+            with_siblings.1 = Some(sorted[i - 1]);
         }

         if i < length - 1 {
-            let mut j = i;
-            loop {
-                if j == length - 1 {
-                    break;
-                }
-                j += 1;
-                if sorted[j].1 {
-                    continue;
-                }
-                // heavier/earlier
-                with_siblings.2 = Some(*sorted[j].0);
-                break;
-            }
+            // heavier/earlier
+            with_siblings.2 = Some(sorted[i + 1]);
         }

         res.push(with_siblings);
     }
@@ -208,10 +176,9 @@ mod tests {
         let page3 = create_page_with_weight(3);
         let key3 = dense.insert(page3.clone());

-        let input =
-            vec![(&key1, page1.is_draft()), (&key2, page2.is_draft()), (&key3, page3.is_draft())];
+        let input = vec![key1, key2, key3];

-        let pages = find_siblings(input);
+        let pages = find_siblings(&input);

         assert_eq!(pages[0].1, None);
         assert_eq!(pages[0].2, Some(key2));
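For reference, the simplified `find_siblings` reassembled from the two hunks above, with a plain `usize` stand-in for the library's slotmap `Key` so the sketch runs on its own; it is not a verbatim copy of the committed file:

```rust
// Stand-in for the slotmap key type used by the library.
type Key = usize;

/// Find the lighter/heavier and earlier/later pages for all pages having a date/weight.
/// Drafts are now filtered out before sorting, so siblings are simply the
/// adjacent entries of the already-sorted slice.
fn find_siblings(sorted: &[Key]) -> Vec<(Key, Option<Key>, Option<Key>)> {
    let mut res = Vec::with_capacity(sorted.len());
    let length = sorted.len();

    for (i, key) in sorted.iter().enumerate() {
        let mut with_siblings = (*key, None, None);

        if i > 0 {
            // lighter / later
            with_siblings.1 = Some(sorted[i - 1]);
        }

        if i < length - 1 {
            // heavier / earlier
            with_siblings.2 = Some(sorted[i + 1]);
        }

        res.push(with_siblings);
    }

    res
}

fn main() {
    // Mirrors the updated test: plain keys in, previous/next neighbours out.
    let input = vec![1, 2, 3];
    let pages = find_siblings(&input);
    assert_eq!(pages[0].1, None);
    assert_eq!(pages[0].2, Some(2));
    assert_eq!(pages[2].1, Some(2));
    assert_eq!(pages[2].2, None);
}
```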


@@ -820,7 +820,6 @@ fn doesnt_try_to_highlight_content_from_shortcode() {
 //    assert_eq!(res.body, expected);
 //}

-
 // https://github.com/getzola/zola/issues/747
 #[test]
 fn leaves_custom_url_scheme_untouched() {


@@ -72,7 +72,7 @@ fn add_section_to_index(index: &mut Index, section: &Section, library: &Library)
     for key in &section.pages {
         let page = library.get_page_by_key(*key);

-        if !page.meta.in_search_index || page.meta.draft {
+        if !page.meta.in_search_index {
             continue;
         }


@@ -230,7 +230,7 @@ impl Site {
         for page in pages {
             let p = page?;
             // Draft pages are not rendered in zola build so we just discard them
-            if p.meta.draft && self.config.is_in_build_mode() {
+            if p.meta.draft && !self.config.is_in_serve_mode() {
                 continue;
             }
             pages_insert_anchors.insert(
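The `is_in_build_mode`, `is_in_serve_mode` and `is_in_check_mode` helpers used in these hunks are not shown in the diff. A hedged sketch of how such mode flags could hang off the config; only the three method names come from this commit, the enum and field are assumptions:

```rust
// Assumed shape: Zola's actual Config is not part of this diff.
#[derive(Clone, Copy, PartialEq)]
enum Mode {
    Build,
    Serve,
    Check,
}

struct Config {
    mode: Mode,
}

impl Config {
    fn is_in_build_mode(&self) -> bool {
        self.mode == Mode::Build
    }

    fn is_in_serve_mode(&self) -> bool {
        self.mode == Mode::Serve
    }

    fn is_in_check_mode(&self) -> bool {
        self.mode == Mode::Check
    }
}

fn main() {
    // Under `zola serve`, drafts pass the gate shown above.
    let config = Config { mode: Mode::Serve };
    assert!(config.is_in_serve_mode());
    assert!(!config.is_in_build_mode());
}
```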
@@ -323,7 +323,11 @@ impl Site {
             .collect();

         if self.config.is_in_check_mode() {
-            println!("> Checked {} internal link(s) with an anchor: {} error(s) found.", all_links.len(), errors.len());
+            println!(
+                "> Checked {} internal link(s) with an anchor: {} error(s) found.",
+                all_links.len(),
+                errors.len()
+            );
         }

         if errors.is_empty() {
@@ -392,7 +396,11 @@ impl Site {
                 .collect()
         });

-        println!("> Checked {} external link(s): {} error(s) found.", all_links.len(), errors.len());
+        println!(
+            "> Checked {} external link(s): {} error(s) found.",
+            all_links.len(),
+            errors.len()
+        );

         if errors.is_empty() {
             return Ok(());
@@ -624,7 +632,7 @@ impl Site {
             copy_directory(
                 &self.base_path.join("themes").join(theme).join("static"),
                 &self.output_path,
-                false
+                false,
             )?;
         }
         // We're fine with missing static folders
@@ -901,7 +909,7 @@ impl Site {
         )
     }

-    /// Renders all taxonomies with at least one non-draft post
+    /// Renders all taxonomies
     pub fn render_taxonomies(&self) -> Result<()> {
         for taxonomy in &self.taxonomies {
             self.render_taxonomy(taxonomy)?;
@@ -1018,10 +1026,7 @@ impl Site {
         ensure_directory_exists(&self.output_path)?;

         let mut context = Context::new();
-        let mut pages = all_pages
-            .into_iter()
-            .filter(|p| p.meta.date.is_some() && !p.is_draft())
-            .collect::<Vec<_>>();
+        let mut pages = all_pages.into_iter().filter(|p| p.meta.date.is_some()).collect::<Vec<_>>();

         // Don't generate a RSS feed if none of the pages has a date
         if pages.is_empty() {


@@ -62,7 +62,6 @@ pub fn find_entries<'a>(
     let pages = library
         .pages_values()
         .iter()
-        .filter(|p| !p.is_draft())
         .map(|p| {
             let date = match p.meta.date {
                 Some(ref d) => Some(d.to_string()),


@@ -55,7 +55,7 @@ date =
 # will not be rendered.
 weight = 0

-# A draft page will not be present in prev/next pagination
+# A draft page is only rendered in `zola serve`, they are ignored in `zola build` and `zola check`
 draft = false

 # If filled, it will use that slug instead of the filename to make up the URL


@@ -28,8 +28,7 @@ categories = ["programming"]
 +++
 ```

-The taxonomy pages will only be created if at least one non-draft page is found and
-are available at the following paths:
+The taxonomy pages are available at the following paths:

 ```plain
 $BASE_URL/$NAME/


@@ -8,7 +8,7 @@ generate an `rss.xml` page for the site, which will live at `base_url/rss.xml`.
 generate the `rss.xml` page, Zola will look for a `rss.xml` file in the `templates`
 directory or, if one does not exist, will use the use the built-in rss template.

-**Only pages with a date and that are not draft will be available.**
+**Only pages with a date will be available.**

 The RSS template gets two variables in addition of the config:


@@ -62,7 +62,7 @@ struct ErrorFilePaths {
 }

 fn not_found<B>(
-    res: dev::ServiceResponse<B>
+    res: dev::ServiceResponse<B>,
 ) -> std::result::Result<ErrorHandlerResponse<B>, actix_web::Error> {
     let buf: Vec<u8> = {
         let error_files: &ErrorFilePaths = res.request().app_data().unwrap();
@@ -74,15 +74,10 @@ fn not_found<B>(
     };

     let new_resp = HttpResponse::build(http::StatusCode::NOT_FOUND)
-        .header(
-            http::header::CONTENT_TYPE,
-            http::header::HeaderValue::from_static("text/html"),
-        )
+        .header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("text/html"))
         .body(buf);

-    Ok(ErrorHandlerResponse::Response(
-        res.into_response(new_resp.into_body()),
-    ))
+    Ok(ErrorHandlerResponse::Response(res.into_response(new_resp.into_body())))
 }

 fn livereload_handler() -> HttpResponse {
@@ -192,23 +187,15 @@ pub fn serve(
     let broadcaster = if !watch_only {
         thread::spawn(move || {
             let s = HttpServer::new(move || {
-                let error_handlers = ErrorHandlers::new()
-                    .handler(http::StatusCode::NOT_FOUND, not_found);
+                let error_handlers =
+                    ErrorHandlers::new().handler(http::StatusCode::NOT_FOUND, not_found);

                 App::new()
-                    .data(ErrorFilePaths {
-                        not_found: static_root.join("404.html"),
-                    })
+                    .data(ErrorFilePaths { not_found: static_root.join("404.html") })
                     .wrap(error_handlers)
-                    .route(
-                        "/livereload.js",
-                        web::get().to(livereload_handler)
-                    )
+                    .route("/livereload.js", web::get().to(livereload_handler))
                     // Start a webserver that serves the `output_dir` directory
-                    .service(
-                        fs::Files::new("/", &static_root)
-                            .index_file("index.html"),
-                    )
+                    .service(fs::Files::new("/", &static_root).index_file("index.html"))
             })
             .bind(&address)
             .expect("Can't start the webserver")
@@ -326,7 +313,12 @@ pub fn serve(
                         } else {
                             rebuild_done_handling(
                                 &broadcaster,
-                                copy_file(&path, &site.output_path, &site.static_path, site.config.hard_link_static),
+                                copy_file(
+                                    &path,
+                                    &site.output_path,
+                                    &site.static_path,
+                                    site.config.hard_link_static,
+                                ),
                                 &partial_path.to_string_lossy(),
                             );
                         }