Remove all draft specific code

Commit: 0e4e0c35b3
Parent: 99abbb87a1

@@ -1,12 +1,16 @@
 # Changelog

-## 0.8.1 (unreleased)
+## 0.9.0 (unreleased)
+
+### Breaking
+
+- Pages with draft=true are now only loaded/rendered in `zola serve`
+
+### Other

 - Add `--open` flag to open server URL in default browser
 - Fix sitemaps namespace
 - Update livereload
 - Add `hard_link_static` config option to hard link things in the static directory instead of copying
-- Pages with draft=true are not longer rendered in `zola build`
 - Add warning for old style internal links since they would still function silently
 - Add some counts to `zola check`
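The `Breaking` entry above is the heart of this commit: instead of checking `draft` at every render/index/feed call site, draft pages are simply skipped at load time unless zola runs in serve mode, which is why the per-feature draft checks disappear in the hunks below. A minimal sketch of that gate follows; the `Mode` enum and `keep_page` helper are illustrative names, not zola's API (the real check is `p.meta.draft && !self.config.is_in_serve_mode()` in the `impl Site` hunk further down).

```rust
// Illustrative only: zola keeps the mode on its Config and exposes helpers
// such as is_in_serve_mode(); this stand-alone enum just mirrors the idea.
#[derive(PartialEq, Clone, Copy)]
enum Mode {
    Build,
    Serve,
    Check,
}

/// A page marked `draft = true` is only kept when serving locally.
fn keep_page(is_draft: bool, mode: Mode) -> bool {
    !is_draft || mode == Mode::Serve
}

fn main() {
    assert!(keep_page(true, Mode::Serve));
    assert!(!keep_page(true, Mode::Build));
    assert!(!keep_page(true, Mode::Check));
    assert!(keep_page(false, Mode::Build));
}
```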

Cargo.lock (generated): 2 changed lines

@@ -3202,7 +3202,7 @@ dependencies = [

 [[package]]
 name = "zola"
-version = "0.8.1"
+version = "0.9.0"
 dependencies = [
  "actix-files 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "actix-web 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -1,6 +1,6 @@
 [package]
 name = "zola"
-version = "0.8.1"
+version = "0.9.0"
 authors = ["Vincent Prouillet <hello@vincentprouillet.com>"]
 license = "MIT"
 readme = "README.md"

@@ -236,18 +236,7 @@ impl Library {
         for (key, (sorted, cannot_be_sorted, sort_by)) in updates {
             // Find sibling between sorted pages first
-            let with_siblings = find_siblings(
-                sorted
-                    .iter()
-                    .map(|k| {
-                        if let Some(page) = self.pages.get(*k) {
-                            (k, page.is_draft())
-                        } else {
-                            unreachable!("Sorting got an unknown page")
-                        }
-                    })
-                    .collect(),
-            );
+            let with_siblings = find_siblings(&sorted);

             for (k2, val1, val2) in with_siblings {
                 if let Some(page) = self.pages.get_mut(k2) {

@@ -57,53 +57,21 @@ pub fn sort_pages_by_weight(pages: Vec<(&Key, Option<usize>, &str)>) -> (Vec<Key
 }

 /// Find the lighter/heavier and earlier/later pages for all pages having a date/weight
-/// and that are not drafts.
-pub fn find_siblings(sorted: Vec<(&Key, bool)>) -> Vec<(Key, Option<Key>, Option<Key>)> {
+pub fn find_siblings(sorted: &[Key]) -> Vec<(Key, Option<Key>, Option<Key>)> {
     let mut res = Vec::with_capacity(sorted.len());
     let length = sorted.len();

-    for (i, (key, is_draft)) in sorted.iter().enumerate() {
-        if *is_draft {
-            res.push((**key, None, None));
-            continue;
-        }
-        let mut with_siblings = (**key, None, None);
+    for (i, key) in sorted.iter().enumerate() {
+        let mut with_siblings = (*key, None, None);

         if i > 0 {
-            let mut j = i;
-            loop {
-                if j == 0 {
-                    break;
-                }
-
-                j -= 1;
-
-                if sorted[j].1 {
-                    continue;
-                }
-                // lighter / later
-                with_siblings.1 = Some(*sorted[j].0);
-                break;
-            }
+            // lighter / later
+            with_siblings.1 = Some(sorted[i - 1]);
         }

         if i < length - 1 {
-            let mut j = i;
-            loop {
-                if j == length - 1 {
-                    break;
-                }
-
-                j += 1;
-
-                if sorted[j].1 {
-                    continue;
-                }
-
-                // heavier/earlier
-                with_siblings.2 = Some(*sorted[j].0);
-                break;
-            }
+            // heavier/earlier
+            with_siblings.2 = Some(sorted[i + 1]);
         }
         res.push(with_siblings);
     }

@@ -208,10 +176,9 @@ mod tests {
         let page3 = create_page_with_weight(3);
         let key3 = dense.insert(page3.clone());

-        let input =
-            vec![(&key1, page1.is_draft()), (&key2, page2.is_draft()), (&key3, page3.is_draft())];
+        let input = vec![key1, key2, key3];

-        let pages = find_siblings(input);
+        let pages = find_siblings(&input);

         assert_eq!(pages[0].1, None);
         assert_eq!(pages[0].2, Some(key2));
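The shape of the simplified `find_siblings` is easy to restate outside zola: with drafts filtered out before sorting, a page's previous/next sibling is simply its direct neighbour in the sorted slice, so the inner `loop`s that skipped drafts are gone. The sketch below is not zola's code (it uses plain strings instead of slotmap `Key`s), just a minimal illustration of the same logic.

```rust
/// Minimal re-statement of the simplified sibling lookup from the hunk above,
/// using &str identifiers instead of zola's slotmap keys.
fn find_siblings(sorted: &[&str]) -> Vec<(String, Option<String>, Option<String>)> {
    let mut res = Vec::with_capacity(sorted.len());
    for (i, name) in sorted.iter().enumerate() {
        // Previous (lighter / later) neighbour, if any.
        let prev = if i > 0 { Some(sorted[i - 1].to_string()) } else { None };
        // Next (heavier / earlier) neighbour, if any.
        let next = if i + 1 < sorted.len() { Some(sorted[i + 1].to_string()) } else { None };
        res.push((name.to_string(), prev, next));
    }
    res
}

fn main() {
    let siblings = find_siblings(&["a", "b", "c"]);
    assert_eq!(siblings[0], ("a".to_string(), None, Some("b".to_string())));
    assert_eq!(siblings[1], ("b".to_string(), Some("a".to_string()), Some("c".to_string())));
}
```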

@@ -820,7 +820,6 @@ fn doesnt_try_to_highlight_content_from_shortcode() {
 // assert_eq!(res.body, expected);
 //}
-

 // https://github.com/getzola/zola/issues/747
 #[test]
 fn leaves_custom_url_scheme_untouched() {

@@ -72,7 +72,7 @@ fn add_section_to_index(index: &mut Index, section: &Section, library: &Library)

     for key in &section.pages {
         let page = library.get_page_by_key(*key);
-        if !page.meta.in_search_index || page.meta.draft {
+        if !page.meta.in_search_index {
            continue;
        }

@@ -230,7 +230,7 @@ impl Site {
         for page in pages {
             let p = page?;
             // Draft pages are not rendered in zola build so we just discard them
-            if p.meta.draft && self.config.is_in_build_mode() {
+            if p.meta.draft && !self.config.is_in_serve_mode() {
                 continue;
             }
             pages_insert_anchors.insert(

@@ -323,7 +323,11 @@ impl Site {
             .collect();

         if self.config.is_in_check_mode() {
-            println!("> Checked {} internal link(s) with an anchor: {} error(s) found.", all_links.len(), errors.len());
+            println!(
+                "> Checked {} internal link(s) with an anchor: {} error(s) found.",
+                all_links.len(),
+                errors.len()
+            );
         }

         if errors.is_empty() {

@@ -392,7 +396,11 @@ impl Site {
             .collect()
         });

-        println!("> Checked {} external link(s): {} error(s) found.", all_links.len(), errors.len());
+        println!(
+            "> Checked {} external link(s): {} error(s) found.",
+            all_links.len(),
+            errors.len()
+        );

         if errors.is_empty() {
             return Ok(());

@@ -624,7 +632,7 @@ impl Site {
             copy_directory(
                 &self.base_path.join("themes").join(theme).join("static"),
                 &self.output_path,
-                false
+                false,
             )?;
         }
         // We're fine with missing static folders

@@ -901,7 +909,7 @@ impl Site {
         )
     }

-    /// Renders all taxonomies with at least one non-draft post
+    /// Renders all taxonomies
     pub fn render_taxonomies(&self) -> Result<()> {
         for taxonomy in &self.taxonomies {
             self.render_taxonomy(taxonomy)?;

@@ -1018,10 +1026,7 @@ impl Site {
         ensure_directory_exists(&self.output_path)?;

         let mut context = Context::new();
-        let mut pages = all_pages
-            .into_iter()
-            .filter(|p| p.meta.date.is_some() && !p.is_draft())
-            .collect::<Vec<_>>();
+        let mut pages = all_pages.into_iter().filter(|p| p.meta.date.is_some()).collect::<Vec<_>>();

         // Don't generate a RSS feed if none of the pages has a date
         if pages.is_empty() {

@@ -62,7 +62,6 @@ pub fn find_entries<'a>(
     let pages = library
         .pages_values()
         .iter()
-        .filter(|p| !p.is_draft())
         .map(|p| {
             let date = match p.meta.date {
                 Some(ref d) => Some(d.to_string()),

@@ -42,7 +42,7 @@ fn can_parse_site() {

     let posts_section = library.get_section(&posts_path.join("_index.md")).unwrap();
     assert_eq!(posts_section.subsections.len(), 2);
     assert_eq!(posts_section.pages.len(), 9); // 10 with 1 draft == 9
     assert_eq!(
         posts_section.ancestors,
         vec![*library.get_section_key(&index_section.file.path).unwrap()]

@@ -55,7 +55,7 @@ date =
 # will not be rendered.
 weight = 0

-# A draft page will not be present in prev/next pagination
+# A draft page is only rendered in `zola serve`, they are ignored in `zola build` and `zola check`
 draft = false

 # If filled, it will use that slug instead of the filename to make up the URL
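The front-matter snippet above is TOML that zola deserializes into its page front-matter struct. As a rough illustration only, assuming the `serde` (with derive) and `toml` crates, parsing a `draft` flag looks like the sketch below; the `FrontMatter` struct is a trimmed-down stand-in, not zola's real type, which has many more fields.

```rust
use serde::Deserialize;

// Trimmed-down stand-in for zola's page front matter; only `draft` and
// `weight` are kept, so this is illustrative rather than the real struct.
#[derive(Deserialize, Debug)]
struct FrontMatter {
    #[serde(default)]
    draft: bool,
    #[serde(default)]
    weight: usize,
}

fn main() {
    let raw = "draft = true\nweight = 0";
    let fm: FrontMatter = toml::from_str(raw).expect("invalid front matter");
    assert!(fm.draft);
    assert_eq!(fm.weight, 0);
    println!("{:?}", fm);
}
```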

@@ -28,8 +28,7 @@ categories = ["programming"]
 +++
 ```

-The taxonomy pages will only be created if at least one non-draft page is found and
-are available at the following paths:
+The taxonomy pages are available at the following paths:

 ```plain
 $BASE_URL/$NAME/

@@ -8,7 +8,7 @@ generate an `rss.xml` page for the site, which will live at `base_url/rss.xml`.
 generate the `rss.xml` page, Zola will look for a `rss.xml` file in the `templates`
 directory or, if one does not exist, will use the use the built-in rss template.

-**Only pages with a date and that are not draft will be available.**
+**Only pages with a date will be available.**

 The RSS template gets two variables in addition of the config:

@@ -62,7 +62,7 @@ struct ErrorFilePaths {
 }

 fn not_found<B>(
-    res: dev::ServiceResponse<B>
+    res: dev::ServiceResponse<B>,
 ) -> std::result::Result<ErrorHandlerResponse<B>, actix_web::Error> {
     let buf: Vec<u8> = {
         let error_files: &ErrorFilePaths = res.request().app_data().unwrap();

@@ -74,15 +74,10 @@ fn not_found<B>(
     };

     let new_resp = HttpResponse::build(http::StatusCode::NOT_FOUND)
-        .header(
-            http::header::CONTENT_TYPE,
-            http::header::HeaderValue::from_static("text/html"),
-        )
+        .header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("text/html"))
         .body(buf);

-    Ok(ErrorHandlerResponse::Response(
-        res.into_response(new_resp.into_body()),
-    ))
+    Ok(ErrorHandlerResponse::Response(res.into_response(new_resp.into_body())))
 }

 fn livereload_handler() -> HttpResponse {

@@ -192,23 +187,15 @@ pub fn serve(
     let broadcaster = if !watch_only {
         thread::spawn(move || {
             let s = HttpServer::new(move || {
-                let error_handlers = ErrorHandlers::new()
-                    .handler(http::StatusCode::NOT_FOUND, not_found);
+                let error_handlers =
+                    ErrorHandlers::new().handler(http::StatusCode::NOT_FOUND, not_found);

                 App::new()
-                    .data(ErrorFilePaths {
-                        not_found: static_root.join("404.html"),
-                    })
+                    .data(ErrorFilePaths { not_found: static_root.join("404.html") })
                     .wrap(error_handlers)
-                    .route(
-                        "/livereload.js",
-                        web::get().to(livereload_handler)
-                    )
+                    .route("/livereload.js", web::get().to(livereload_handler))
                     // Start a webserver that serves the `output_dir` directory
-                    .service(
-                        fs::Files::new("/", &static_root)
-                            .index_file("index.html"),
-                    )
+                    .service(fs::Files::new("/", &static_root).index_file("index.html"))
             })
             .bind(&address)
             .expect("Can't start the webserver")

@@ -272,7 +259,7 @@ pub fn serve(
     println!("Press Ctrl+C to stop\n");
     // Delete the output folder on ctrl+C
     ctrlc::set_handler(move || {
-        let _ =remove_dir_all(&output_path);
+        let _ = remove_dir_all(&output_path);
         ::std::process::exit(0);
     })
     .expect("Error setting Ctrl-C handler");

@@ -326,7 +313,12 @@ pub fn serve(
             } else {
                 rebuild_done_handling(
                     &broadcaster,
-                    copy_file(&path, &site.output_path, &site.static_path, site.config.hard_link_static),
+                    copy_file(
+                        &path,
+                        &site.output_path,
+                        &site.static_path,
+                        site.config.hard_link_static,
+                    ),
                     &partial_path.to_string_lossy(),
                 );
             }
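The `site.config.hard_link_static` flag passed above is the new `hard_link_static` option from the changelog: instead of copying files out of the static directory, zola can hard-link them into the output directory. The helper below is only a sketch of that switch using the standard library, not zola's `copy_file` (which, as the hunk shows, also takes the output and static roots).

```rust
use std::fs;
use std::io;
use std::path::Path;

// Sketch of a copy-or-hard-link switch. Note that fs::hard_link fails if
// `dest` already exists, so real code would remove or skip existing files.
fn copy_or_link(src: &Path, dest: &Path, hard_link: bool) -> io::Result<()> {
    if hard_link {
        // A hard link points at the same data on disk, so nothing is duplicated.
        fs::hard_link(src, dest)
    } else {
        fs::copy(src, dest).map(|_| ())
    }
}

fn main() -> io::Result<()> {
    // Hypothetical paths, purely for illustration.
    copy_or_link(Path::new("static/logo.png"), Path::new("public/logo.png"), true)
}
```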

@@ -74,7 +74,7 @@ fn main() {
             ::std::process::exit(1);
         }

         if !watch_only && !port_is_available(port) {
             port = if let Some(p) = get_available_port(1111) {
                 p
             } else {