Put back a limit on rss feed number of pages

Vincent Prouillet 2017-07-05 12:18:37 +09:00
parent 0b38568015
commit 015f146792
4 changed files with 15 additions and 3 deletions


@@ -6,7 +6,7 @@
- Add weight sorting
- Remove `section` from the `page` rendering context: this is too expensive. Use
the global function `get_section` if you need to get it
- Fix next/previous in pagination being incorrect
- Put back a 20 page limit on rss feed by default (configurable)

## 0.0.7 (2017-06-19)


@@ -68,7 +68,7 @@ impl Config {
        set_default!(config.language_code, "en".to_string());
        set_default!(config.highlight_code, false);
        set_default!(config.generate_rss, false);
        set_default!(config.rss_limit, <usize>::max_value());
        set_default!(config.rss_limit, 20);
        set_default!(config.generate_tags_pages, false);
        set_default!(config.generate_categories_pages, false);
        set_default!(config.insert_anchor_links, false);
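For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of how a `set_default!`-style macro can fill in unset config values, assuming the fields are `Option`s; it is an illustration, not the actual macro from the config module:

```rust
// Sketch only: fall back to a default when the user left the field unset.
// The real macro and Config struct in this repo may differ.
macro_rules! set_default {
    ($field: expr, $default: expr) => {
        if $field.is_none() {
            $field = Some($default);
        }
    };
}

struct Config {
    rss_limit: Option<usize>,
}

fn main() {
    let mut config = Config { rss_limit: None };
    // No value provided by the user, so the new default of 20 kicks in.
    set_default!(config.rss_limit, 20);
    assert_eq!(config.rss_limit, Some(20));
}
```

With the old default of `<usize>::max_value()` the feed was effectively unbounded; users who want a different cap can still set `rss_limit` explicitly in their config.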


@@ -35,3 +35,12 @@ fn bench_render_sitemap(b: &mut test::Bencher) {
    site.set_output_path(&public);
    b.iter(|| site.render_sitemap().unwrap());
}

#[bench]
fn bench_render_rss_feed(b: &mut test::Bencher) {
    let mut site = setup_site("huge-blog");
    let tmp_dir = TempDir::new("benches").expect("create temp dir");
    let public = &tmp_dir.path().join("public");
    site.set_output_path(&public);
    b.iter(|| site.render_rss_feed().unwrap());
}
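The `setup_site` helper the benchmarks rely on is not part of this diff. As a rough, hypothetical sketch of what such a helper presumably does, assuming a `Site::new(path, config_file)` constructor and a `load()` method (neither is shown in this commit, so treat the exact calls as assumptions):

```rust
// Hypothetical helper in the spirit of the setup_site used above: point the
// site at a fixture directory (e.g. benches/huge-blog) and parse everything
// up front, so b.iter() measures only the rendering step.
use std::path::Path;

fn setup_site(name: &str) -> Site {
    let path = Path::new("benches").join(name);
    // Constructor and load() are assumed; the real API at this revision may differ.
    let mut site = Site::new(&path, "config.toml").expect("failed to create site");
    site.load().expect("failed to load site content");
    site
}
```

Because these benchmarks use `#[bench]` and the unstable `test` crate, they need a nightly toolchain to run.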


@@ -39,6 +39,8 @@ use pagination::Paginator;
use rayon::prelude::*;

/// The sitemap only needs links and potentially date so we trim down
/// all pages to only that
#[derive(Debug, Serialize)]
struct SitemapEntry {
    permalink: String,
@@ -566,8 +568,9 @@ impl Site {
        if pages.is_empty() {
            return Ok(());
        }
        context.add("last_build_date", &pages[0].meta.date);
        let (sorted_pages, _) = sort_pages(pages, SortBy::Date);
        context.add("last_build_date", &sorted_pages[0].meta.date);
        context.add("pages", &sorted_pages);
        context.add("config", &self.config);