From 2d7315676b40cbce8eb3d33a390410d2bc68b41b Mon Sep 17 00:00:00 2001
From: Thomas Hurst
Date: Wed, 12 Sep 2018 23:52:51 +0100
Subject: [PATCH] Use a HashSet for detecting orphan pages

This offers a big performance bump for large sites:

small-kb  (100 pages):    before: 530ms  after: 500ms
medium-kb (1000 pages):   before: 2.9s   after: 1.5s
huge-kb   (10,000 pages): before: 150s   after: 9.1s
---
 components/site/src/lib.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/components/site/src/lib.rs b/components/site/src/lib.rs
index 29d59167..f5a72a4a 100644
--- a/components/site/src/lib.rs
+++ b/components/site/src/lib.rs
@@ -21,7 +21,7 @@ extern crate imageproc;
 #[cfg(test)]
 extern crate tempfile;
 
-use std::collections::HashMap;
+use std::collections::{HashMap, HashSet};
 use std::fs::{create_dir_all, remove_dir_all, copy};
 use std::mem;
 use std::path::{Path, PathBuf};
@@ -149,7 +149,7 @@ impl Site {
 
     /// Get all the orphan (== without section) pages in the site
     pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
-        let mut pages_in_sections = vec![];
+        let mut pages_in_sections = HashSet::new();
         let mut orphans = vec![];
 
         for s in self.sections.values() {
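
The heart of the change: orphan detection tests every page against the set of
pages already claimed by some section, and doing that membership test against a
Vec makes the pass roughly quadratic, while a HashSet makes each lookup O(1).
Below is a minimal sketch of the pattern, using simplified stand-in types
rather than the real Site, Page and Section structs from components/site and
components/content:

use std::collections::HashSet;
use std::path::PathBuf;

// Simplified stand-ins for illustration only; the real structs carry
// far more state than this.
struct Page {
    file_path: PathBuf,
}

struct Section {
    // Paths of the pages attached to this section.
    pages: Vec<PathBuf>,
}

struct Site {
    pages: Vec<Page>,
    sections: Vec<Section>,
}

impl Site {
    // Orphans are pages not referenced by any section. Collecting the
    // section pages into a HashSet makes each membership test O(1), so
    // the whole pass is linear instead of the quadratic behaviour of
    // calling Vec::contains inside a loop.
    fn get_all_orphan_pages(&self) -> Vec<&Page> {
        let pages_in_sections: HashSet<&PathBuf> = self
            .sections
            .iter()
            .flat_map(|s| s.pages.iter())
            .collect();

        self.pages
            .iter()
            .filter(|p| !pages_in_sections.contains(&p.file_path))
            .collect()
    }
}

fn main() {
    let site = Site {
        pages: vec![
            Page { file_path: PathBuf::from("blog/a.md") },
            Page { file_path: PathBuf::from("about.md") },
        ],
        sections: vec![Section { pages: vec![PathBuf::from("blog/a.md")] }],
    };

    // Only "about.md" is an orphan here.
    for page in site.get_all_orphan_pages() {
        println!("orphan: {}", page.file_path.display());
    }
}

The benchmark numbers are consistent with that complexity change: with 10,000
pages the Vec version performs on the order of 10^8 comparisons, while the
HashSet version does roughly 10^4 hashed lookups, which is what you would
expect behind the 150s to 9.1s improvement on huge-kb.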