Use a HashSet for detecting orphan pages
This offers a big performance bump for large sites:

small-kb (100 pages):     before: 530ms  after: 500ms
medium-kb (1000 pages):   before: 2.9s   after: 1.5s
huge-kb (10,000 pages):   before: 150s   after: 9.1s
commit 2d7315676b
parent 67698a3aa1
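Why the gain grows with site size: pages_in_sections was a Vec, so every "is this page already in a section?" check in the orphan pass was a linear scan, making the pass roughly quadratic in the number of pages; a HashSet makes each membership test O(1) on average. A minimal sketch of the before/after pattern, using plain &str paths and free functions instead of the real Site/Page types (names here are illustrative only, not the upstream code):

use std::collections::HashSet;

// Old pattern: slice/Vec membership is a linear scan per page,
// so the whole pass is O(all * in_sections).
fn orphans_with_vec<'a>(all: &[&'a str], in_sections: &[&'a str]) -> Vec<&'a str> {
    all.iter().copied().filter(|p| !in_sections.contains(p)).collect()
}

// New pattern: build a HashSet once, then each lookup is O(1) on
// average, making the pass over all pages linear overall.
fn orphans_with_set<'a>(all: &[&'a str], in_sections: &[&'a str]) -> Vec<&'a str> {
    let in_sections: HashSet<&str> = in_sections.iter().copied().collect();
    all.iter().copied().filter(|p| !in_sections.contains(p)).collect()
}

fn main() {
    let all = ["index.md", "about.md", "blog/first.md"];
    let in_sections = ["blog/first.md"];
    assert_eq!(orphans_with_vec(&all, &in_sections), ["index.md", "about.md"]);
    assert_eq!(orphans_with_set(&all, &in_sections), ["index.md", "about.md"]);
}

This matches the numbers above: at 100 pages the difference is barely visible (530ms -> 500ms), while at 10,000 pages the drop is roughly 16x (150s -> 9.1s).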
@@ -21,7 +21,7 @@ extern crate imageproc;
 #[cfg(test)]
 extern crate tempfile;
 
-use std::collections::HashMap;
+use std::collections::{HashMap, HashSet};
 use std::fs::{create_dir_all, remove_dir_all, copy};
 use std::mem;
 use std::path::{Path, PathBuf};
@@ -149,7 +149,7 @@ impl Site {
 
     /// Get all the orphan (== without section) pages in the site
     pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
-        let mut pages_in_sections = vec![];
+        let mut pages_in_sections = HashSet::new();
         let mut orphans = vec![];
 
         for s in self.sections.values() {
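The hunk shows only the top of the method. For context, here is a plausible shape of the full function after this change, sketched on pared-down stand-in types (the struct fields and the loop bodies below are assumptions; only the lines visible in the hunk are confirmed by this commit):

use std::collections::{HashMap, HashSet};
use std::path::PathBuf;

// Hypothetical, pared-down stand-ins for the real Site/Page/Section types.
pub struct Page { pub path: PathBuf }
pub struct Section { pub pages_paths: Vec<PathBuf> }
pub struct Site {
    pub pages: HashMap<PathBuf, Page>,
    pub sections: HashMap<PathBuf, Section>,
}

impl Site {
    /// Get all the orphan (== without section) pages in the site
    pub fn get_all_orphan_pages(&self) -> Vec<&Page> {
        // Collect every page path that some section claims.
        let mut pages_in_sections: HashSet<&PathBuf> = HashSet::new();
        let mut orphans = vec![];

        for s in self.sections.values() {
            for p in &s.pages_paths {
                pages_in_sections.insert(p);
            }
        }

        // With the HashSet this membership test is O(1) on average;
        // with the old Vec it was a linear scan for every page.
        for page in self.pages.values() {
            if !pages_in_sections.contains(&page.path) {
                orphans.push(page);
            }
        }

        orphans
    }
}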