Revamp the images template functions
Parent: b0937fa5b7 · Commit: 7fb99eaa44
@@ -5,6 +5,7 @@
### Breaking

- Newlines are now required after the closing `+++` of front-matter
- `resize_image` now returns a map: `{url, static_path}` instead of just the URL so you can follow up with other functions
- i18n rework: languages now have their sections in `config.toml` to set up all their options
  1. taxonomies don't have a `lang` anymore in the config, you need to declare them in their respective language section
  2. the `config` variable in templates has been changed and is now a stripped down language aware version of the previous `config`
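To illustrate the `resize_image` change above, here is a minimal Tera sketch (an assumption for illustration only, using `content/gutenberg.jpg`, the fixture image added by this commit's tests); the call now returns a map rather than a bare URL:

```jinja2
{# Sketch only: `resize_image` now returns a map with `url` and `static_path` #}
{% set resized = resize_image(path="content/gutenberg.jpg", width=40, height=40, op="fill") %}
<img src="{{ resized.url }}" />
{# `resized.static_path` points at the generated file under static/processed_images/,
   so it can be handed on to other template functions #}
```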
Cargo.lock (generated)
@@ -1022,6 +1022,7 @@ dependencies = [
name = "imageproc"
version = "0.1.0"
dependencies = [
"config",
"errors",
"image",
"lazy_static",
@@ -2592,6 +2593,8 @@ dependencies = [
"nom-bibtex",
"rendering",
"reqwest",
"serde",
"serde_derive",
"serde_json",
"sha2",
"svg_metadata",
@@ -10,7 +10,8 @@ regex = "1.0"
tera = "1"
image = "0.23"
rayon = "1"
webp="0.1.1"
webp = "0.1.1"

errors = { path = "../errors" }
utils = { path = "../utils" }
config = { path = "../config" }
@@ -11,10 +11,12 @@ use lazy_static::lazy_static;
use rayon::prelude::*;
use regex::Regex;

use config::Config;
use errors::{Error, Result};
use utils::fs as ufs;

static RESIZED_SUBDIR: &str = "processed_images";
const DEFAULT_Q_JPG: u8 = 75;

lazy_static! {
pub static ref RESIZED_FILENAME: Regex =
@@ -51,14 +53,12 @@ impl ResizeOp {
match op {
"fit_width" => {
if width.is_none() {
return Err("op=\"fit_width\" requires a `width` argument".to_string().into());
return Err("op=\"fit_width\" requires a `width` argument".into());
}
}
"fit_height" => {
if height.is_none() {
return Err("op=\"fit_height\" requires a `height` argument"
.to_string()
.into());
return Err("op=\"fit_height\" requires a `height` argument".into());
}
}
"scale" | "fit" | "fill" => {
@@ -132,8 +132,6 @@ impl Hash for ResizeOp {
}
}
}
const DEFAULT_Q_JPG: u8 = 75;

/// Thumbnail image format
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Format {
@@ -215,6 +213,7 @@ impl Hash for Format {
#[derive(Debug, PartialEq, Eq)]
pub struct ImageOp {
source: String,
input_path: PathBuf,
op: ResizeOp,
format: Format,
/// Hash of the above parameters
@@ -227,18 +226,9 @@ pub struct ImageOp {
}

impl ImageOp {
pub fn new(source: String, op: ResizeOp, format: Format) -> ImageOp {
let mut hasher = DefaultHasher::new();
hasher.write(source.as_ref());
op.hash(&mut hasher);
format.hash(&mut hasher);
let hash = hasher.finish();

ImageOp { source, op, format, hash, collision_id: 0 }
}

pub fn from_args(
source: String,
input_path: PathBuf,
op: &str,
width: Option<u32>,
height: Option<u32>,
@@ -247,18 +237,24 @@ impl ImageOp {
) -> Result<ImageOp> {
let op = ResizeOp::from_args(op, width, height)?;
let format = Format::from_args(&source, format, quality)?;
Ok(Self::new(source, op, format))

let mut hasher = DefaultHasher::new();
hasher.write(source.as_ref());
op.hash(&mut hasher);
format.hash(&mut hasher);
let hash = hasher.finish();

Ok(ImageOp { source, input_path, op, format, hash, collision_id: 0 })
}

fn perform(&self, content_path: &Path, target_path: &Path) -> Result<()> {
fn perform(&self, target_path: &Path) -> Result<()> {
use ResizeOp::*;

let src_path = content_path.join(&self.source);
if !ufs::file_stale(&src_path, target_path) {
if !ufs::file_stale(&self.input_path, target_path) {
return Ok(());
}

let mut img = image::open(&src_path)?;
let mut img = image::open(&self.input_path)?;
let (img_w, img_h) = img.dimensions();

const RESIZE_FILTER: FilterType = FilterType::Lanczos3;
@@ -266,8 +262,8 @@ impl ImageOp {

let img = match self.op {
Scale(w, h) => img.resize_exact(w, h, RESIZE_FILTER),
FitWidth(w) => img.resize(w, u32::max_value(), RESIZE_FILTER),
FitHeight(h) => img.resize(u32::max_value(), h, RESIZE_FILTER),
FitWidth(w) => img.resize(w, u32::MAX, RESIZE_FILTER),
FitHeight(h) => img.resize(u32::MAX, h, RESIZE_FILTER),
Fit(w, h) => {
if img_w > w || img_h > h {
img.resize(w, h, RESIZE_FILTER)
@@ -328,14 +324,15 @@ impl ImageOp {
}
}

/// A strcture into which image operations can be enqueued and then performed.
/// A struct into which image operations can be enqueued and then performed.
/// All output is written in a subdirectory in `static_path`,
/// taking care of file stale status based on timestamps and possible hash collisions.
#[derive(Debug)]
pub struct Processor {
content_path: PathBuf,
resized_path: PathBuf,
resized_url: String,
/// The base path of the Zola site
base_path: PathBuf,
base_url: String,
output_dir: PathBuf,
/// A map of a ImageOps by their stored hash.
/// Note that this cannot be a HashSet, because hashset handles collisions and we don't want that,
/// we need to be aware of and handle collisions ourselves.
@@ -345,30 +342,18 @@ pub struct Processor {
}

impl Processor {
pub fn new(content_path: PathBuf, static_path: &Path, base_url: &str) -> Processor {
pub fn new(base_path: PathBuf, config: &Config) -> Processor {
Processor {
content_path,
resized_path: static_path.join(RESIZED_SUBDIR),
resized_url: Self::resized_url(base_url),
output_dir: base_path.join("static").join(RESIZED_SUBDIR),
base_url: config.make_permalink(RESIZED_SUBDIR),
base_path,
img_ops: HashMap::new(),
img_ops_collisions: Vec::new(),
}
}

fn resized_url(base_url: &str) -> String {
if base_url.ends_with('/') {
format!("{}{}", base_url, RESIZED_SUBDIR)
} else {
format!("{}/{}", base_url, RESIZED_SUBDIR)
}
}

pub fn set_base_url(&mut self, base_url: &str) {
self.resized_url = Self::resized_url(base_url);
}

pub fn source_exists(&self, source: &str) -> bool {
self.content_path.join(source).exists()
pub fn set_base_url(&mut self, config: &Config) {
self.base_url = config.make_permalink(RESIZED_SUBDIR);
}

pub fn num_img_ops(&self) -> usize {
@@ -427,25 +412,25 @@ impl Processor {
format!("{:016x}{:02x}.{}", hash, collision_id, format.extension())
}

fn op_url(&self, hash: u64, collision_id: u32, format: Format) -> String {
format!("{}/{}", &self.resized_url, Self::op_filename(hash, collision_id, format))
}

pub fn insert(&mut self, img_op: ImageOp) -> String {
/// Adds the given operation to the queue but do not process it immediately.
/// Returns (path in static folder, final URL).
pub fn insert(&mut self, img_op: ImageOp) -> (PathBuf, String) {
let hash = img_op.hash;
let format = img_op.format;
let collision_id = self.insert_with_collisions(img_op);
self.op_url(hash, collision_id, format)
let filename = Self::op_filename(hash, collision_id, format);
let url = format!("{}{}", self.base_url, filename);
(Path::new("static").join(RESIZED_SUBDIR).join(filename), url)
}

pub fn prune(&self) -> Result<()> {
// Do not create folders if they don't exist
if !self.resized_path.exists() {
if !self.output_dir.exists() {
return Ok(());
}

ufs::ensure_directory_exists(&self.resized_path)?;
let entries = fs::read_dir(&self.resized_path)?;
ufs::ensure_directory_exists(&self.output_dir)?;
let entries = fs::read_dir(&self.output_dir)?;
for entry in entries {
let entry_path = entry?.path();
if entry_path.is_file() {
@@ -466,15 +451,15 @@ impl Processor {

pub fn do_process(&mut self) -> Result<()> {
if !self.img_ops.is_empty() {
ufs::ensure_directory_exists(&self.resized_path)?;
ufs::ensure_directory_exists(&self.output_dir)?;
}

self.img_ops
.par_iter()
.map(|(hash, op)| {
let target =
self.resized_path.join(Self::op_filename(*hash, op.collision_id, op.format));
op.perform(&self.content_path, &target)
self.output_dir.join(Self::op_filename(*hash, op.collision_id, op.format));
op.perform(&target)
.map_err(|e| Error::chain(format!("Failed to process image: {}", op.source), e))
})
.collect::<Result<()>>()
@@ -3,11 +3,15 @@ mod page;
mod section;
mod ser;

use std::fs::read_dir;
use std::path::{Path, PathBuf};

pub use self::file_info::FileInfo;
pub use self::page::Page;
pub use self::section::Section;
pub use self::ser::{SerializingPage, SerializingSection};

use config::Config;
use rendering::Heading;

pub fn has_anchor(headings: &[Heading], anchor: &str) -> bool {
@@ -23,9 +27,67 @@ pub fn has_anchor(headings: &[Heading], anchor: &str) -> bool {
false
}

/// Looks into the current folder for the path and see if there's anything that is not a .md
/// file. Those will be copied next to the rendered .html file
pub fn find_related_assets(path: &Path, config: &Config) -> Vec<PathBuf> {
let mut assets = vec![];

for entry in read_dir(path).unwrap().filter_map(std::result::Result::ok) {
let entry_path = entry.path();
if entry_path.is_file() {
match entry_path.extension() {
Some(e) => match e.to_str() {
Some("md") => continue,
_ => assets.push(entry_path.to_path_buf()),
},
None => continue,
}
}
}

if let Some(ref globset) = config.ignored_content_globset {
// `find_related_assets` only scans the immediate directory (it is not recursive) so our
// filtering only needs to work against the file_name component, not the full suffix. If
// `find_related_assets` was changed to also return files in subdirectories, we could
// use `PathBuf.strip_prefix` to remove the parent directory and then glob-filter
// against the remaining path. Note that the current behaviour effectively means that
// the `ignored_content` setting in the config file is limited to single-file glob
// patterns (no "**" patterns).
assets = assets
.into_iter()
.filter(|path| match path.file_name() {
None => false,
Some(file) => !globset.is_match(file),
})
.collect();
}

assets
}

#[cfg(test)]
mod tests {
use super::*;
use std::fs::File;

use config::Config;
use tempfile::tempdir;

#[test]
fn can_find_related_assets() {
let tmp_dir = tempdir().expect("create temp dir");
File::create(tmp_dir.path().join("index.md")).unwrap();
File::create(tmp_dir.path().join("example.js")).unwrap();
File::create(tmp_dir.path().join("graph.jpg")).unwrap();
File::create(tmp_dir.path().join("fail.png")).unwrap();

let assets = find_related_assets(tmp_dir.path(), &Config::default());
assert_eq!(assets.len(), 3);
assert_eq!(assets.iter().filter(|p| p.extension().unwrap() != "md").count(), 3);
assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "example.js").count(), 1);
assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "graph.jpg").count(), 1);
assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "fail.png").count(), 1);
}

#[test]
fn can_find_anchor_at_root() {
@@ -12,14 +12,14 @@ use config::Config;
use errors::{Error, Result};
use front_matter::{split_page_content, InsertAnchor, PageFrontMatter};
use rendering::{render_content, Heading, RenderContext};
use utils::fs::{find_related_assets, read_file};
use utils::site::get_reading_analytics;
use utils::slugs::slugify_paths;
use utils::templates::render_template;

use crate::content::file_info::FileInfo;
use crate::content::has_anchor;
use crate::content::ser::SerializingPage;
use utils::slugs::slugify_paths;
use crate::content::{find_related_assets, has_anchor};
use utils::fs::read_file;

lazy_static! {
// Based on https://regex101.com/r/H2n38Z/1/tests
@@ -43,7 +43,7 @@ pub struct Page {
pub raw_content: String,
/// All the non-md files we found next to the .md file
pub assets: Vec<PathBuf>,
/// All the non-md files we found next to the .md file as string for use in templates
/// All the non-md files we found next to the .md file
pub serialized_assets: Vec<String>,
/// The HTML rendered of the page
pub content: String,
@@ -216,27 +216,7 @@ impl Page {

if page.file.name == "index" {
let parent_dir = path.parent().unwrap();
let assets = find_related_assets(parent_dir);

if let Some(ref globset) = config.ignored_content_globset {
// `find_related_assets` only scans the immediate directory (it is not recursive) so our
// filtering only needs to work against the file_name component, not the full suffix. If
// `find_related_assets` was changed to also return files in subdirectories, we could
// use `PathBuf.strip_prefix` to remove the parent directory and then glob-filter
// against the remaining path. Note that the current behaviour effectively means that
// the `ignored_content` setting in the config file is limited to single-file glob
// patterns (no "**" patterns).
page.assets = assets
.into_iter()
.filter(|path| match path.file_name() {
None => false,
Some(file) => !globset.is_match(file),
})
.collect();
} else {
page.assets = assets;
}

page.assets = find_related_assets(parent_dir, config);
page.serialized_assets = page.serialize_assets(&base_path);
} else {
page.assets = vec![];
@@ -8,13 +8,13 @@ use config::Config;
use errors::{Error, Result};
use front_matter::{split_section_content, SectionFrontMatter};
use rendering::{render_content, Heading, RenderContext};
use utils::fs::{find_related_assets, read_file};
use utils::fs::read_file;
use utils::site::get_reading_analytics;
use utils::templates::render_template;

use crate::content::file_info::FileInfo;
use crate::content::has_anchor;
use crate::content::ser::SerializingSection;
use crate::content::{find_related_assets, has_anchor};
use crate::library::Library;

// Default is used to create a default index section if there is no _index.md in the root content directory
@@ -36,7 +36,7 @@ pub struct Section {
pub content: String,
/// All the non-md files we found next to the .md file
pub assets: Vec<PathBuf>,
/// All the non-md files we found next to the .md file as string for use in templates
/// All the non-md files we found next to the .md file as string
pub serialized_assets: Vec<String>,
/// All direct pages of that section
pub pages: Vec<DefaultKey>,
@@ -122,27 +122,7 @@ impl Section {
let mut section = Section::parse(path, &content, config, base_path)?;

let parent_dir = path.parent().unwrap();
let assets = find_related_assets(parent_dir);

if let Some(ref globset) = config.ignored_content_globset {
// `find_related_assets` only scans the immediate directory (it is not recursive) so our
// filtering only needs to work against the file_name component, not the full suffix. If
// `find_related_assets` was changed to also return files in subdirectories, we could
// use `PathBuf.strip_prefix` to remove the parent directory and then glob-filter
// against the remaining path. Note that the current behaviour effectively means that
// the `ignored_content` setting in the config file is limited to single-file glob
// patterns (no "**" patterns).
section.assets = assets
.into_iter()
.filter(|path| match path.file_name() {
None => false,
Some(file) => !globset.is_match(file),
})
.collect();
} else {
section.assets = assets;
}

section.assets = find_related_assets(parent_dir, config);
section.serialized_assets = section.serialize_assets();

Ok(section)
@@ -85,8 +85,7 @@ impl Site {

let content_path = path.join("content");
let static_path = path.join("static");
let imageproc =
imageproc::Processor::new(content_path.clone(), &static_path, &config.base_url);
let imageproc = imageproc::Processor::new(path.to_path_buf(), &config);
let output_path = path.join(config.output_dir.clone());

let site = Site {
@@ -152,9 +151,9 @@ impl Site {
}

pub fn set_base_url(&mut self, base_url: String) {
let mut imageproc = self.imageproc.lock().expect("Couldn't lock imageproc (set_base_url)");
imageproc.set_base_url(&base_url);
self.config.base_url = base_url;
let mut imageproc = self.imageproc.lock().expect("Couldn't lock imageproc (set_base_url)");
imageproc.set_base_url(&self.config);
}

pub fn set_output_path<P: AsRef<Path>>(&mut self, path: P) {
@@ -21,11 +21,13 @@ pub fn register_early_global_fns(site: &mut Site) -> TeraResult<()> {
vec![site.static_path.clone(), site.output_path.clone(), site.content_path.clone()],
),
);
site.tera
.register_function("resize_image", global_fns::ResizeImage::new(site.imageproc.clone()));
site.tera.register_function(
"resize_image",
global_fns::ResizeImage::new(site.base_path.clone(), site.imageproc.clone()),
);
site.tera.register_function(
"get_image_metadata",
global_fns::GetImageMeta::new(site.content_path.clone()),
global_fns::GetImageMetadata::new(site.base_path.clone()),
);
site.tera.register_function("load_data", global_fns::LoadData::new(site.base_path.clone()));
site.tera.register_function("trans", global_fns::Trans::new(site.config.clone()));
@@ -11,7 +11,9 @@ lazy_static = "1"
toml = "0.5"
csv = "1"
image = "0.23"
serde_json = "1.0"
serde = "1"
serde_json = "1"
serde_derive = "1"
sha2 = "0.9"
url = "2"
nom-bibtex = "0.3"
components/templates/gutenberg.jpg (new binary file, 47 KiB, not shown)
@@ -4,16 +4,31 @@ use std::path::PathBuf;
use std::sync::{Arc, Mutex};

use image::GenericImageView;
use serde_derive::{Deserialize, Serialize};
use svg_metadata as svg;
use tera::{from_value, to_value, Error, Function as TeraFn, Result, Value};

#[derive(Debug, Serialize, Deserialize)]
struct ResizeImageResponse {
/// The final URL for that asset
url: String,
/// The path to the static asset generated
static_path: String,
}

#[derive(Debug)]
pub struct ResizeImage {
/// The base path of the Zola site
base_path: PathBuf,
search_paths: [PathBuf; 2],
imageproc: Arc<Mutex<imageproc::Processor>>,
}

impl ResizeImage {
pub fn new(imageproc: Arc<Mutex<imageproc::Processor>>) -> Self {
Self { imageproc }
pub fn new(base_path: PathBuf, imageproc: Arc<Mutex<imageproc::Processor>>) -> Self {
let search_paths =
[base_path.join("static").to_path_buf(), base_path.join("content").to_path_buf()];
Self { base_path, imageproc, search_paths }
}
}
@@ -22,7 +37,7 @@ static DEFAULT_FMT: &str = "auto";

impl TeraFn for ResizeImage {
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
let path = required_arg!(
let mut path = required_arg!(
String,
args.get("path"),
"`resize_image` requires a `path` argument with a string value"
@@ -53,45 +68,38 @@ impl TeraFn for ResizeImage {
}

let mut imageproc = self.imageproc.lock().unwrap();
if !imageproc.source_exists(&path) {
if path.starts_with("@/") {
path = path.replace("@/", "content/");
}

let mut file_path = self.base_path.join(&path);
let mut file_exists = file_path.exists();
if !file_exists {
// we need to search in both search folders now
for dir in &self.search_paths {
let p = dir.join(&path);
if p.exists() {
file_path = p;
file_exists = true;
break;
}
}
}

if !file_exists {
return Err(format!("`resize_image`: Cannot find path: {}", path).into());
}

let imageop = imageproc::ImageOp::from_args(path, &op, width, height, &format, quality)
.map_err(|e| format!("`resize_image`: {}", e))?;
let url = imageproc.insert(imageop);
let imageop =
imageproc::ImageOp::from_args(path, file_path, &op, width, height, &format, quality)
.map_err(|e| format!("`resize_image`: {}", e))?;
let (static_path, url) = imageproc.insert(imageop);

to_value(url).map_err(|err| err.into())
}
}

#[derive(Debug)]
pub struct GetImageMeta {
content_path: PathBuf,
}

impl GetImageMeta {
pub fn new(content_path: PathBuf) -> Self {
Self { content_path }
}
}

impl TeraFn for GetImageMeta {
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
let path = required_arg!(
String,
args.get("path"),
"`get_image_metadata` requires a `path` argument with a string value"
);
let src_path = self.content_path.join(&path);
if !src_path.exists() {
return Err(format!("`get_image_metadata`: Cannot find path: {}", path).into());
}
let (height, width) = image_dimensions(&src_path)?;
let mut map = tera::Map::new();
map.insert(String::from("height"), Value::Number(tera::Number::from(height)));
map.insert(String::from("width"), Value::Number(tera::Number::from(width)));
Ok(Value::Object(map))
to_value(ResizeImageResponse {
static_path: static_path.to_string_lossy().into_owned(),
url,
})
.map_err(|err| err.into())
}
}
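As a hedged, template-level restatement of the lookup logic above (the file names are the test fixtures added further down, not part of any real site), `resize_image` now resolves its `path` argument against the site root, rewrites the `@/` shorthand to `content/`, and otherwise falls back to searching `static/` and `content/`:

```jinja2
{# Sketch only; all four forms end up pointing at files under the site root #}
{% set a = resize_image(path="static/gutenberg.jpg", width=40, height=40, op="fill") %}
{% set b = resize_image(path="content/gutenberg.jpg", width=40, height=40, op="fill") %}
{% set c = resize_image(path="@/gutenberg.jpg", width=40, height=40, op="fill") %}
{% set d = resize_image(path="gallery/asset.jpg", width=40, height=40, op="fill") %}
{# `c` is rewritten to content/gutenberg.jpg; `d` is found by trying static/ then content/ #}
```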
@@ -112,9 +120,163 @@ fn image_dimensions(path: &PathBuf) -> Result<(u32, u32)> {
}
}

#[derive(Debug)]
pub struct GetImageMetadata {
/// The base path of the Zola site
base_path: PathBuf,
}

impl GetImageMetadata {
pub fn new(base_path: PathBuf) -> Self {
Self { base_path }
}
}

impl TeraFn for GetImageMetadata {
fn call(&self, args: &HashMap<String, Value>) -> Result<Value> {
let mut path = required_arg!(
String,
args.get("path"),
"`get_image_metadata` requires a `path` argument with a string value"
);
if path.starts_with("@/") {
path = path.replace("@/", "content/");
}
let src_path = self.base_path.join(&path);
if !src_path.exists() {
return Err(format!("`get_image_metadata`: Cannot find path: {}", path).into());
}
let (height, width) = image_dimensions(&src_path)?;
let mut map = tera::Map::new();
map.insert(String::from("height"), Value::Number(tera::Number::from(height)));
map.insert(String::from("width"), Value::Number(tera::Number::from(width)));
Ok(Value::Object(map))
}
}

#[cfg(test)]
mod tests {
use super::*;
use super::{GetImageMetadata, ResizeImage};

// TODO
use std::collections::HashMap;
use std::fs::{copy, create_dir_all};

use config::Config;
use std::sync::{Arc, Mutex};
use tempfile::{tempdir, TempDir};
use tera::{to_value, Function};

fn create_dir_with_image() -> TempDir {
let dir = tempdir().unwrap();
create_dir_all(dir.path().join("content").join("gallery")).unwrap();
create_dir_all(dir.path().join("static")).unwrap();
copy("gutenberg.jpg", dir.path().join("content").join("gutenberg.jpg")).unwrap();
copy("gutenberg.jpg", dir.path().join("content").join("gallery").join("asset.jpg"))
.unwrap();
copy("gutenberg.jpg", dir.path().join("static").join("gutenberg.jpg")).unwrap();
dir
}

// https://github.com/getzola/zola/issues/788
// https://github.com/getzola/zola/issues/1035
#[test]
fn can_resize_image() {
let dir = create_dir_with_image();
let imageproc = imageproc::Processor::new(dir.path().to_path_buf(), &Config::default());

let static_fn = ResizeImage::new(dir.path().to_path_buf(), Arc::new(Mutex::new(imageproc)));
let mut args = HashMap::new();
args.insert("height".to_string(), to_value(40).unwrap());
args.insert("width".to_string(), to_value(40).unwrap());

// hashing is stable based on filename and params so we can compare with hashes

// 1. resizing an image in static
args.insert("path".to_string(), to_value("static/gutenberg.jpg").unwrap());
let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
assert_eq!(
data["static_path"],
to_value("static/processed_images/e49f5bd23ec5007c00.jpg").unwrap()
);
assert_eq!(
data["url"],
to_value("http://a-website.com/processed_images/e49f5bd23ec5007c00.jpg").unwrap()
);

// 2. resizing an image in content with a relative path
args.insert("path".to_string(), to_value("content/gutenberg.jpg").unwrap());
let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
assert_eq!(
data["static_path"],
to_value("static/processed_images/32454a1e0243976c00.jpg").unwrap()
);
assert_eq!(
data["url"],
to_value("http://a-website.com/processed_images/32454a1e0243976c00.jpg").unwrap()
);

// 3. resizing an image in content starting with `@/`
args.insert("path".to_string(), to_value("@/gutenberg.jpg").unwrap());
let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
assert_eq!(
data["static_path"],
to_value("static/processed_images/32454a1e0243976c00.jpg").unwrap()
);
assert_eq!(
data["url"],
to_value("http://a-website.com/processed_images/32454a1e0243976c00.jpg").unwrap()
);

// 4. resizing an image with a relative path not starting with static or content
args.insert("path".to_string(), to_value("gallery/asset.jpg").unwrap());
let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
assert_eq!(
data["static_path"],
to_value("static/processed_images/c8aaba7b0593a60b00.jpg").unwrap()
);
assert_eq!(
data["url"],
to_value("http://a-website.com/processed_images/c8aaba7b0593a60b00.jpg").unwrap()
);

// 5. resizing with an absolute path
args.insert("path".to_string(), to_value("/content/gutenberg.jpg").unwrap());
assert!(static_fn.call(&args).is_err());
}

// TODO: consider https://github.com/getzola/zola/issues/1161
#[test]
fn can_get_image_metadata() {
let dir = create_dir_with_image();

let static_fn = GetImageMetadata::new(dir.path().to_path_buf());

// Let's test a few scenarii
let mut args = HashMap::new();

// 1. a call to something in `static` with a relative path
args.insert("path".to_string(), to_value("static/gutenberg.jpg").unwrap());
let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
assert_eq!(data["height"], to_value(380).unwrap());
assert_eq!(data["width"], to_value(300).unwrap());

// 2. a call to something in `static` with an absolute path is not handled currently
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("/static/gutenberg.jpg").unwrap());
assert!(static_fn.call(&args).is_err());

// 3. a call to something in `content` with a relative path
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("content/gutenberg.jpg").unwrap());
let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
assert_eq!(data["height"], to_value(380).unwrap());
assert_eq!(data["width"], to_value(300).unwrap());

// 4. a call to something in `content` with a @/ path corresponds to
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("@/gutenberg.jpg").unwrap());
let data = static_fn.call(&args).unwrap().as_object().unwrap().clone();
assert_eq!(data["height"], to_value(380).unwrap());
assert_eq!(data["width"], to_value(300).unwrap());
}
}
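The template-side view of `get_image_metadata` follows the same pattern, again only a sketch using the fixture image above: it takes a path relative to the site root (or the `@/` content shorthand) and returns the image's `width` and `height`:

```jinja2
{# Sketch only: height and width come back as numbers #}
{% set meta = get_image_metadata(path="@/gutenberg.jpg") %}
{{ meta.width }}x{{ meta.height }}
```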
@@ -19,7 +19,7 @@ mod load_data;

pub use self::content::{GetPage, GetSection, GetTaxonomy, GetTaxonomyUrl};
pub use self::i18n::Trans;
pub use self::images::{GetImageMeta, ResizeImage};
pub use self::images::{GetImageMetadata, ResizeImage};
pub use self::load_data::LoadData;

#[derive(Debug)]
@@ -1,5 +1,5 @@
use filetime::{set_file_mtime, FileTime};
use std::fs::{copy, create_dir_all, metadata, read_dir, File};
use std::fs::{copy, create_dir_all, metadata, File};
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::time::SystemTime;
@@ -60,27 +60,6 @@ pub fn read_file(path: &Path) -> Result<String> {
Ok(content)
}

/// Looks into the current folder for the path and see if there's anything that is not a .md
/// file. Those will be copied next to the rendered .html file
pub fn find_related_assets(path: &Path) -> Vec<PathBuf> {
let mut assets = vec![];

for entry in read_dir(path).unwrap().filter_map(std::result::Result::ok) {
let entry_path = entry.path();
if entry_path.is_file() {
match entry_path.extension() {
Some(e) => match e.to_str() {
Some("md") => continue,
_ => assets.push(entry_path.to_path_buf()),
},
None => continue,
}
}
}

assets
}

/// Copy a file but takes into account where to start the copy as
/// there might be folders we need to create on the way.
pub fn copy_file(src: &Path, dest: &PathBuf, base_path: &PathBuf, hard_link: bool) -> Result<()> {
@@ -204,25 +183,9 @@ mod tests {
use std::path::PathBuf;
use std::str::FromStr;

use tempfile::{tempdir, tempdir_in};
use tempfile::tempdir_in;

use super::{copy_file, find_related_assets};

#[test]
fn can_find_related_assets() {
let tmp_dir = tempdir().expect("create temp dir");
File::create(tmp_dir.path().join("index.md")).unwrap();
File::create(tmp_dir.path().join("example.js")).unwrap();
File::create(tmp_dir.path().join("graph.jpg")).unwrap();
File::create(tmp_dir.path().join("fail.png")).unwrap();

let assets = find_related_assets(tmp_dir.path());
assert_eq!(assets.len(), 3);
assert_eq!(assets.iter().filter(|p| p.extension().unwrap() != "md").count(), 3);
assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "example.js").count(), 1);
assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "graph.jpg").count(), 1);
assert_eq!(assets.iter().filter(|p| p.file_name().unwrap() == "fail.png").count(), 1);
}
use super::copy_file;

#[test]
fn test_copy_file_timestamp_preserved() {