Extract sass code out of site/lib.rs

Vincent Prouillet 2020-07-24 23:00:00 +02:00
parent d7a557f0b8
commit 9be7bc074d
4 changed files with 137 additions and 98 deletions

Cargo.lock (generated)

@@ -73,12 +73,6 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f8aac770f1885fd7e387acedd76065302551364496e46b3dd00860b2f8359b9d"
-[[package]]
-name = "base64"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7"
 [[package]]
 name = "base64"
 version = "0.12.3"
@@ -830,9 +824,9 @@ dependencies = [
 [[package]]
 name = "hyper-rustls"
-version = "0.20.0"
+version = "0.21.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ac965ea399ec3a25ac7d13b8affd4b8f39325cca00858ddf5eb29b79e6b14b08"
+checksum = "37743cc83e8ee85eacfce90f2f4102030d9ff0a95244098d781e9bee4a90abb6"
 dependencies = [
  "bytes 0.5.6",
  "futures-util",
@@ -960,6 +954,12 @@ dependencies = [
  "libc",
 ]
+[[package]]
+name = "ipnet"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47be2f14c678be2fdcab04ab1171db51b2762ce6f0a8ee87c8dd4a04ed216135"
 [[package]]
 name = "itoa"
 version = "0.4.6"
@@ -1527,7 +1527,7 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7b336d94e8e4ce29bf15bba393164629764744c567e8ad306cc1fdd0119967fd"
 dependencies = [
- "base64 0.12.3",
+ "base64",
  "chrono",
  "indexmap",
  "line-wrap",
@@ -1780,11 +1780,11 @@ dependencies = [
 [[package]]
 name = "reqwest"
-version = "0.10.6"
+version = "0.10.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b82c9238b305f26f53443e3a4bc8528d64b8d0bee408ec949eb7bf5635ec680"
+checksum = "12427a5577082c24419c9c417db35cfeb65962efc7675bb6b0d5f1f9d315bfe6"
 dependencies = [
- "base64 0.12.3",
+ "base64",
  "bytes 0.5.6",
  "encoding_rs",
  "futures-core",
@@ -1793,6 +1793,7 @@ dependencies = [
  "http-body",
  "hyper",
  "hyper-rustls",
+ "ipnet",
  "js-sys",
  "lazy_static",
  "log",
@@ -1849,11 +1850,11 @@ dependencies = [
 [[package]]
 name = "rustls"
-version = "0.17.0"
+version = "0.18.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0d4a31f5d68413404705d6982529b0e11a9aacd4839d1d6222ee3b8cb4015e1"
+checksum = "cac94b333ee2aac3284c5b8a1b7fb4dd11cba88c244e3fe33cdbd047af0eb693"
 dependencies = [
- "base64 0.11.0",
+ "base64",
  "log",
  "ring",
  "sct",
@@ -2193,7 +2194,7 @@ dependencies = [
 name = "templates"
 version = "0.1.0"
 dependencies = [
- "base64 0.12.3",
+ "base64",
  "config",
  "csv",
  "errors",
@@ -2226,9 +2227,9 @@ dependencies = [
 [[package]]
 name = "tera"
-version = "1.3.1"
+version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "55df25c7768a0fb9f165931366eb0f21587c407061e1e69c1f5c2b495adfd9bb"
+checksum = "e9598067511caa7edb41886c4a29efe6d0564926837bde7dffa4a130ea6cc975"
 dependencies = [
  "chrono",
  "chrono-tz",
@@ -2332,9 +2333,9 @@ dependencies = [
 [[package]]
 name = "tokio-rustls"
-version = "0.13.1"
+version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "15cb62a0d2770787abc96e99c1cd98fcf17f94959f3af63ca85bdfb203f051b4"
+checksum = "228139ddd4fea3fa345a29233009635235833e52807af7ea6448ead03890d6a9"
 dependencies = [
  "futures-core",
  "rustls",
@@ -2373,9 +2374,9 @@ checksum = "e987b6bf443f4b5b3b6f38704195592cca41c5bb7aedd3c3693c7081f8289860"
 [[package]]
 name = "tracing"
-version = "0.1.16"
+version = "0.1.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2e2a2de6b0d5cbb13fc21193a2296888eaab62b6044479aafb3c54c01c29fcd"
+checksum = "dbdf4ccd1652592b01286a5dbe1e2a77d78afaa34beadd9872a5f7396f92aaa9"
 dependencies = [
 "cfg-if",
 "log",

site/lib.rs

@@ -1,13 +1,13 @@
+pub mod sass;
 pub mod sitemap;
 use std::collections::HashMap;
-use std::fs::{copy, create_dir_all, remove_dir_all};
+use std::fs::{copy, remove_dir_all};
 use std::path::{Path, PathBuf};
 use std::sync::{Arc, Mutex, RwLock};
 use glob::glob;
 use rayon::prelude::*;
-use sass_rs::{compile_file, Options as SassOptions, OutputStyle};
 use serde_derive::Serialize;
 use tera::{Context, Tera};
@@ -47,15 +47,19 @@ pub struct Site {
 }
 #[derive(Debug, Clone, PartialEq, Serialize)]
-struct SerializedTaxonomyItem<'a> {
+struct SerializedFeedTaxonomyItem<'a> {
     name: &'a str,
     slug: &'a str,
     permalink: &'a str,
 }
-impl<'a> SerializedTaxonomyItem<'a> {
+impl<'a> SerializedFeedTaxonomyItem<'a> {
     pub fn from_item(item: &'a TaxonomyItem) -> Self {
-        SerializedTaxonomyItem { name: &item.name, slug: &item.slug, permalink: &item.permalink }
+        SerializedFeedTaxonomyItem {
+            name: &item.name,
+            slug: &item.slug,
+            permalink: &item.permalink,
+        }
     }
 }
@@ -748,12 +752,12 @@ impl Site {
         if let Some(ref theme) = self.config.theme {
             let theme_path = self.base_path.join("themes").join(theme);
             if theme_path.join("sass").exists() {
-                self.compile_sass(&theme_path)?;
+                sass::compile_sass(&theme_path, &self.output_path)?;
             }
         }
         if self.config.compile_sass {
-            self.compile_sass(&self.base_path)?;
+            sass::compile_sass(&self.base_path, &self.output_path)?;
         }
         if self.config.build_search_index {
@@ -840,71 +844,6 @@ impl Site {
         Ok(())
     }
-    pub fn compile_sass(&self, base_path: &Path) -> Result<()> {
-        ensure_directory_exists(&self.output_path)?;
-        let sass_path = {
-            let mut sass_path = PathBuf::from(base_path);
-            sass_path.push("sass");
-            sass_path
-        };
-        let mut options = SassOptions::default();
-        options.output_style = OutputStyle::Compressed;
-        let mut compiled_paths = self.compile_sass_glob(&sass_path, "scss", &options.clone())?;
-        options.indented_syntax = true;
-        compiled_paths.extend(self.compile_sass_glob(&sass_path, "sass", &options)?);
-        compiled_paths.sort();
-        for window in compiled_paths.windows(2) {
-            if window[0].1 == window[1].1 {
-                bail!(
-                    "SASS path conflict: \"{}\" and \"{}\" both compile to \"{}\"",
-                    window[0].0.display(),
-                    window[1].0.display(),
-                    window[0].1.display(),
-                );
-            }
-        }
-        Ok(())
-    }
-    fn compile_sass_glob(
-        &self,
-        sass_path: &Path,
-        extension: &str,
-        options: &SassOptions,
-    ) -> Result<Vec<(PathBuf, PathBuf)>> {
-        let glob_string = format!("{}/**/*.{}", sass_path.display(), extension);
-        let files = glob(&glob_string)
-            .expect("Invalid glob for sass")
-            .filter_map(|e| e.ok())
-            .filter(|entry| {
-                !entry.as_path().components().any(|c| c.as_os_str().to_string_lossy().starts_with('_'))
-            })
-            .collect::<Vec<_>>();
-        let mut compiled_paths = Vec::new();
-        for file in files {
-            let css = compile_file(&file, options.clone())?;
-            let path_inside_sass = file.strip_prefix(&sass_path).unwrap();
-            let parent_inside_sass = path_inside_sass.parent();
-            let css_output_path = self.output_path.join(path_inside_sass).with_extension("css");
-            if parent_inside_sass.is_some() {
-                create_dir_all(&css_output_path.parent().unwrap())?;
-            }
-            create_file(&css_output_path, &css)?;
-            compiled_paths.push((path_inside_sass.to_owned(), css_output_path));
-        }
-        Ok(compiled_paths)
-    }
     fn render_alias(&self, alias: &str, permalink: &str) -> Result<()> {
         let mut output_path = self.output_path.to_path_buf();
         let mut split = alias.split('/').collect::<Vec<_>>();
@@ -1138,7 +1077,7 @@ impl Site {
         if let Some((taxonomy, item)) = taxonomy_and_item {
             context.insert("taxonomy", taxonomy);
-            context.insert("term", &SerializedTaxonomyItem::from_item(item));
+            context.insert("term", &SerializedFeedTaxonomyItem::from_item(item));
         }
         let feed = &render_template(feed_filename, &self.tera, context, &self.config.theme)?;
@@ -1292,3 +1231,5 @@ impl Site {
             .collect::<Result<()>>()
     }
 }
+impl Site {}

site/sass.rs (new file)

@@ -0,0 +1,73 @@
+use std::fs::create_dir_all;
+use std::path::{Path, PathBuf};
+use glob::glob;
+use sass_rs::{compile_file, Options, OutputStyle};
+use errors::{bail, Result};
+use utils::fs::{create_file, ensure_directory_exists};
+pub fn compile_sass(base_path: &Path, output_path: &Path) -> Result<()> {
+    ensure_directory_exists(&output_path)?;
+    let sass_path = {
+        let mut sass_path = PathBuf::from(base_path);
+        sass_path.push("sass");
+        sass_path
+    };
+    let mut options = Options::default();
+    options.output_style = OutputStyle::Compressed;
+    let mut compiled_paths = compile_sass_glob(&sass_path, output_path, "scss", &options.clone())?;
+    options.indented_syntax = true;
+    compiled_paths.extend(compile_sass_glob(&sass_path, output_path, "sass", &options)?);
+    compiled_paths.sort();
+    for window in compiled_paths.windows(2) {
+        if window[0].1 == window[1].1 {
+            bail!(
+                "SASS path conflict: \"{}\" and \"{}\" both compile to \"{}\"",
+                window[0].0.display(),
+                window[1].0.display(),
+                window[0].1.display(),
+            );
+        }
+    }
+    Ok(())
+}
+fn compile_sass_glob(
+    sass_path: &Path,
+    output_path: &Path,
+    extension: &str,
+    options: &Options,
+) -> Result<Vec<(PathBuf, PathBuf)>> {
+    let glob_string = format!("{}/**/*.{}", sass_path.display(), extension);
+    let files = glob(&glob_string)
+        .expect("Invalid glob for sass")
+        .filter_map(|e| e.ok())
+        .filter(|entry| {
+            !entry.as_path().components().any(|c| c.as_os_str().to_string_lossy().starts_with('_'))
+        })
+        .collect::<Vec<_>>();
+    let mut compiled_paths = Vec::new();
+    for file in files {
+        let css = compile_file(&file, options.clone())?;
+        let path_inside_sass = file.strip_prefix(&sass_path).unwrap();
+        let parent_inside_sass = path_inside_sass.parent();
+        let css_output_path = output_path.join(path_inside_sass).with_extension("css");
+        if parent_inside_sass.is_some() {
+            create_dir_all(&css_output_path.parent().unwrap())?;
+        }
+        create_file(&css_output_path, &css)?;
+        compiled_paths.push((path_inside_sass.to_owned(), css_output_path));
+    }
+    Ok(compiled_paths)
+}
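For reference, the extracted function now takes the output directory explicitly instead of reading it from &self on Site. A minimal sketch of the new call pattern, mirroring the call sites changed in site/lib.rs above (the build_styles helper and its parameter names are illustrative and assume the workspace's site and errors crates; they are not part of the commit):

    use std::path::Path;

    use errors::Result;
    use site::sass;

    // Compile the theme's Sass first (if any), then the site's own Sass,
    // writing the generated CSS into the same output tree.
    fn build_styles(base_path: &Path, theme_path: Option<&Path>, output_path: &Path) -> Result<()> {
        if let Some(theme) = theme_path {
            if theme.join("sass").exists() {
                sass::compile_sass(theme, output_path)?;
            }
        }
        sass::compile_sass(base_path, output_path)
    }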


@@ -1,4 +1,4 @@
-// Contains an embedded version of livereload-js 3.2.1
+// Contains an embedded version of livereload-js 3.2.4
 //
 // Copyright (c) 2010-2012 Andrey Tarantsov
 //
@@ -21,6 +21,7 @@
 // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+use std::collections::HashMap;
 use std::env;
 use std::fs::{read_dir, remove_dir_all};
 use std::path::{Path, PathBuf, MAIN_SEPARATOR};
@@ -32,6 +33,7 @@ use hyper::header;
 use hyper::service::{make_service_fn, service_fn};
 use hyper::{Body, Method, Request, Response, Server, StatusCode};
 use hyper_staticfile::ResolveResult;
+use lazy_static::lazy_static;
 use tokio::io::AsyncReadExt;
 use chrono::prelude::*;
@@ -41,6 +43,7 @@ use ws::{Message, Sender, WebSocket};
 use errors::{Error as ZolaError, Result};
 use globset::GlobSet;
 use site::Site;
+use site::sass::compile_sass;
 use utils::fs::copy_file;
 use crate::console;
@@ -62,9 +65,18 @@ static NOT_FOUND_TEXT: &[u8] = b"Not Found";
 // This is dist/livereload.min.js from the LiveReload.js v3.2.4 release
 const LIVE_RELOAD: &str = include_str!("livereload.js");
+lazy_static! {
+    pub static ref SITE_DATA: HashMap<String, String> = {
+        let mut m = HashMap::new();
+        m.insert("/hello".to_string(), "ho".to_string());
+        m
+    };
+}
 async fn handle_request(req: Request<Body>, root: PathBuf) -> Result<Response<Body>> {
+    let path = req.uri().path();
     // livereload.js is served using the LIVE_RELOAD str, not a file
-    if req.uri().path() == "/livereload.js" {
+    if path == "/livereload.js" {
         if req.method() == Method::GET {
             return Ok(livereload_js());
         } else {
@@ -72,6 +84,10 @@ async fn handle_request(req: Request<Body>, root: PathBuf) -> Result<Response<Bo
         }
     }
+    if let Some(content) = SITE_DATA.get(path) {
+        return Ok(in_memory_html(content));
+    }
     let result = hyper_staticfile::resolve(&root, &req).await.unwrap();
     match result {
         ResolveResult::MethodNotMatched => return Ok(method_not_allowed()),
@@ -92,6 +108,14 @@ fn livereload_js() -> Response<Body> {
         .expect("Could not build livereload.js response")
 }
+fn in_memory_html(content: &str) -> Response<Body> {
+    Response::builder()
+        .header(header::CONTENT_TYPE, "text/html")
+        .status(StatusCode::OK)
+        .body(content.to_owned().into())
+        .expect("Could not build HTML response")
+}
 fn internal_server_error() -> Response<Body> {
     Response::builder()
         .header(header::CONTENT_TYPE, "text/plain")
@@ -371,7 +395,7 @@ pub fn serve(
                 console::info(&msg);
                 rebuild_done_handling(
                     &broadcaster,
-                    site.compile_sass(&site.base_path),
+                    compile_sass(&site.base_path, &site.output_path),
                     &partial_path.to_string_lossy(),
                 );
             };