commit 3dcc080f9d

CHANGELOG.md
@@ -1,5 +1,14 @@
 # Changelog
 
+## 0.14.1 (2021-08-24)
+
+- HTML minification now respects the HTML spec (it still worked before because browsers handle invalid HTML well, and minifiers take advantage of that)
+- Show all errors on `zola serve`
+- `zola serve` now properly returns a 404
+- Fix `zola serve` having issues with config files in a separate directory
+- Fix code block content not being escaped when not using syntax highlighting
+- Add missing `draft` attribute to the `section` variable in templates
+
 ## 0.14.0 (2021-07-19)
 
 ### Breaking

Cargo.lock (generated; 396 changed lines)

Version bumps:
- bitflags 1.2.1 -> 1.3.2
- bytemuck 1.7.0 -> 1.7.2
- crossbeam-deque 0.8.0 -> 0.8.1
- ctrlc 3.1.9 -> 3.2.0
- filetime 0.2.14 -> 0.2.15
- futures-channel, futures-core, futures-io, futures-macro, futures-sink, futures-task, futures-util 0.3.15 -> 0.3.16
- gh-emoji 1.0.3 -> 1.0.6 (now uses phf 0.10.0)
- h2 0.3.3 -> 0.3.4
- http-body 0.4.2 -> 0.4.3
- httparse 1.4.1 -> 1.5.1
- hyper 0.14.10 -> 0.14.11
- itoa 0.4.7 -> 0.4.8
- jobserver 0.1.22 -> 0.1.24
- js-sys 0.3.51 -> 0.3.53
- libc 0.2.98 -> 0.2.100
- matches 0.1.8 -> 0.1.9
- memchr 2.4.0 -> 2.4.1
- minify-html 0.4.11 -> 0.6.8
- nix 0.20.0 -> 0.22.0 (gains a `cc` build dependency)
- open 1.7.1 -> 2.0.1 (gains a `pathdiff` dependency)
- plist 1.1.0 -> 1.2.1
- proc-macro2 1.0.27 -> 1.0.28
- redox_syscall 0.2.9 -> 0.2.10
- relative-path 1.4.0 -> 1.5.0
- serde and serde_derive 1.0.126 -> 1.0.129
- serde_json 1.0.64 -> 1.0.66
- serde_yaml 0.8.17 -> 0.8.19 (now depends on indexmap instead of linked-hash-map)
- siphasher 0.3.5 -> 0.3.6
- slab 0.4.3 -> 0.4.4
- socket2 0.4.0 -> 0.4.1
- syn 1.0.73 -> 1.0.75
- syntect: git branch `scopestack` (5.0.0 pre-release) -> crates.io release 4.6.0
- tinyvec 1.2.0 -> 1.3.1
- tokio 1.8.1 -> 1.10.0
- tracing-core 0.1.18 -> 0.1.19
- unicode-bidi 0.3.5 -> 0.3.6 (drops its `matches` dependency)
- wasm-bindgen, wasm-bindgen-backend, wasm-bindgen-macro, wasm-bindgen-macro-support, wasm-bindgen-shared 0.2.74 -> 0.2.76
- wasm-bindgen-futures 0.4.24 -> 0.4.26
- web-sys 0.3.51 -> 0.3.53
- xml-rs 0.8.3 -> 0.8.4
- zola 0.14.0 -> 0.14.1

New packages (mostly the native-tls stack, plus a second phf version pulled in by gh-emoji 1.0.6):
- core-foundation 0.9.1 and core-foundation-sys 0.8.2
- foreign-types 0.3.2 and foreign-types-shared 0.1.1
- hyper-tls 0.5.0
- native-tls 0.2.8
- openssl 0.10.36, openssl-probe 0.1.4, openssl-sys 0.9.66
- phf 0.10.0 and phf_shared 0.10.0 (the 0.8.0 versions remain; existing dependents such as jieba-rs now pin "phf 0.8.0" / "phf_shared 0.8.0" explicitly)
- schannel 0.1.19
- security-framework 2.3.1 and security-framework-sys 2.3.0
- tokio-native-tls 0.3.0
- vcpkg 0.2.15

Other dependency-graph changes:
- reqwest gains hyper-tls, native-tls and tokio-native-tls dependencies

Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "zola"
-version = "0.14.0"
+version = "0.14.1"
 authors = ["Vincent Prouillet <hello@vincentprouillet.com>"]
 edition = "2018"
 license = "MIT"

@@ -33,7 +33,7 @@ percent-encoding = "2"
 notify = "4"
 ws = "0.9"
 ctrlc = "3"
-open = "1.2"
+open = "2"
 globset = "0.4"
 relative-path = "1"
 serde_json = "1.0"

@@ -49,6 +49,11 @@ search = { path = "components/search" }
 [dev-dependencies]
 same-file = "1"
 
+[features]
+default = ["rust-tls"]
+rust-tls = ["site/rust-tls"]
+native-tls = ["site/native-tls"]
+
 [workspace]
 members = [
     "components/config",
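A note on the new feature flags: `rust-tls` stays the default, while `native-tls` is opt-in; the CI change further down builds that variant with `cargo build --all --no-default-features --features=native-tls`. The snippet below is a generic, hypothetical sketch (not Zola's code) of how such Cargo features usually surface in Rust source, only the feature name is taken from the manifest above:

    // Hypothetical illustration of feature-gated compilation.
    #[cfg(feature = "native-tls")]
    const TLS_BACKEND: &str = "native-tls (OpenSSL / Schannel / Security.framework)";

    #[cfg(not(feature = "native-tls"))]
    const TLS_BACKEND: &str = "rustls";

    fn main() {
        // Which constant exists is decided at compile time by the enabled features.
        println!("built with TLS backend: {}", TLS_BACKEND);
    }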
azure-pipelines.yml
@@ -36,8 +36,10 @@ stages:
           echo "##vso[task.setvariable variable=PATH;]%PATH%;%USERPROFILE%\.cargo\bin"
         displayName: Windows install rust
         condition: eq( variables['Agent.OS'], 'Windows_NT' )
+      - script: cargo build --all --no-default-features --features=native-tls && cargo clean
+        displayName: Cargo build (Native TLS)
       - script: cargo build --all
-        displayName: Cargo build
+        displayName: Cargo build (Rust TLS)
       - script: cargo test --all
         displayName: Cargo test

components/config/Cargo.toml
@@ -12,8 +12,7 @@ serde_derive = "1"
 chrono = "0.4"
 globset = "0.4"
 lazy_static = "1"
-# TODO: go back to version 4/5 once https://github.com/trishume/syntect/pull/337 is merged
-syntect = { git = "https://github.com/Keats/syntect.git", branch = "scopestack" }
+syntect = "4"
 unic-langid = "0.9"
 
 errors = { path = "../errors" }

@@ -38,7 +38,7 @@ fn main() {
     };
 
     // and then the ones we add
-    let mut extra = base_path.clone();
+    let mut extra = base_path;
     extra.push("extra");
     match builder.add_from_folder(&extra, true) {
        Ok(_) => (),

@@ -60,7 +60,7 @@ fn main() {
            .or_insert_with(|| HashSet::from_iter(s.file_extensions.iter().cloned()));
    }
    let mut keys = syntaxes.keys().collect::<Vec<_>>();
-    keys.sort_by(|a, b| a.to_lowercase().cmp(&b.to_lowercase()));
+    keys.sort_by_key(|&a| a.to_lowercase());
    for k in keys {
        if !syntaxes[k].is_empty() {
            let mut extensions_sorted = syntaxes[k].iter().cloned().collect::<Vec<_>>();

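The second hunk above swaps a hand-written comparator for `sort_by_key`. A minimal standalone sketch of the equivalence, with illustrative data that is not taken from the repository:

    fn main() {
        let mut keys = vec!["TOML", "bash", "Rust"];

        // Comparator form: the key expression appears on both sides of every comparison.
        keys.sort_by(|a, b| a.to_lowercase().cmp(&b.to_lowercase()));

        // Key-extraction form: same ordering, the key function is written once.
        keys.sort_by_key(|k| k.to_lowercase());

        assert_eq!(keys, vec!["bash", "Rust", "TOML"]);
    }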
@@ -117,18 +117,18 @@ impl Config {
             bail!("A base URL is required in config.toml with key `base_url`");
         }
 
-        if config.markdown.highlight_theme != "css" {
-            if !THEME_SET.themes.contains_key(&config.markdown.highlight_theme) {
-                bail!(
-                    "Highlight theme {} defined in config does not exist.",
-                    config.markdown.highlight_theme
-                );
-            }
-        }
+        if config.markdown.highlight_theme != "css"
+            && !THEME_SET.themes.contains_key(&config.markdown.highlight_theme)
+        {
+            bail!(
+                "Highlight theme {} defined in config does not exist.",
+                config.markdown.highlight_theme
+            );
+        }
 
         languages::validate_code(&config.default_language)?;
         for code in config.languages.keys() {
-            languages::validate_code(&code)?;
+            languages::validate_code(code)?;
         }
 
         config.add_default_language();

@@ -229,8 +229,8 @@ impl Config {
 
     /// Parse the theme.toml file and merges the extra data from the theme
     /// with the config extra data
-    pub fn merge_with_theme(&mut self, path: &PathBuf, theme_name: &str) -> Result<()> {
-        let theme = Theme::from_file(path, theme_name)?;
+    pub fn merge_with_theme(&mut self, path: PathBuf, theme_name: &str) -> Result<()> {
+        let theme = Theme::from_file(&path, theme_name)?;
         self.add_theme_extra(&theme)
     }
 

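The first hunk collapses a nested `if` into a single condition (what clippy calls `collapsible_if`). A minimal sketch of the equivalence, using a stand-in for the theme lookup; the helper and theme names here are invented for illustration:

    fn is_known_theme(name: &str) -> bool {
        // Stand-in for THEME_SET.themes.contains_key(name).
        matches!(name, "ayu-dark" | "ayu-light")
    }

    fn validate(theme: &str) -> Result<(), String> {
        // Before:
        // if theme != "css" {
        //     if !is_known_theme(theme) { return Err(...); }
        // }
        // After, identical behaviour with one level less nesting:
        if theme != "css" && !is_known_theme(theme) {
            return Err(format!("Highlight theme {} defined in config does not exist.", theme));
        }
        Ok(())
    }

    fn main() {
        assert!(validate("css").is_ok());
        assert!(validate("ayu-dark").is_ok());
        assert!(validate("no-such-theme").is_err());
    }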
@@ -414,44 +414,38 @@ hello = "world"
 
     #[test]
     fn can_make_url_index_page_with_non_trailing_slash_url() {
-        let mut config = Config::default();
-        config.base_url = "http://vincent.is".to_string();
+        let config = Config { base_url: "http://vincent.is".to_string(), ..Default::default() };
         assert_eq!(config.make_permalink(""), "http://vincent.is/");
     }
 
     #[test]
     fn can_make_url_index_page_with_railing_slash_url() {
-        let mut config = Config::default();
-        config.base_url = "http://vincent.is/".to_string();
+        let config = Config { base_url: "http://vincent.is".to_string(), ..Default::default() };
         assert_eq!(config.make_permalink(""), "http://vincent.is/");
     }
 
     #[test]
     fn can_make_url_with_non_trailing_slash_base_url() {
-        let mut config = Config::default();
-        config.base_url = "http://vincent.is".to_string();
+        let config = Config { base_url: "http://vincent.is".to_string(), ..Default::default() };
         assert_eq!(config.make_permalink("hello"), "http://vincent.is/hello/");
     }
 
     #[test]
     fn can_make_url_with_trailing_slash_path() {
-        let mut config = Config::default();
-        config.base_url = "http://vincent.is/".to_string();
+        let config = Config { base_url: "http://vincent.is".to_string(), ..Default::default() };
         assert_eq!(config.make_permalink("/hello"), "http://vincent.is/hello/");
     }
 
     #[test]
     fn can_make_url_with_localhost() {
-        let mut config = Config::default();
-        config.base_url = "http://127.0.0.1:1111".to_string();
+        let config = Config { base_url: "http://127.0.0.1:1111".to_string(), ..Default::default() };
         assert_eq!(config.make_permalink("/tags/rust"), "http://127.0.0.1:1111/tags/rust/");
     }
 
     // https://github.com/Keats/gutenberg/issues/486
     #[test]
     fn doesnt_add_trailing_slash_to_feed() {
-        let mut config = Config::default();
-        config.base_url = "http://vincent.is/".to_string();
+        let config = Config { base_url: "http://vincent.is".to_string(), ..Default::default() };
         assert_eq!(config.make_permalink("atom.xml"), "http://vincent.is/atom.xml");
     }
 

@@ -656,7 +650,7 @@ bar = "baz"
 "#;
         let theme = Theme::parse(theme_str).unwrap();
         // We expect an error here
-        assert_eq!(false, config.add_theme_extra(&theme).is_ok());
+        assert!(!config.add_theme_extra(&theme).is_ok());
     }
 
     #[test]

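These test hunks (and similar ones in the library tests further down) replace "default first, then mutate a field" with struct update syntax, the pattern clippy flags as `field_reassign_with_default`. A minimal sketch with a hypothetical two-field struct, not Zola's real `Config`:

    #[derive(Default, Debug, PartialEq)]
    struct Config {
        base_url: String,
        compile_sass: bool,
    }

    fn main() {
        // Before: build a default value, then reassign a field.
        let mut a = Config::default();
        a.base_url = "http://vincent.is".to_string();

        // After: struct update syntax builds the same value in one expression, without `mut`.
        let b = Config { base_url: "http://vincent.is".to_string(), ..Default::default() };

        assert_eq!(a, b);
    }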
@@ -1,5 +1,5 @@
 use std::collections::HashMap;
-use std::path::PathBuf;
+use std::path::Path;
 
 use serde_derive::{Deserialize, Serialize};
 use toml::Value as Toml;

@@ -39,7 +39,7 @@ impl Theme {
     }
 
     /// Parses a theme file from the given path
-    pub fn from_file(path: &PathBuf, theme_name: &str) -> Result<Theme> {
+    pub fn from_file(path: &Path, theme_name: &str) -> Result<Theme> {
         let content = read_file(path)
             .map_err(|e| errors::Error::chain(format!("Failed to load theme {}", theme_name), e))?;
         Theme::parse(&content)

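Many hunks in this commit change `&PathBuf` parameters to `&Path` (clippy's `ptr_arg` lint): `&Path` accepts a borrowed `PathBuf` via deref coercion as well as any other borrowed path, so callers are never forced to own a `PathBuf`. A small self-contained sketch; the function and paths are illustrative only:

    use std::path::{Path, PathBuf};

    // Takes &Path, so callers can pass &PathBuf, Path::new("..."), or any borrowed path.
    fn describe(path: &Path) -> String {
        format!("{} ({} components)", path.display(), path.components().count())
    }

    fn main() {
        let owned: PathBuf = PathBuf::from("themes/hyde/theme.toml");
        println!("{}", describe(&owned)); // &PathBuf coerces to &Path
        println!("{}", describe(Path::new("config.toml"))); // no allocation needed
    }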
@@ -8,5 +8,4 @@ edition = "2018"
 tera = "1"
 toml = "0.5"
 image = "0.23"
-# TODO: go back to version 4/5 once https://github.com/trishume/syntect/pull/337 is merged
-syntect = { git = "https://github.com/Keats/syntect.git", branch = "scopestack" }
+syntect = "4"

@@ -81,7 +81,7 @@ impl PageFrontMatter {
         let mut f: PageFrontMatter = raw.deserialize()?;
 
         if let Some(ref slug) = f.slug {
-            if slug == "" {
+            if slug.is_empty() {
                 bail!("`slug` can't be empty if present")
             }
         }

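`slug == ""` becomes `slug.is_empty()`: the same check, stated directly (clippy's `comparison_to_empty`). A minimal sketch of the rule with a simplified error type; only the error message is taken from the hunk above:

    fn validate_slug(slug: &Option<String>) -> Result<(), String> {
        if let Some(slug) = slug {
            // Equivalent to `slug == ""`, but names the intent.
            if slug.is_empty() {
                return Err("`slug` can't be empty if present".to_string());
            }
        }
        Ok(())
    }

    fn main() {
        assert!(validate_slug(&None).is_ok());
        assert!(validate_slug(&Some("my-post".into())).is_ok());
        assert!(validate_slug(&Some(String::new())).is_err());
    }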
@@ -140,7 +140,7 @@ impl ResizeOp {
             }
             Fit(w, h) => {
                 if orig_w <= w && orig_h <= h {
-                    return res; // ie. no-op
+                    return res; // ie. no-op
                 }
 
                 let orig_w_h = orig_w as u64 * h as u64;

@@ -338,7 +338,7 @@ impl ImageOp {
                 Some(q) => encoder.encode(q as f32),
                 None => encoder.encode_lossless(),
             };
-            f.write_all(&memory.as_bytes())?;
+            f.write_all(memory.as_bytes())?;
         }
     }
 

@@ -408,6 +408,7 @@ impl Processor {
         self.img_ops.len() + self.img_ops_collisions.len()
     }
 
+    #[allow(clippy::too_many_arguments)]
     pub fn enqueue(
         &mut self,
         input_src: String,

@@ -30,6 +30,7 @@ lazy_static! {
     static ref PROCESSED_PREFIX: String = format!("static{0}processed_images{0}", SLASH);
 }
 
+#[allow(clippy::too_many_arguments)]
 fn image_op_test(
     source_img: &str,
     op: &str,

@@ -44,7 +45,7 @@ fn image_op_test(
 ) {
     let source_path = TEST_IMGS.join(source_img);
 
-    let config = Config::parse(&CONFIG).unwrap();
+    let config = Config::parse(CONFIG).unwrap();
     let mut proc = Processor::new(TMPDIR.clone(), &config);
 
     let resp =

@@ -52,7 +52,7 @@ pub struct FileInfo {
 }
 
 impl FileInfo {
-    pub fn new_page(path: &Path, base_path: &PathBuf) -> FileInfo {
+    pub fn new_page(path: &Path, base_path: &Path) -> FileInfo {
         let file_path = path.to_path_buf();
         let mut parent = file_path.parent().expect("Get parent of page").to_path_buf();
         let name = path.file_stem().unwrap().to_string_lossy().to_string();

@@ -87,7 +87,7 @@ impl FileInfo {
         }
     }
 
-    pub fn new_section(path: &Path, base_path: &PathBuf) -> FileInfo {
+    pub fn new_section(path: &Path, base_path: &Path) -> FileInfo {
         let file_path = path.to_path_buf();
         let parent = path.parent().expect("Get parent of section").to_path_buf();
         let name = path.file_stem().unwrap().to_string_lossy().to_string();

@@ -166,7 +166,7 @@ mod tests {
     #[test]
     fn can_find_components_in_page_with_assets() {
         let file = FileInfo::new_page(
-            &Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.md"),
+            Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.md"),
             &PathBuf::new(),
         );
         assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]);

@@ -175,7 +175,7 @@ mod tests {
     #[test]
     fn doesnt_fail_with_multiple_content_directories_in_path() {
         let file = FileInfo::new_page(
-            &Path::new("/home/vincent/code/content/site/content/posts/tutorials/python/index.md"),
+            Path::new("/home/vincent/code/content/site/content/posts/tutorials/python/index.md"),
             &PathBuf::from("/home/vincent/code/content/site"),
         );
         assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]);

@@ -186,7 +186,7 @@ mod tests {
         let mut config = Config::default();
         config.languages.insert("fr".to_owned(), LanguageOptions::default());
         let mut file = FileInfo::new_page(
-            &Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
+            Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
             &PathBuf::new(),
         );
         let res = file.find_language(&config);

@@ -199,7 +199,7 @@ mod tests {
         let mut config = Config::default();
         config.languages.insert("fr".to_owned(), LanguageOptions::default());
         let mut file = FileInfo::new_page(
-            &Path::new("/home/vincent/code/site/content/posts/tutorials/python.en.md"),
+            Path::new("/home/vincent/code/site/content/posts/tutorials/python.en.md"),
             &PathBuf::new(),
         );
         let res = file.find_language(&config);

@@ -212,7 +212,7 @@ mod tests {
         let mut config = Config::default();
         config.languages.insert("fr".to_owned(), LanguageOptions::default());
         let mut file = FileInfo::new_page(
-            &Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"),
+            Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"),
             &PathBuf::new(),
         );
         assert_eq!(file.components, ["posts".to_string(), "tutorials".to_string()]);

@@ -225,7 +225,7 @@ mod tests {
     fn do_nothing_on_unknown_language_in_page_with_i18n_off() {
         let config = Config::default();
         let mut file = FileInfo::new_page(
-            &Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
+            Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
             &PathBuf::new(),
         );
         let res = file.find_language(&config);

@@ -238,7 +238,7 @@ mod tests {
         let mut config = Config::default();
         config.languages.insert("it".to_owned(), LanguageOptions::default());
         let mut file = FileInfo::new_page(
-            &Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
+            Path::new("/home/vincent/code/site/content/posts/tutorials/python.fr.md"),
             &PathBuf::new(),
         );
         let res = file.find_language(&config);

@@ -250,7 +250,7 @@ mod tests {
         let mut config = Config::default();
         config.languages.insert("fr".to_owned(), LanguageOptions::default());
         let mut file = FileInfo::new_section(
-            &Path::new("/home/vincent/code/site/content/posts/tutorials/_index.fr.md"),
+            Path::new("/home/vincent/code/site/content/posts/tutorials/_index.fr.md"),
             &PathBuf::new(),
         );
         let res = file.find_language(&config);

@@ -262,7 +262,7 @@ mod tests {
     #[test]
     fn correct_canonical_for_index() {
         let file = FileInfo::new_page(
-            &Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.md"),
+            Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.md"),
             &PathBuf::new(),
         );
         assert_eq!(

@@ -277,7 +277,7 @@ mod tests {
         let mut config = Config::default();
         config.languages.insert("fr".to_owned(), LanguageOptions::default());
         let mut file = FileInfo::new_page(
-            &Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"),
+            Path::new("/home/vincent/code/site/content/posts/tutorials/python/index.fr.md"),
             &PathBuf::new(),
         );
         let res = file.find_language(&config);

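The test hunks above drop the extra `&` in front of `Path::new(...)` (clippy's `needless_borrow`): `Path::new` already returns a `&Path`, so borrowing it again only creates a `&&Path` that the compiler has to coerce back down. A small sketch; the helper and path are illustrative:

    use std::path::Path;

    fn stem(path: &Path) -> Option<&str> {
        path.file_stem().and_then(|s| s.to_str())
    }

    fn main() {
        // `Path::new` already yields `&Path`, so no extra borrow is needed:
        assert_eq!(stem(Path::new("content/posts/python/index.md")), Some("index"));
        // The pre-change form also compiled, via coercion of `&&Path` to `&Path`:
        assert_eq!(stem(&Path::new("content/posts/python/index.md")), Some("index"));
    }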
@@ -97,16 +97,12 @@ pub struct Page {
 }
 
 impl Page {
-    pub fn new<P: AsRef<Path>>(file_path: P, meta: PageFrontMatter, base_path: &PathBuf) -> Page {
+    pub fn new<P: AsRef<Path>>(file_path: P, meta: PageFrontMatter, base_path: &Path) -> Page {
         let file_path = file_path.as_ref();
 
         Page { file: FileInfo::new_page(file_path, base_path), meta, ..Self::default() }
     }
 
-    pub fn is_draft(&self) -> bool {
-        self.meta.draft
-    }
-
     /// Parse a page given the content of the .md file
     /// Files without front matter or with invalid front matter are considered
     /// erroneous

@@ -114,7 +110,7 @@ impl Page {
         file_path: &Path,
         content: &str,
         config: &Config,
-        base_path: &PathBuf,
+        base_path: &Path,
     ) -> Result<Page> {
         let (meta, content) = split_page_content(file_path, content)?;
         let mut page = Page::new(file_path, meta, base_path);

@@ -205,11 +201,7 @@ impl Page {
     }
 
     /// Read and parse a .md file into a Page struct
-    pub fn from_file<P: AsRef<Path>>(
-        path: P,
-        config: &Config,
-        base_path: &PathBuf,
-    ) -> Result<Page> {
+    pub fn from_file<P: AsRef<Path>>(path: P, config: &Config, base_path: &Path) -> Result<Page> {
         let path = path.as_ref();
         let content = read_file(path)?;
         let mut page = Page::parse(path, &content, config, base_path)?;

@@ -217,7 +209,7 @@ impl Page {
         if page.file.name == "index" {
             let parent_dir = path.parent().unwrap();
             page.assets = find_related_assets(parent_dir, config);
-            page.serialized_assets = page.serialize_assets(&base_path);
+            page.serialized_assets = page.serialize_assets(base_path);
         } else {
             page.assets = vec![];
         }

@@ -249,11 +241,10 @@ impl Page {
             Error::chain(format!("Failed to render content of {}", self.file.path.display()), e)
         })?;
 
-        self.summary = if let Some(s) = res.summary_len.map(|l| &res.body[0..l]) {
-            Some(FOOTNOTES_RE.replace(s, "").into_owned())
-        } else {
-            None
-        };
+        self.summary = res
+            .summary_len
+            .map(|l| &res.body[0..l])
+            .map(|s| FOOTNOTES_RE.replace(s, "").into_owned());
         self.content = res.body;
         self.toc = res.toc;
         self.external_links = res.external_links;

@@ -276,13 +267,13 @@ impl Page {
         context.insert("page", &self.to_serialized(library));
         context.insert("lang", &self.lang);
 
-        render_template(&tpl_name, tera, context, &config.theme).map_err(|e| {
+        render_template(tpl_name, tera, context, &config.theme).map_err(|e| {
             Error::chain(format!("Failed to render page '{}'", self.file.path.display()), e)
         })
     }
 
     /// Creates a vectors of asset URLs.
-    fn serialize_assets(&self, base_path: &PathBuf) -> Vec<String> {
+    fn serialize_assets(&self, base_path: &Path) -> Vec<String> {
         self.assets
             .iter()
             .filter_map(|asset| asset.file_name())

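The summary hunk replaces an `if let Some(..) { Some(..) } else { None }` block with chained `Option::map` calls. A minimal sketch of the equivalence, using an invented stand-in for the footnote-stripping regex:

    // Stand-in for FOOTNOTES_RE.replace(s, ""): it just trims the text here.
    fn strip_footnotes(s: &str) -> String {
        s.trim().to_string()
    }

    fn summarize(body: &str, summary_len: Option<usize>) -> Option<String> {
        // Before:
        // if let Some(s) = summary_len.map(|l| &body[0..l]) {
        //     Some(strip_footnotes(s))
        // } else {
        //     None
        // }
        // After, same result with no explicit branch:
        summary_len.map(|l| &body[0..l]).map(strip_footnotes)
    }

    fn main() {
        let body = "A short post about Zola. <!-- more --> And the rest.";
        assert_eq!(summarize(body, Some(24)), Some("A short post about Zola.".to_string()));
        assert_eq!(summarize(body, None), None);
    }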
@@ -68,7 +68,7 @@ impl Section {
     pub fn new<P: AsRef<Path>>(
         file_path: P,
         meta: SectionFrontMatter,
-        base_path: &PathBuf,
+        base_path: &Path,
     ) -> Section {
         let file_path = file_path.as_ref();
 

@@ -79,7 +79,7 @@ impl Section {
         file_path: &Path,
         content: &str,
         config: &Config,
-        base_path: &PathBuf,
+        base_path: &Path,
     ) -> Result<Section> {
         let (meta, content) = split_section_content(file_path, content)?;
         let mut section = Section::new(file_path, meta, base_path);

@@ -115,7 +115,7 @@ impl Section {
     pub fn from_file<P: AsRef<Path>>(
         path: P,
         config: &Config,
-        base_path: &PathBuf,
+        base_path: &Path,
     ) -> Result<Section> {
         let path = path.as_ref();
         let content = read_file(path)?;

@@ -25,6 +25,7 @@ impl<'a> TranslatedContent<'a> {
     pub fn find_all_sections(section: &'a Section, library: &'a Library) -> Vec<Self> {
         let mut translations = vec![];
 
+        #[allow(clippy::or_fun_call)]
         for key in library
             .translations
             .get(&section.file.canonical)

@@ -47,6 +48,7 @@ impl<'a> TranslatedContent<'a> {
     pub fn find_all_pages(page: &'a Page, library: &'a Library) -> Vec<Self> {
         let mut translations = vec![];
 
+        #[allow(clippy::or_fun_call)]
        for key in
            library.translations.get(&page.file.canonical).or(Some(&HashSet::new())).unwrap().iter()
        {

@@ -165,7 +167,7 @@ impl<'a> SerializingPage<'a> {
             word_count: page.word_count,
             reading_time: page.reading_time,
             assets: &page.serialized_assets,
-            draft: page.is_draft(),
+            draft: page.meta.draft,
             lang: &page.lang,
             lighter,
             heavier,

@@ -181,7 +183,7 @@ impl<'a> SerializingPage<'a> {
 
     /// currently only used in testing
     pub fn get_title(&'a self) -> &'a Option<String> {
-        &self.title
+        self.title
     }
 
     /// Same as from_page but does not fill sibling pages

@@ -194,7 +196,7 @@ impl<'a> SerializingPage<'a> {
             month = Some(d.1);
             day = Some(d.2);
         }
-        let ancestors = if let Some(ref lib) = library {
+        let ancestors = if let Some(lib) = library {
             page.ancestors
                 .iter()
                 .map(|k| lib.get_section_by_key(*k).file.relative.as_str())

@@ -203,7 +205,7 @@ impl<'a> SerializingPage<'a> {
             vec![]
         };
 
-        let translations = if let Some(ref lib) = library {
+        let translations = if let Some(lib) = library {
             TranslatedContent::find_all_pages(page, lib)
         } else {
             vec![]

@@ -231,7 +233,7 @@ impl<'a> SerializingPage<'a> {
             word_count: page.word_count,
             reading_time: page.reading_time,
             assets: &page.serialized_assets,
-            draft: page.is_draft(),
+            draft: page.meta.draft,
             lang: &page.lang,
             lighter: None,
             heavier: None,

@@ -251,6 +253,7 @@ pub struct SerializingSection<'a> {
     relative_path: &'a str,
     content: &'a str,
     permalink: &'a str,
+    draft: bool,
     ancestors: Vec<&'a str>,
     title: &'a Option<String>,
     description: &'a Option<String>,

@@ -290,6 +293,7 @@ impl<'a> SerializingSection<'a> {
         SerializingSection {
             relative_path: &section.file.relative,
             ancestors,
+            draft: section.meta.draft,
             content: &section.content,
             permalink: &section.permalink,
             title: &section.meta.title,

@@ -313,7 +317,7 @@ impl<'a> SerializingSection<'a> {
         let mut ancestors = vec![];
         let mut translations = vec![];
         let mut subsections = vec![];
-        if let Some(ref lib) = library {
+        if let Some(lib) = library {
             ancestors = section
                 .ancestors
                 .iter()

@@ -327,6 +331,7 @@ impl<'a> SerializingSection<'a> {
         SerializingSection {
             relative_path: &section.file.relative,
             ancestors,
+            draft: section.meta.draft,
             content: &section.content,
             permalink: &section.permalink,
             title: &section.meta.title,

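The `if let Some(ref lib) = library` to `if let Some(lib) = library` hunks rely on the fact that when the value being matched already holds a reference, the binding is a reference anyway, so the explicit `ref` only adds an extra indirection. A sketch with a hypothetical type and a signature chosen to mirror the pattern, not Zola's actual one:

    struct Library {
        pages: usize,
    }

    fn count_pages(library: Option<&Library>) -> usize {
        // `lib` binds the `&Library` inside the Option directly; `ref` would be redundant.
        if let Some(lib) = library {
            lib.pages
        } else {
            0
        }
    }

    fn main() {
        let lib = Library { pages: 12 };
        assert_eq!(count_pages(Some(&lib)), 12);
        assert_eq!(count_pages(None), 0);
    }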
@ -84,7 +84,7 @@ impl Library {
|
|||
let rel_path = section.path.clone();
|
||||
|
||||
let mut entries = vec![rel_path];
|
||||
entries.extend(section.meta.aliases.iter().map(|a| a.clone()).collect::<Vec<String>>());
|
||||
entries.extend(section.meta.aliases.to_vec());
|
||||
self.insert_reverse_aliases(entries, §ion.file.relative);
|
||||
|
||||
let key = self.sections.insert(section);
|
||||
|
@ -98,7 +98,7 @@ impl Library {
|
|||
let rel_path = page.path.clone();
|
||||
|
||||
let mut entries = vec![rel_path];
|
||||
entries.extend(page.meta.aliases.iter().map(|a| a.clone()).collect::<Vec<String>>());
|
||||
entries.extend(page.meta.aliases.to_vec());
|
||||
self.insert_reverse_aliases(entries, &page.file.relative);
|
||||
|
||||
let key = self.pages.insert(page);
|
||||
|
@ -252,7 +252,7 @@ impl Library {
|
|||
}
|
||||
|
||||
for section in self.sections.values_mut() {
|
||||
if let Some(ref children) = subsections.get(§ion.file.path) {
|
||||
if let Some(children) = subsections.get(§ion.file.path) {
|
||||
let mut children: Vec<_> = children.iter().map(|p| sections[p]).collect();
|
||||
children.sort_by(|a, b| sections_weight[a].cmp(§ions_weight[b]));
|
||||
section.subsections = children;
|
||||
|
@ -446,12 +446,9 @@ mod tests {
|
|||
#[test]
|
||||
fn can_find_no_collisions() {
|
||||
let mut library = Library::new(10, 10, false);
|
||||
let mut page = Page::default();
|
||||
page.path = "hello".to_string();
|
||||
let mut page2 = Page::default();
|
||||
page2.path = "hello-world".to_string();
|
||||
let mut section = Section::default();
|
||||
section.path = "blog".to_string();
|
||||
let page = Page { path: "hello".to_string(), ..Default::default() };
|
||||
let page2 = Page { path: "hello-world".to_string(), ..Default::default() };
|
||||
let section = Section { path: "blog".to_string(), ..Default::default() };
|
||||
library.insert_page(page);
|
||||
library.insert_page(page2);
|
||||
library.insert_section(section);
|
||||
|
@@ -463,14 +460,11 @@ mod tests {
 #[test]
 fn can_find_collisions_between_pages() {
 let mut library = Library::new(10, 10, false);
-let mut page = Page::default();
-page.path = "hello".to_string();
+let mut page = Page { path: "hello".to_string(), ..Default::default() };
 page.file.relative = "hello".to_string();
-let mut page2 = Page::default();
-page2.path = "hello".to_string();
+let mut page2 = Page { path: "hello".to_string(), ..Default::default() };
 page2.file.relative = "hello-world".to_string();
-let mut section = Section::default();
-section.path = "blog".to_string();
+let mut section = Section { path: "blog".to_string(), ..Default::default() };
 section.file.relative = "hello-world".to_string();
 library.insert_page(page.clone());
 library.insert_page(page2.clone());
@@ -486,15 +480,12 @@ mod tests {
 #[test]
 fn can_find_collisions_with_an_alias() {
 let mut library = Library::new(10, 10, false);
-let mut page = Page::default();
-page.path = "hello".to_string();
+let mut page = Page { path: "hello".to_string(), ..Default::default() };
 page.file.relative = "hello".to_string();
-let mut page2 = Page::default();
-page2.path = "hello-world".to_string();
+let mut page2 = Page { path: "hello".to_string(), ..Default::default() };
 page2.file.relative = "hello-world".to_string();
 page2.meta.aliases = vec!["hello".to_string()];
-let mut section = Section::default();
-section.path = "blog".to_string();
+let mut section = Section { path: "blog".to_string(), ..Default::default() };
 section.file.relative = "hello-world".to_string();
 library.insert_page(page.clone());
 library.insert_page(page2.clone());
@@ -255,15 +255,17 @@ mod tests {
 use crate::library::Library;
 use crate::taxonomies::{Taxonomy, TaxonomyItem};
 use config::Taxonomy as TaxonomyConfig;
 use front_matter::SectionFrontMatter;

 use super::Paginator;

 fn create_section(is_index: bool, paginate_reversed: bool) -> Section {
-let mut f = SectionFrontMatter::default();
-f.paginate_by = Some(2);
-f.paginate_path = "page".to_string();
-f.paginate_reversed = paginate_reversed;
+let f = front_matter::SectionFrontMatter {
+paginate_by: Some(2),
+paginate_path: "page".to_string(),
+paginate_reversed,
+..Default::default()
+};
 let mut s = Section::new("content/_index.md", f, &PathBuf::new());
 if !is_index {
 s.path = "/posts/".to_string();
@@ -31,7 +31,7 @@ pub fn sort_pages_by_date(
 can_be_sorted.par_sort_unstable_by(|a, b| {
 let ord = b.1.unwrap().cmp(&a.1.unwrap());
 if ord == Ordering::Equal {
-a.2.cmp(&b.2)
+a.2.cmp(b.2)
 } else {
 ord
 }
@@ -53,7 +53,7 @@ pub fn sort_pages_by_title(
 can_be_sorted.par_sort_unstable_by(|a, b| {
 let ord = natural_lexical_cmp(a.1.unwrap(), b.1.unwrap());
 if ord == Ordering::Equal {
-a.2.cmp(&b.2)
+a.2.cmp(b.2)
 } else {
 ord
 }
@@ -74,7 +74,7 @@ pub fn sort_pages_by_weight(
 can_be_sorted.par_sort_unstable_by(|a, b| {
 let ord = a.1.unwrap().cmp(&b.1.unwrap());
 if ord == Ordering::Equal {
-a.2.cmp(&b.2)
+a.2.cmp(b.2)
 } else {
 ord
 }
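Note on the three sorting hunks above: each comparator orders by the primary key (date, title, or weight) and falls back to the third tuple field to break ties, which keeps the output deterministic. A minimal sketch of that pattern, assuming a simplified element shape of (key, sort value, path-like &str) rather than the real tuples built by the sort functions:

use std::cmp::Ordering;

// Hypothetical, simplified element shape; only illustrates the tie-break idea.
fn cmp_with_tiebreak(a: &(usize, i64, &str), b: &(usize, i64, &str)) -> Ordering {
    let ord = b.1.cmp(&a.1); // primary key first (descending here, as for dates)
    if ord == Ordering::Equal {
        a.2.cmp(b.2) // tie-break on the third field so equal keys still sort stably
    } else {
        ord
    }
}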
@@ -119,21 +119,19 @@ mod tests {
 use front_matter::PageFrontMatter;

 fn create_page_with_date(date: &str) -> Page {
-let mut front_matter = PageFrontMatter::default();
-front_matter.date = Some(date.to_string());
+let mut front_matter =
+PageFrontMatter { date: Some(date.to_string()), ..Default::default() };
 front_matter.date_to_datetime();
 Page::new("content/hello.md", front_matter, &PathBuf::new())
 }

 fn create_page_with_title(title: &str) -> Page {
-let mut front_matter = PageFrontMatter::default();
-front_matter.title = Some(title.to_string());
+let front_matter = PageFrontMatter { title: Some(title.to_string()), ..Default::default() };
 Page::new("content/hello.md", front_matter, &PathBuf::new())
 }

 fn create_page_with_weight(weight: usize) -> Page {
-let mut front_matter = PageFrontMatter::default();
-front_matter.weight = Some(weight);
+let front_matter = PageFrontMatter { weight: Some(weight), ..Default::default() };
 Page::new("content/hello.md", front_matter, &PathBuf::new())
 }

@@ -251,11 +249,11 @@ mod tests {
 fn can_find_siblings() {
 let mut dense = DenseSlotMap::new();
 let page1 = create_page_with_weight(1);
-let key1 = dense.insert(page1.clone());
+let key1 = dense.insert(page1);
 let page2 = create_page_with_weight(2);
-let key2 = dense.insert(page2.clone());
+let key2 = dense.insert(page2);
 let page3 = create_page_with_weight(3);
-let key3 = dense.insert(page3.clone());
+let key3 = dense.insert(page3);

 let input = vec![key1, key2, key3];

@@ -263,7 +263,7 @@ pub fn find_taxonomies(config: &Config, library: &Library) -> Result<Vec<Taxonom
 let mut all_taxonomies = HashMap::new();
 for (key, page) in library.pages() {
 for (name, taxo_term) in &page.meta.taxonomies {
-let taxo_slug = slugify_paths(&name, config.slugify.taxonomies);
+let taxo_slug = slugify_paths(name, config.slugify.taxonomies);
 let taxo_key = format!("{}-{}", &taxo_slug, page.lang);
 if taxonomies_def.contains_key(&taxo_key) {
 all_taxonomies.entry(taxo_key.clone()).or_insert_with(HashMap::new);
@@ -539,8 +539,7 @@ mod tests {
 page2.lang = config.default_language.clone();
 library.insert_page(page2);

-let mut page3 = Page::default();
-page3.lang = "fr".to_string();
+let mut page3 = Page { lang: "fr".to_string(), ..Default::default() };
 let mut taxo_page3 = HashMap::new();
 taxo_page3.insert("tags".to_string(), vec!["rust".to_string()]);
 taxo_page3.insert("auteurs".to_string(), vec!["Vincent Prouillet".to_string()]);
@@ -617,8 +616,7 @@ mod tests {
 };
 config.languages.insert("fr".to_owned(), lang_options);

-let mut page = Page::default();
-page.lang = "fr".to_string();
+let mut page = Page { lang: "fr".to_string(), ..Default::default() };
 let mut taxo_page = HashMap::new();
 taxo_page.insert("catégories".to_string(), vec!["Écologie".to_string()]);
 page.meta.taxonomies = taxo_page;
@@ -668,8 +666,7 @@ mod tests {
 page2.lang = config.default_language.clone();
 library.insert_page(page2);

-let mut page3 = Page::default();
-page3.lang = "fr".to_string();
+let mut page3 = Page { lang: "fr".to_string(), ..Default::default() };
 let mut taxo_page3 = HashMap::new();
 taxo_page3.insert("tags".to_string(), vec!["rust".to_string()]);
 taxo_page3.insert("auteurs".to_string(), vec!["Vincent Prouillet".to_string()]);
@@ -13,7 +13,11 @@ errors = { path = "../errors" }
 [dependencies.reqwest]
 version = "0.11"
 default-features = false
-features = ["blocking", "rustls-tls"]
+features = ["blocking"]

 [dev-dependencies]
 mockito = "0.30"
+
+[features]
+rust-tls = ["reqwest/rustls-tls"]
+native-tls = ["reqwest/default-tls"]
@@ -7,8 +7,7 @@ include = ["src/**/*"]

 [dependencies]
 tera = { version = "1", features = ["preserve_order"] }
-# TODO: go back to version 4/5 once https://github.com/trishume/syntect/pull/337 is merged
-syntect = { git = "https://github.com/Keats/syntect.git", branch = "scopestack" }
+syntect = "4"
 pulldown-cmark = { version = "0.8", default-features = false }
 serde = "1"
 serde_derive = "1"
@ -8,7 +8,7 @@ use front_matter::InsertAnchor;
|
|||
use rendering::{render_content, render_shortcodes, RenderContext};
|
||||
use tera::Tera;
|
||||
|
||||
static CONTENT: &'static str = r#"
|
||||
static CONTENT: &str = r#"
|
||||
# Modus cognitius profanam ne duae virtutis mundi
|
||||
|
||||
## Ut vita
|
||||
|
@ -86,7 +86,15 @@ fn bench_render_content_with_highlighting(b: &mut test::Bencher) {
|
|||
tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap();
|
||||
let permalinks_ctx = HashMap::new();
|
||||
let config = Config::default();
|
||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);
|
||||
let current_page_permalink = "";
|
||||
let context = RenderContext::new(
|
||||
&tera,
|
||||
&config,
|
||||
"",
|
||||
current_page_permalink,
|
||||
&permalinks_ctx,
|
||||
InsertAnchor::None,
|
||||
);
|
||||
b.iter(|| render_content(CONTENT, &context).unwrap());
|
||||
}
|
||||
|
||||
|
@ -97,7 +105,15 @@ fn bench_render_content_without_highlighting(b: &mut test::Bencher) {
|
|||
let permalinks_ctx = HashMap::new();
|
||||
let mut config = Config::default();
|
||||
config.markdown.highlight_code = false;
|
||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);
|
||||
let current_page_permalink = "";
|
||||
let context = RenderContext::new(
|
||||
&tera,
|
||||
&config,
|
||||
"",
|
||||
current_page_permalink,
|
||||
&permalinks_ctx,
|
||||
InsertAnchor::None,
|
||||
);
|
||||
b.iter(|| render_content(CONTENT, &context).unwrap());
|
||||
}
|
||||
|
||||
|
@ -108,7 +124,15 @@ fn bench_render_content_no_shortcode(b: &mut test::Bencher) {
|
|||
let mut config = Config::default();
|
||||
config.markdown.highlight_code = false;
|
||||
let permalinks_ctx = HashMap::new();
|
||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);
|
||||
let current_page_permalink = "";
|
||||
let context = RenderContext::new(
|
||||
&tera,
|
||||
&config,
|
||||
"",
|
||||
current_page_permalink,
|
||||
&permalinks_ctx,
|
||||
InsertAnchor::None,
|
||||
);
|
||||
|
||||
b.iter(|| render_content(&content2, &context).unwrap());
|
||||
}
|
||||
|
@ -119,7 +143,15 @@ fn bench_render_shortcodes_one_present(b: &mut test::Bencher) {
|
|||
tera.add_raw_template("shortcodes/youtube.html", "{{id}}").unwrap();
|
||||
let config = Config::default();
|
||||
let permalinks_ctx = HashMap::new();
|
||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);
|
||||
let current_page_permalink = "";
|
||||
let context = RenderContext::new(
|
||||
&tera,
|
||||
&config,
|
||||
"",
|
||||
current_page_permalink,
|
||||
&permalinks_ctx,
|
||||
InsertAnchor::None,
|
||||
);
|
||||
|
||||
b.iter(|| render_shortcodes(CONTENT, &context));
|
||||
}
|
||||
|
@ -132,7 +164,15 @@ fn bench_render_content_no_shortcode_with_emoji(b: &mut test::Bencher) {
|
|||
config.markdown.highlight_code = false;
|
||||
config.markdown.render_emoji = true;
|
||||
let permalinks_ctx = HashMap::new();
|
||||
let context = RenderContext::new(&tera, &config, "", &permalinks_ctx, InsertAnchor::None);
|
||||
let current_page_permalink = "";
|
||||
let context = RenderContext::new(
|
||||
&tera,
|
||||
&config,
|
||||
"",
|
||||
current_page_permalink,
|
||||
&permalinks_ctx,
|
||||
InsertAnchor::None,
|
||||
);
|
||||
|
||||
b.iter(|| render_content(&content2, &context).unwrap());
|
||||
}
|
||||
|
|
|
@@ -4,9 +4,10 @@ use config::highlighting::{SyntaxAndTheme, CLASS_STYLE};
 use syntect::easy::HighlightLines;
 use syntect::highlighting::{Color, Theme};
 use syntect::html::{
-styled_line_to_highlighted_html, tokens_to_classed_spans, ClassStyle, IncludeBackground,
+line_tokens_to_classed_spans, styled_line_to_highlighted_html, ClassStyle, IncludeBackground,
 };
 use syntect::parsing::{ParseState, ScopeStack, SyntaxReference, SyntaxSet};
+use tera::escape_html;

 /// Not public, but from syntect::html
 fn write_css_color(s: &mut String, c: Color) {
@@ -35,9 +36,9 @@ impl<'config> ClassHighlighter<'config> {
 /// *Note:* This function requires `line` to include a newline at the end and
 /// also use of the `load_defaults_newlines` version of the syntaxes.
 pub fn highlight_line(&mut self, line: &str) -> String {
-debug_assert!(line.ends_with("\n"));
-let parsed_line = self.parse_state.parse_line(line, &self.syntax_set);
-let (formatted_line, delta) = tokens_to_classed_spans(
+debug_assert!(line.ends_with('\n'));
+let parsed_line = self.parse_state.parse_line(line, self.syntax_set);
+let (formatted_line, delta) = line_tokens_to_classed_spans(
 line,
 parsed_line.as_slice(),
 CLASS_STYLE,
@@ -80,9 +81,12 @@ impl<'config> InlineHighlighter<'config> {
 }

 pub fn highlight_line(&mut self, line: &str) -> String {
-let regions = self.h.highlight(line, &self.syntax_set);
+let regions = self.h.highlight(line, self.syntax_set);
 // TODO: add a param like `IncludeBackground` for `IncludeForeground` in syntect
-let highlighted = styled_line_to_highlighted_html(&regions, IncludeBackground::IfDifferent(self.bg_color));
+let highlighted = styled_line_to_highlighted_html(
+&regions,
+IncludeBackground::IfDifferent(self.bg_color),
+);
 highlighted.replace(&self.fg_color, "")
 }
 }
@@ -113,7 +117,7 @@ impl<'config> SyntaxHighlighter<'config> {
 match self {
 Inlined(h) => h.highlight_line(line),
 Classed(h) => h.highlight_line(line),
-NoHighlight => line.to_owned(),
+NoHighlight => escape_html(line),
 }
 }

@ -171,7 +175,7 @@ impl<'config> SyntaxHighlighter<'config> {
|
|||
&mut styles,
|
||||
h.theme.settings.line_highlight.unwrap_or(Color { r: 255, g: 255, b: 0, a: 0 }),
|
||||
);
|
||||
styles.push_str(";");
|
||||
styles.push(';');
|
||||
Some(styles)
|
||||
}
|
||||
}
|
||||
|
@ -194,7 +198,7 @@ mod tests {
|
|||
let mut highlighter =
|
||||
ClassHighlighter::new(syntax_and_theme.syntax, syntax_and_theme.syntax_set);
|
||||
let mut out = String::new();
|
||||
for line in LinesWithEndings::from(&code) {
|
||||
for line in LinesWithEndings::from(code) {
|
||||
out.push_str(&highlighter.highlight_line(line));
|
||||
}
|
||||
out.push_str(&highlighter.finalize());
|
||||
|
@ -216,11 +220,25 @@ mod tests {
|
|||
syntax_and_theme.theme.unwrap(),
|
||||
);
|
||||
let mut out = String::new();
|
||||
for line in LinesWithEndings::from(&code) {
|
||||
for line in LinesWithEndings::from(code) {
|
||||
out.push_str(&highlighter.highlight_line(line));
|
||||
}
|
||||
|
||||
assert!(out.starts_with(r#"<span style="color"#));
|
||||
assert!(out.ends_with("</span>"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn no_highlight_escapes_html() {
|
||||
let mut config = Config::default();
|
||||
config.markdown.highlight_code = false;
|
||||
let code = "<script>alert('hello')</script>";
|
||||
let syntax_and_theme = resolve_syntax_and_theme(Some("py"), &config);
|
||||
let mut highlighter = SyntaxHighlighter::new(false, syntax_and_theme);
|
||||
let mut out = String::new();
|
||||
for line in LinesWithEndings::from(&code) {
|
||||
out.push_str(&highlighter.highlight_line(line));
|
||||
}
|
||||
assert!(!out.contains("<script>"));
|
||||
}
|
||||
}
|
||||
|
|
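Context for the highlighting changes above: when no syntax highlighting is applied, the raw line now goes through HTML escaping before being emitted, which is what the new no_highlight_escapes_html test exercises. A minimal sketch of the idea, using the escape_html helper this file now imports from tera (sketch only, not the actual zola code path):

use tera::escape_html;

// Without escaping, a fenced block containing `<script>` would be emitted as live HTML;
// escaping turns it into `&lt;script&gt;` so it renders as visible text.
fn emit_unhighlighted_line(line: &str, out: &mut String) {
    out.push_str(&escape_html(line));
}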
|
@ -24,8 +24,8 @@ fn opening_html(
|
|||
|
||||
if let Some(lang) = language {
|
||||
classes.push_str("language-");
|
||||
classes.push_str(&lang);
|
||||
classes.push_str(" ");
|
||||
classes.push_str(lang);
|
||||
classes.push(' ');
|
||||
|
||||
html.push_str(" data-lang=\"");
|
||||
html.push_str(lang);
|
||||
|
@ -114,7 +114,7 @@ impl<'config> CodeBlock<'config> {
|
|||
}
|
||||
|
||||
// syntect leaking here in this file
|
||||
for (i, line) in LinesWithEndings::from(&content).enumerate() {
|
||||
for (i, line) in LinesWithEndings::from(content).enumerate() {
|
||||
let one_indexed = i + 1;
|
||||
// first do we need to skip that line?
|
||||
let mut skip = false;
|
||||
|
@ -143,10 +143,10 @@ impl<'config> CodeBlock<'config> {
|
|||
buffer.push_str("<mark");
|
||||
if let Some(ref s) = mark_style {
|
||||
buffer.push_str(" style=\"");
|
||||
buffer.push_str(&s);
|
||||
buffer.push_str(s);
|
||||
buffer.push_str("\">");
|
||||
} else {
|
||||
buffer.push_str(">")
|
||||
buffer.push('>')
|
||||
}
|
||||
buffer.push_str(&num);
|
||||
buffer.push_str("</mark>");
|
||||
|
@ -161,10 +161,10 @@ impl<'config> CodeBlock<'config> {
|
|||
buffer.push_str("<mark");
|
||||
if let Some(ref s) = mark_style {
|
||||
buffer.push_str(" style=\"");
|
||||
buffer.push_str(&s);
|
||||
buffer.push_str(s);
|
||||
buffer.push_str("\">");
|
||||
} else {
|
||||
buffer.push_str(">")
|
||||
buffer.push('>')
|
||||
}
|
||||
buffer.push_str(&highlighted_line);
|
||||
buffer.push_str("</mark>");
|
||||
|
|
|
@ -19,5 +19,5 @@ pub fn render_content(content: &str, context: &RenderContext) -> Result<markdown
|
|||
return Ok(html);
|
||||
}
|
||||
|
||||
markdown_to_html(&content, context)
|
||||
markdown_to_html(content, context)
|
||||
}
|
||||
|
|
|
@ -102,7 +102,7 @@ fn fix_link(
|
|||
// - it could be a link to a co-located asset
|
||||
// - it could be a normal link
|
||||
let result = if link.starts_with("@/") {
|
||||
match resolve_internal_link(&link, &context.permalinks) {
|
||||
match resolve_internal_link(link, &context.permalinks) {
|
||||
Ok(resolved) => {
|
||||
internal_links.push((resolved.md_path, resolved.anchor));
|
||||
resolved.permalink
|
||||
|
@ -111,7 +111,7 @@ fn fix_link(
|
|||
return Err(format!("Relative link {} not found.", link).into());
|
||||
}
|
||||
}
|
||||
} else if is_colocated_asset_link(&link) {
|
||||
} else if is_colocated_asset_link(link) {
|
||||
format!("{}{}", context.current_page_permalink, link)
|
||||
} else {
|
||||
if is_external_link(link) {
|
||||
|
@ -163,7 +163,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
|
|||
let path = context
|
||||
.tera_context
|
||||
.get("page")
|
||||
.or(context.tera_context.get("section"))
|
||||
.or_else(|| context.tera_context.get("section"))
|
||||
.map(|x| x.as_object().unwrap().get("relative_path").unwrap().as_str().unwrap());
|
||||
// the rendered html
|
||||
let mut html = String::with_capacity(content.len());
|
||||
|
@ -213,7 +213,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
|
|||
}
|
||||
_ => FenceSettings::new(""),
|
||||
};
|
||||
let (block, begin) = CodeBlock::new(fence, &context.config, path);
|
||||
let (block, begin) = CodeBlock::new(fence, context.config, path);
|
||||
code_block = Some(block);
|
||||
Event::Html(begin.into())
|
||||
}
|
||||
|
@ -344,7 +344,7 @@ pub fn markdown_to_html(content: &str, context: &RenderContext) -> Result<Render
|
|||
c.insert("level", &heading_ref.level);
|
||||
|
||||
let anchor_link = utils::templates::render_template(
|
||||
&ANCHOR_LINK_TEMPLATE,
|
||||
ANCHOR_LINK_TEMPLATE,
|
||||
&context.tera,
|
||||
c,
|
||||
&None,
|
||||
|
|
|
@ -110,7 +110,7 @@ fn render_shortcode(
|
|||
for (key, value) in args.iter() {
|
||||
tera_context.insert(key, value);
|
||||
}
|
||||
if let Some(ref b) = body {
|
||||
if let Some(b) = body {
|
||||
// Trimming right to avoid most shortcodes with bodies ending up with a HTML new line
|
||||
tera_context.insert("body", b.trim_end());
|
||||
}
|
||||
|
@ -271,7 +271,7 @@ mod tests {
|
|||
let config = Config::default_for_test();
|
||||
let permalinks = HashMap::new();
|
||||
let context = RenderContext::new(
|
||||
&tera,
|
||||
tera,
|
||||
&config,
|
||||
&config.default_language,
|
||||
"",
|
||||
|
|
|
@ -106,7 +106,7 @@ fn fill_index(
|
|||
}
|
||||
|
||||
if search_config.include_content {
|
||||
let body = AMMONIA.clean(&content).to_string();
|
||||
let body = AMMONIA.clean(content).to_string();
|
||||
if let Some(truncate_len) = search_config.truncate_content_length {
|
||||
// Not great for unicode
|
||||
// TODO: fix it like the truncate in Tera
|
||||
|
|
|
@ -31,3 +31,8 @@ link_checker = { path = "../link_checker" }
|
|||
[dev-dependencies]
|
||||
tempfile = "3"
|
||||
path-slash = "0.1.4"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
rust-tls = ["templates/rust-tls", "link_checker/rust-tls"]
|
||||
native-tls = ["templates/native-tls", "link_checker/native-tls"]
|
||||
|
|
|
@ -8,7 +8,7 @@ use site::Site;
|
|||
|
||||
#[bench]
|
||||
fn bench_loading_small_blog(b: &mut test::Bencher) {
|
||||
let mut path = env::current_dir().unwrap().to_path_buf();
|
||||
let mut path = env::current_dir().unwrap();
|
||||
path.push("benches");
|
||||
path.push("small-blog");
|
||||
let config_file = path.join("config.toml");
|
||||
|
@ -19,12 +19,12 @@ fn bench_loading_small_blog(b: &mut test::Bencher) {
|
|||
|
||||
#[bench]
|
||||
fn bench_loading_small_blog_with_syntax_highlighting(b: &mut test::Bencher) {
|
||||
let mut path = env::current_dir().unwrap().to_path_buf();
|
||||
let mut path = env::current_dir().unwrap();
|
||||
path.push("benches");
|
||||
path.push("small-blog");
|
||||
let config_file = path.join("config.toml");
|
||||
let mut site = Site::new(&path, &config_file).unwrap();
|
||||
site.config.highlight_code = true;
|
||||
site.config.markdown.highlight_code = true;
|
||||
|
||||
b.iter(|| site.load().unwrap());
|
||||
}
|
||||
|
@ -100,7 +100,7 @@ fn bench_loading_small_blog_with_syntax_highlighting(b: &mut test::Bencher) {
|
|||
|
||||
#[bench]
|
||||
fn bench_loading_small_kb(b: &mut test::Bencher) {
|
||||
let mut path = env::current_dir().unwrap().to_path_buf();
|
||||
let mut path = env::current_dir().unwrap();
|
||||
path.push("benches");
|
||||
path.push("small-kb");
|
||||
let config_file = path.join("config.toml");
|
||||
|
@ -111,12 +111,12 @@ fn bench_loading_small_kb(b: &mut test::Bencher) {
|
|||
|
||||
#[bench]
|
||||
fn bench_loading_small_kb_with_syntax_highlighting(b: &mut test::Bencher) {
|
||||
let mut path = env::current_dir().unwrap().to_path_buf();
|
||||
let mut path = env::current_dir().unwrap();
|
||||
path.push("benches");
|
||||
path.push("small-kb");
|
||||
let config_file = path.join("config.toml");
|
||||
let mut site = Site::new(&path, &config_file).unwrap();
|
||||
site.config.highlight_code = true;
|
||||
site.config.markdown.highlight_code = true;
|
||||
|
||||
b.iter(|| site.load().unwrap());
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ use site::Site;
|
|||
use tempfile::tempdir;
|
||||
|
||||
fn setup_site(name: &str) -> Site {
|
||||
let mut path = env::current_dir().unwrap().to_path_buf();
|
||||
let mut path = env::current_dir().unwrap();
|
||||
path.push("benches");
|
||||
path.push(name);
|
||||
let config_file = path.join("config.toml");
|
||||
|
@ -69,7 +69,7 @@ fn bench_render_paginated(b: &mut test::Bencher) {
|
|||
site.set_output_path(&public);
|
||||
let library = site.library.read().unwrap();
|
||||
let section = library.sections_values()[0];
|
||||
let paginator = Paginator::from_section(§ion, &library);
|
||||
let paginator = Paginator::from_section(section, &library);
|
||||
|
||||
b.iter(|| site.render_paginated(Vec::new(), &paginator));
|
||||
}
|
||||
|
|
|
@ -63,7 +63,7 @@ pub fn render_feed(
|
|||
context.insert("lang", lang);
|
||||
|
||||
let feed_filename = &site.config.feed_filename;
|
||||
let feed_url = if let Some(ref base) = base_path {
|
||||
let feed_url = if let Some(base) = base_path {
|
||||
site.config.make_permalink(&base.join(feed_filename).to_string_lossy().replace('\\', "/"))
|
||||
} else {
|
||||
site.config.make_permalink(feed_filename)
|
||||
|
|
|
@ -78,8 +78,7 @@ impl Site {
|
|||
|
||||
if let Some(theme) = config.theme.clone() {
|
||||
// Grab data from the extra section of the theme
|
||||
config
|
||||
.merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"), &theme)?;
|
||||
config.merge_with_theme(path.join("themes").join(&theme).join("theme.toml"), &theme)?;
|
||||
}
|
||||
|
||||
let tera = load_tera(path, &config)?;
|
||||
|
@ -288,10 +287,10 @@ impl Site {
|
|||
tpls::register_tera_global_fns(self);
|
||||
|
||||
// Needs to be done after rendering markdown as we only get the anchors at that point
|
||||
link_checking::check_internal_links_with_anchors(&self)?;
|
||||
link_checking::check_internal_links_with_anchors(self)?;
|
||||
|
||||
if self.config.is_in_check_mode() {
|
||||
link_checking::check_external_links(&self)?;
|
||||
link_checking::check_external_links(self)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@ -301,7 +300,7 @@ impl Site {
|
|||
/// a _index.md to render the index page at the root of the site
|
||||
pub fn create_default_index_sections(&mut self) -> Result<()> {
|
||||
for (index_path, lang) in self.index_section_paths() {
|
||||
if let Some(ref index_section) = self.library.read().unwrap().get_section(&index_path) {
|
||||
if let Some(index_section) = self.library.read().unwrap().get_section(&index_path) {
|
||||
if self.config.build_search_index && !index_section.meta.in_search_index {
|
||||
bail!(
|
||||
"You have enabled search in the config but disabled it in the index section: \
|
||||
|
@ -406,7 +405,7 @@ impl Site {
|
|||
self.populate_taxonomies()?;
|
||||
let library = self.library.read().unwrap();
|
||||
let page = library.get_page(&path).unwrap();
|
||||
self.render_page(&page)
|
||||
self.render_page(page)
|
||||
}
|
||||
|
||||
/// Add a section to the site
|
||||
|
@ -431,14 +430,14 @@ impl Site {
|
|||
self.populate_sections();
|
||||
let library = self.library.read().unwrap();
|
||||
let section = library.get_section(&path).unwrap();
|
||||
self.render_section(§ion, true)
|
||||
self.render_section(section, true)
|
||||
}
|
||||
|
||||
/// Finds the insert_anchor for the parent section of the directory at `path`.
|
||||
/// Defaults to `AnchorInsert::None` if no parent section found
|
||||
pub fn find_parent_section_insert_anchor(
|
||||
&self,
|
||||
parent_path: &PathBuf,
|
||||
parent_path: &Path,
|
||||
lang: &str,
|
||||
) -> InsertAnchor {
|
||||
let parent = if lang != self.config.default_language {
|
||||
|
@ -578,7 +577,7 @@ impl Site {
|
|||
Ok(current_path)
|
||||
}
|
||||
|
||||
fn copy_asset(&self, src: &Path, dest: &PathBuf) -> Result<()> {
|
||||
fn copy_asset(&self, src: &Path, dest: &Path) -> Result<()> {
|
||||
copy_file_if_needed(src, dest, self.config.hard_link_static)
|
||||
}
|
||||
|
||||
|
@ -594,7 +593,7 @@ impl Site {
|
|||
for asset in &page.assets {
|
||||
let asset_path = asset.as_path();
|
||||
self.copy_asset(
|
||||
&asset_path,
|
||||
asset_path,
|
||||
¤t_path
|
||||
.join(asset_path.file_name().expect("Couldn't get filename from page asset")),
|
||||
)?;
|
||||
|
@ -664,7 +663,7 @@ impl Site {
|
|||
}
|
||||
let pages =
|
||||
library.pages_values().iter().filter(|p| &p.lang == code).cloned().collect();
|
||||
self.render_feed(pages, Some(&PathBuf::from(code)), &code, |c| c)?;
|
||||
self.render_feed(pages, Some(&PathBuf::from(code)), code, |c| c)?;
|
||||
start = log_time(start, "Generated feed in other language");
|
||||
}
|
||||
self.render_themes_css()?;
|
||||
|
@ -723,7 +722,7 @@ impl Site {
|
|||
&self.output_path.join(&format!("search_index.{}.js", &code)),
|
||||
&format!(
|
||||
"window.searchIndex = {};",
|
||||
search::build_index(&code, &self.library.read().unwrap(), &self.config)?
|
||||
search::build_index(code, &self.library.read().unwrap(), &self.config)?
|
||||
),
|
||||
)?;
|
||||
}
|
||||
|
@ -748,7 +747,7 @@ impl Site {
|
|||
}
|
||||
None => "index.html",
|
||||
};
|
||||
let content = render_redirect_template(&permalink, &self.tera)?;
|
||||
let content = render_redirect_template(permalink, &self.tera)?;
|
||||
self.write_content(&split, page_name, content, false)?;
|
||||
Ok(())
|
||||
}
|
||||
|
@ -760,12 +759,12 @@ impl Site {
|
|||
let library = self.library.read().unwrap();
|
||||
for (_, page) in library.pages() {
|
||||
for alias in &page.meta.aliases {
|
||||
self.render_alias(&alias, &page.permalink)?;
|
||||
self.render_alias(alias, &page.permalink)?;
|
||||
}
|
||||
}
|
||||
for (_, section) in library.sections() {
|
||||
for alias in §ion.meta.aliases {
|
||||
self.render_alias(&alias, §ion.permalink)?;
|
||||
self.render_alias(alias, §ion.permalink)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
@ -832,7 +831,7 @@ impl Site {
|
|||
if taxonomy.kind.is_paginated() {
|
||||
self.render_paginated(
|
||||
comp.clone(),
|
||||
&Paginator::from_taxonomy(&taxonomy, item, &library),
|
||||
&Paginator::from_taxonomy(taxonomy, item, &library),
|
||||
)?;
|
||||
} else {
|
||||
let single_output =
|
||||
|
@ -926,7 +925,7 @@ impl Site {
|
|||
};
|
||||
let feed_filename = &self.config.feed_filename;
|
||||
|
||||
if let Some(ref base) = base_path {
|
||||
if let Some(base) = base_path {
|
||||
let mut components = Vec::new();
|
||||
for component in base.components() {
|
||||
// TODO: avoid cloning the paths
|
||||
|
@ -934,12 +933,12 @@ impl Site {
|
|||
}
|
||||
self.write_content(
|
||||
&components.iter().map(|x| x.as_ref()).collect::<Vec<_>>(),
|
||||
&feed_filename,
|
||||
feed_filename,
|
||||
feed,
|
||||
false,
|
||||
)?;
|
||||
} else {
|
||||
self.write_content(&[], &feed_filename, feed, false)?;
|
||||
self.write_content(&[], feed_filename, feed, false)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
@ -987,7 +986,7 @@ impl Site {
|
|||
for asset in §ion.assets {
|
||||
let asset_path = asset.as_path();
|
||||
self.copy_asset(
|
||||
&asset_path,
|
||||
asset_path,
|
||||
&output_path.join(
|
||||
asset_path.file_name().expect("Failed to get asset filename for section"),
|
||||
),
|
||||
|
@ -1021,7 +1020,7 @@ impl Site {
|
|||
if section.meta.is_paginated() {
|
||||
self.render_paginated(
|
||||
components,
|
||||
&Paginator::from_section(§ion, &self.library.read().unwrap()),
|
||||
&Paginator::from_section(section, &self.library.read().unwrap()),
|
||||
)?;
|
||||
} else {
|
||||
let output =
|
||||
|
|
|
@ -58,12 +58,12 @@ pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> {
|
|||
let section = library
|
||||
.get_section(&full_path)
|
||||
.expect("Couldn't find section in check_internal_links_with_anchors");
|
||||
!section.has_anchor(&anchor)
|
||||
!section.has_anchor(anchor)
|
||||
} else {
|
||||
let page = library
|
||||
.get_page(&full_path)
|
||||
.expect("Couldn't find section in check_internal_links_with_anchors");
|
||||
!page.has_anchor(&anchor)
|
||||
!page.has_anchor(anchor)
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -96,7 +96,7 @@ pub fn check_internal_links_with_anchors(site: &Site) -> Result<()> {
|
|||
}
|
||||
|
||||
fn get_link_domain(link: &str) -> Result<String> {
|
||||
return match Url::parse(&link) {
|
||||
return match Url::parse(link) {
|
||||
Ok(url) => match url.host_str().map(String::from) {
|
||||
Some(domain_str) => Ok(domain_str),
|
||||
None => bail!("could not parse domain `{}` from link", link),
|
||||
|
@ -129,7 +129,7 @@ pub fn check_external_links(site: &Site) -> Result<()> {
|
|||
let mut links_by_domain: HashMap<String, Vec<(PathBuf, String)>> = HashMap::new();
|
||||
|
||||
for link in all_links.iter() {
|
||||
links_by_domain.entry(link.2.to_string()).or_insert(Vec::new());
|
||||
links_by_domain.entry(link.2.to_string()).or_default();
|
||||
// Insert content path and link under the domain key
|
||||
links_by_domain
|
||||
.get_mut(&link.2.to_string())
|
||||
|
@ -156,7 +156,7 @@ pub fn check_external_links(site: &Site) -> Result<()> {
|
|||
.map(|(_domain, links)| {
|
||||
let mut links_to_process = links.len();
|
||||
links
|
||||
.into_iter()
|
||||
.iter()
|
||||
.filter_map(move |(page_path, link)| {
|
||||
links_to_process -= 1;
|
||||
|
||||
|
@ -170,7 +170,7 @@ pub fn check_external_links(site: &Site) -> Result<()> {
|
|||
return None;
|
||||
}
|
||||
|
||||
let res = link_checker::check_url(&link, &site.config.link_checker);
|
||||
let res = link_checker::check_url(link, &site.config.link_checker);
|
||||
|
||||
if links_to_process > 0 {
|
||||
// Prevent rate-limiting, wait before next crawl unless we're done with this domain
|
||||
|
|
|
@ -8,7 +8,7 @@ use errors::{bail, Result};
|
|||
use utils::fs::{create_file, ensure_directory_exists};
|
||||
|
||||
pub fn compile_sass(base_path: &Path, output_path: &Path) -> Result<()> {
|
||||
ensure_directory_exists(&output_path)?;
|
||||
ensure_directory_exists(output_path)?;
|
||||
|
||||
let sass_path = {
|
||||
let mut sass_path = PathBuf::from(base_path);
|
||||
|
@ -16,8 +16,7 @@ pub fn compile_sass(base_path: &Path, output_path: &Path) -> Result<()> {
|
|||
sass_path
|
||||
};
|
||||
|
||||
let mut options = Options::default();
|
||||
options.output_style = OutputStyle::Compressed;
|
||||
let mut options = Options { output_style: OutputStyle::Compressed, ..Default::default() };
|
||||
let mut compiled_paths = compile_sass_glob(&sass_path, output_path, "scss", &options)?;
|
||||
|
||||
options.indented_syntax = true;
|
||||
|
@ -91,7 +90,7 @@ fn test_get_non_partial_scss() {
|
|||
|
||||
let result = get_non_partial_scss(&path, "scss");
|
||||
|
||||
assert!(result.len() != 0);
|
||||
assert!(!result.is_empty());
|
||||
assert!(result.iter().filter_map(|path| path.file_name()).any(|file| file == "scss.scss"))
|
||||
}
|
||||
#[test]
|
||||
|
@ -106,6 +105,6 @@ fn test_get_non_partial_scss_underscores() {
|
|||
|
||||
let result = get_non_partial_scss(&path, "scss");
|
||||
|
||||
assert!(result.len() != 0);
|
||||
assert!(!result.is_empty());
|
||||
assert!(result.iter().filter_map(|path| path.file_name()).any(|file| file == "scss.scss"))
|
||||
}
|
||||
|
|
|
@ -99,8 +99,7 @@ pub fn find_entries<'a>(
|
|||
let mut taxonomies_entries = vec![];
|
||||
for taxonomy in taxonomies {
|
||||
let name = &taxonomy.kind.name;
|
||||
let mut terms = vec![];
|
||||
terms.push(SitemapEntry::new(Cow::Owned(config.make_permalink(name)), None));
|
||||
let mut terms = vec![SitemapEntry::new(Cow::Owned(config.make_permalink(name)), None)];
|
||||
for item in &taxonomy.items {
|
||||
terms.push(SitemapEntry::new(
|
||||
Cow::Owned(config.make_permalink(&format!("{}/{}", name, item.slug))),
|
||||
|
|
|
@ -23,6 +23,7 @@ pub fn register_early_global_fns(site: &mut Site) -> TeraResult<()> {
|
|||
site.base_path.clone(),
|
||||
site.config.clone(),
|
||||
site.permalinks.clone(),
|
||||
site.output_path.clone(),
|
||||
),
|
||||
);
|
||||
site.tera.register_function(
|
||||
|
@ -31,15 +32,24 @@ pub fn register_early_global_fns(site: &mut Site) -> TeraResult<()> {
|
|||
site.base_path.clone(),
|
||||
site.imageproc.clone(),
|
||||
site.config.theme.clone(),
|
||||
site.output_path.clone(),
|
||||
),
|
||||
);
|
||||
site.tera.register_function(
|
||||
"get_image_metadata",
|
||||
global_fns::GetImageMetadata::new(site.base_path.clone(), site.config.theme.clone()),
|
||||
global_fns::GetImageMetadata::new(
|
||||
site.base_path.clone(),
|
||||
site.config.theme.clone(),
|
||||
site.output_path.clone(),
|
||||
),
|
||||
);
|
||||
site.tera.register_function(
|
||||
"load_data",
|
||||
global_fns::LoadData::new(site.base_path.clone(), site.config.theme.clone()),
|
||||
global_fns::LoadData::new(
|
||||
site.base_path.clone(),
|
||||
site.config.theme.clone(),
|
||||
site.output_path.clone(),
|
||||
),
|
||||
);
|
||||
site.tera.register_function("trans", global_fns::Trans::new(site.config.clone()));
|
||||
site.tera.register_function(
|
||||
|
@ -52,7 +62,11 @@ pub fn register_early_global_fns(site: &mut Site) -> TeraResult<()> {
|
|||
);
|
||||
site.tera.register_function(
|
||||
"get_file_hash",
|
||||
global_fns::GetFileHash::new(site.base_path.clone(), site.config.theme.clone()),
|
||||
global_fns::GetFileHash::new(
|
||||
site.base_path.clone(),
|
||||
site.config.theme.clone(),
|
||||
site.output_path.clone(),
|
||||
),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
|
|
|
@ -5,6 +5,7 @@ use std::path::{Path, PathBuf};
|
|||
|
||||
use path_slash::PathExt;
|
||||
use site::Site;
|
||||
use std::ffi::OsStr;
|
||||
use tempfile::{tempdir, TempDir};
|
||||
|
||||
// 2 helper macros to make all the build testing more bearable
|
||||
|
@ -12,7 +13,7 @@ use tempfile::{tempdir, TempDir};
|
|||
macro_rules! file_exists {
|
||||
($root: expr, $path: expr) => {{
|
||||
let mut path = $root.clone();
|
||||
for component in $path.split("/") {
|
||||
for component in $path.split('/') {
|
||||
path = path.join(component);
|
||||
}
|
||||
std::path::Path::new(&path).exists()
|
||||
|
@ -24,7 +25,7 @@ macro_rules! file_contains {
|
|||
($root: expr, $path: expr, $text: expr) => {{
|
||||
use std::io::prelude::*;
|
||||
let mut path = $root.clone();
|
||||
for component in $path.split("/") {
|
||||
for component in $path.split('/') {
|
||||
path = path.join(component);
|
||||
}
|
||||
let mut file = std::fs::File::open(&path).expect(&format!("Failed to open {:?}", $path));
|
||||
|
@ -75,16 +76,13 @@ where
|
|||
/// When the path is not a markdown file (.md), None is returned
|
||||
/// Strips base_dir from the start of path
|
||||
fn find_lang_for(entry: &Path, base_dir: &Path) -> Option<(String, Option<String>)> {
|
||||
let ext = entry.extension();
|
||||
if ext.is_none() {
|
||||
// Not a markdown file (no extension), skip
|
||||
return None;
|
||||
}
|
||||
let ext = ext.unwrap();
|
||||
if ext != "md" {
|
||||
// Not a markdown file, skip
|
||||
return None;
|
||||
}
|
||||
// continue if we have md file,
|
||||
// skip otherwise
|
||||
match entry.extension().and_then(OsStr::to_str) {
|
||||
Some("md") => (),
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
let mut no_ext = entry.to_path_buf();
|
||||
let stem = entry.file_stem().unwrap();
|
||||
// Remove .md
|
||||
|
@ -101,7 +99,7 @@ fn find_lang_for(entry: &Path, base_dir: &Path) -> Option<(String, Option<String
|
|||
Ok(path_without_prefix) => path_without_prefix.to_slash_lossy(),
|
||||
_ => unified_path.to_slash_lossy(),
|
||||
};
|
||||
return Some((unified_path_str, Some(lang.to_str().unwrap().into())));
|
||||
Some((unified_path_str, Some(lang.to_str().unwrap().into())))
|
||||
} else {
|
||||
// No lang, return no_ext directly
|
||||
let mut no_ext_string = match no_ext.strip_prefix(base_dir) {
|
||||
|
@ -109,7 +107,7 @@ fn find_lang_for(entry: &Path, base_dir: &Path) -> Option<(String, Option<String
|
|||
_ => no_ext.to_slash_lossy(),
|
||||
};
|
||||
no_ext_string.push_str(".md");
|
||||
return Some((no_ext_string, None));
|
||||
Some((no_ext_string, None))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -133,17 +131,17 @@ pub fn add_translations_from(
|
|||
if let Some((unified_path, lang)) = find_lang_for(&entry, strip) {
|
||||
if let Some(index) = expected.get_mut(&unified_path) {
|
||||
// Insert found lang for rel_path, or DEFAULT otherwise
|
||||
index.push(lang.unwrap_or(default.to_string()));
|
||||
index.push(lang.unwrap_or_else(|| default.to_string()));
|
||||
} else {
|
||||
// rel_path is not registered yet, insert it in expected
|
||||
expected.insert(unified_path, vec![lang.unwrap_or(default.to_string())]);
|
||||
expected.insert(unified_path, vec![lang.unwrap_or_else(|| default.to_string())]);
|
||||
}
|
||||
} else {
|
||||
// Not a markdown file, skip
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return expected;
|
||||
expected
|
||||
}
|
||||
|
||||
/// Calculate output path for Markdown files
|
||||
|
@ -302,7 +300,7 @@ pub fn ensure_translations_match(
|
|||
return false;
|
||||
}
|
||||
// Everything went well
|
||||
return true;
|
||||
true
|
||||
} else {
|
||||
// Should never happen because even the default language counts as a translation
|
||||
// Reaching here means either there is a logic error in the tests themselves,
|
||||
|
@ -329,7 +327,7 @@ pub fn ensure_translations_in_output(site: &Site, path: &str, permalink: &str) -
|
|||
let output_path = site.output_path.join(output_path);
|
||||
|
||||
let output = std::fs::read_to_string(&output_path)
|
||||
.expect(&format!("Output not found in {}", output_path.display()));
|
||||
.unwrap_or_else(|_| panic!("Output not found in {}", output_path.display()));
|
||||
|
||||
for permalink in &translations_permalinks {
|
||||
if !output.contains(permalink) {
|
||||
|
@ -338,5 +336,5 @@ pub fn ensure_translations_in_output(site: &Site, path: &str, permalink: &str) -
|
|||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
true
|
||||
}
|
||||
|
|
|
@ -151,15 +151,15 @@ fn can_build_site_without_live_reload() {
|
|||
assert!(file_contains!(public, "posts/tutorials/devops/index.html", "docker"));
|
||||
|
||||
// We do have categories
|
||||
assert_eq!(file_exists!(public, "categories/index.html"), true);
|
||||
assert_eq!(file_exists!(public, "categories/a-category/index.html"), true);
|
||||
assert_eq!(file_exists!(public, "categories/a-category/atom.xml"), true);
|
||||
assert!(file_exists!(public, "categories/index.html"));
|
||||
assert!(file_exists!(public, "categories/a-category/index.html"));
|
||||
assert!(file_exists!(public, "categories/a-category/atom.xml"));
|
||||
// and podcast_authors (https://github.com/getzola/zola/issues/1177)
|
||||
assert_eq!(file_exists!(public, "podcast-authors/index.html"), true);
|
||||
assert_eq!(file_exists!(public, "podcast-authors/some-person/index.html"), true);
|
||||
assert_eq!(file_exists!(public, "podcast-authors/some-person/atom.xml"), true);
|
||||
assert!(file_exists!(public, "podcast-authors/index.html"));
|
||||
assert!(file_exists!(public, "podcast-authors/some-person/index.html"));
|
||||
assert!(file_exists!(public, "podcast-authors/some-person/atom.xml"));
|
||||
// But no tags
|
||||
assert_eq!(file_exists!(public, "tags/index.html"), false);
|
||||
assert!(!file_exists!(public, "tags/index.html"));
|
||||
|
||||
// Theme files are there
|
||||
assert!(file_exists!(public, "sample.css"));
|
||||
|
@ -181,10 +181,7 @@ fn can_build_site_without_live_reload() {
|
|||
assert!(!file_exists!(public, "secret_section/page.html"));
|
||||
assert!(!file_exists!(public, "secret_section/secret_sub_section/hello.html"));
|
||||
// no live reload code
|
||||
assert_eq!(
|
||||
file_contains!(public, "index.html", "/livereload.js?port=1112&mindelay=10"),
|
||||
false
|
||||
);
|
||||
assert!(!file_contains!(public, "index.html", "/livereload.js?port=1112&mindelay=10"),);
|
||||
|
||||
// Both pages and sections are in the sitemap
|
||||
assert!(file_contains!(
|
||||
|
@ -238,11 +235,11 @@ fn can_build_site_with_live_reload_and_drafts() {
|
|||
// TODO: add assertion for syntax highlighting
|
||||
|
||||
// We do have categories
|
||||
assert_eq!(file_exists!(public, "categories/index.html"), true);
|
||||
assert_eq!(file_exists!(public, "categories/a-category/index.html"), true);
|
||||
assert_eq!(file_exists!(public, "categories/a-category/atom.xml"), true);
|
||||
assert!(file_exists!(public, "categories/index.html"));
|
||||
assert!(file_exists!(public, "categories/a-category/index.html"));
|
||||
assert!(file_exists!(public, "categories/a-category/atom.xml"));
|
||||
// But no tags
|
||||
assert_eq!(file_exists!(public, "tags/index.html"), false);
|
||||
assert!(!file_exists!(public, "tags/index.html"));
|
||||
|
||||
// no live reload code
|
||||
assert!(file_contains!(public, "index.html", "/livereload.js"));
|
||||
|
@ -320,7 +317,7 @@ fn can_build_site_with_taxonomies() {
|
|||
// Extending from a theme works
|
||||
assert!(file_contains!(public, "categories/a/index.html", "EXTENDED"));
|
||||
// Tags aren't
|
||||
assert_eq!(file_exists!(public, "tags/index.html"), false);
|
||||
assert!(!file_exists!(public, "tags/index.html"));
|
||||
|
||||
// Categories are in the sitemap
|
||||
assert!(file_contains!(
|
||||
|
@ -403,7 +400,7 @@ fn can_build_site_with_pagination_for_section() {
|
|||
"posts/index.html",
|
||||
"Last: https://replace-this-with-your-url.com/posts/page/5/"
|
||||
));
|
||||
assert_eq!(file_contains!(public, "posts/index.html", "has_prev"), false);
|
||||
assert!(!file_contains!(public, "posts/index.html", "has_prev"));
|
||||
|
||||
assert!(file_exists!(public, "posts/page/2/index.html"));
|
||||
assert!(file_contains!(public, "posts/page/2/index.html", "Num pagers: 5"));
|
||||
|
@ -526,8 +523,8 @@ fn can_build_site_with_pagination_for_index() {
|
|||
"index.html",
|
||||
"Last: https://replace-this-with-your-url.com/page/2/"
|
||||
));
|
||||
assert_eq!(file_contains!(public, "index.html", "has_prev"), false);
|
||||
assert_eq!(file_contains!(public, "index.html", "has_next"), true);
|
||||
assert!(!file_contains!(public, "index.html", "has_prev"));
|
||||
assert!(file_contains!(public, "index.html", "has_next"));
|
||||
|
||||
// sitemap contains the pager pages
|
||||
assert!(file_contains!(
|
||||
|
@ -612,7 +609,7 @@ fn can_build_site_with_pagination_for_taxonomy() {
|
|||
"tags/a/index.html",
|
||||
"Last: https://replace-this-with-your-url.com/tags/a/page/8/"
|
||||
));
|
||||
assert_eq!(file_contains!(public, "tags/a/index.html", "has_prev"), false);
|
||||
assert!(!file_contains!(public, "tags/a/index.html", "has_prev"));
|
||||
|
||||
// sitemap contains the pager pages
|
||||
assert!(file_contains!(
|
||||
|
@ -735,7 +732,7 @@ fn can_build_site_with_html_minified() {
|
|||
assert!(file_contains!(
|
||||
public,
|
||||
"index.html",
|
||||
"<!DOCTYPE html><html lang=en><head><meta charset=UTF-8>"
|
||||
"<!doctype html><html lang=en><head><meta charset=UTF-8>"
|
||||
));
|
||||
}
|
||||
|
||||
|
|
|
@ -192,9 +192,9 @@ fn correct_translations_on_all_pages() {
|
|||
|
||||
// Ensure translations expected here match with those in the library
|
||||
// TODO: add constructive error message inside the function
|
||||
assert!(ensure_translations_match(&translations, &site, &path));
|
||||
assert!(ensure_translations_match(&translations, &site, path));
|
||||
|
||||
// Ensure output file contains all translations URLs
|
||||
assert!(ensure_translations_in_output(&site, &path, &link));
|
||||
assert!(ensure_translations_in_output(&site, path, &link));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -28,8 +28,12 @@ rendering = { path = "../rendering" }
|
|||
[dependencies.reqwest]
|
||||
version = "0.11"
|
||||
default-features = false
|
||||
features = ["blocking", "rustls-tls"]
|
||||
features = ["blocking"]
|
||||
|
||||
[dev-dependencies]
|
||||
mockito = "0.30"
|
||||
tempfile = "3"
|
||||
|
||||
[features]
|
||||
rust-tls = ["reqwest/rustls-tls"]
|
||||
native-tls = ["reqwest/default-tls"]
|
||||
|
|
|
@ -214,8 +214,7 @@ mod tests {
|
|||
};
|
||||
|
||||
let taxonomies = vec![tags.clone(), tags_fr.clone()];
|
||||
let static_fn =
|
||||
GetTaxonomy::new(&config.default_language, taxonomies.clone(), library.clone());
|
||||
let static_fn = GetTaxonomy::new(&config.default_language, taxonomies, library);
|
||||
// can find it correctly
|
||||
let mut args = HashMap::new();
|
||||
args.insert("kind".to_string(), to_value("tags").unwrap());
|
||||
|
@ -291,7 +290,7 @@ mod tests {
|
|||
items: vec![tag_fr],
|
||||
};
|
||||
|
||||
let taxonomies = vec![tags.clone(), tags_fr.clone()];
|
||||
let taxonomies = vec![tags, tags_fr];
|
||||
let static_fn =
|
||||
GetTaxonomyUrl::new(&config.default_language, &taxonomies, config.slugify.taxonomies);
|
||||
|
||||
|
|
|
@ -31,11 +31,17 @@ pub struct GetUrl {
|
|||
base_path: PathBuf,
|
||||
config: Config,
|
||||
permalinks: HashMap<String, String>,
|
||||
output_path: PathBuf,
|
||||
}
|
||||
|
||||
impl GetUrl {
|
||||
pub fn new(base_path: PathBuf, config: Config, permalinks: HashMap<String, String>) -> Self {
|
||||
Self { base_path, config, permalinks }
|
||||
pub fn new(
|
||||
base_path: PathBuf,
|
||||
config: Config,
|
||||
permalinks: HashMap<String, String>,
|
||||
output_path: PathBuf,
|
||||
) -> Self {
|
||||
Self { base_path, config, permalinks, output_path }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -111,7 +117,7 @@ impl TeraFn for GetUrl {
|
|||
}
|
||||
|
||||
if cachebust {
|
||||
match search_for_file(&self.base_path, &path_with_lang, &self.config.theme)
|
||||
match search_for_file(&self.base_path, &path_with_lang, &self.config.theme, &self.output_path)
|
||||
.map_err(|e| format!("`get_url`: {}", e))?
|
||||
.and_then(|(p, _)| fs::File::open(&p).ok())
|
||||
.and_then(|f| compute_file_hash::<Sha256>(f, false).ok())
|
||||
|
@ -142,10 +148,11 @@ impl TeraFn for GetUrl {
|
|||
pub struct GetFileHash {
|
||||
base_path: PathBuf,
|
||||
theme: Option<String>,
|
||||
output_path: PathBuf,
|
||||
}
|
||||
impl GetFileHash {
|
||||
pub fn new(base_path: PathBuf, theme: Option<String>) -> Self {
|
||||
Self { base_path, theme }
|
||||
pub fn new(base_path: PathBuf, theme: Option<String>, output_path: PathBuf) -> Self {
|
||||
Self { base_path, theme, output_path }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -169,7 +176,7 @@ impl TeraFn for GetFileHash {
|
|||
)
|
||||
.unwrap_or(true);
|
||||
|
||||
let file_path = match search_for_file(&self.base_path, &path, &self.theme)
|
||||
let file_path = match search_for_file(&self.base_path, &path, &self.theme, &self.output_path)
|
||||
.map_err(|e| format!("`get_file_hash`: {}", e))?
|
||||
{
|
||||
Some((f, _)) => f,
|
||||
|
@ -204,6 +211,8 @@ mod tests {
|
|||
use super::{GetFileHash, GetUrl};
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::fs::create_dir;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use tempfile::{tempdir, TempDir};
|
||||
use tera::{to_value, Function};
|
||||
|
@ -232,7 +241,7 @@ title = "A title"
|
|||
#[test]
|
||||
fn can_add_cachebust_to_url() {
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new());
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new(), PathBuf::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("app.css").unwrap());
|
||||
args.insert("cachebust".to_string(), to_value(true).unwrap());
|
||||
|
@ -242,7 +251,7 @@ title = "A title"
|
|||
#[test]
|
||||
fn can_add_trailing_slashes() {
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new());
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new(), PathBuf::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("app.css").unwrap());
|
||||
args.insert("trailing_slash".to_string(), to_value(true).unwrap());
|
||||
|
@ -252,7 +261,7 @@ title = "A title"
|
|||
#[test]
|
||||
fn can_add_slashes_and_cachebust() {
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new());
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new(), PathBuf::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("app.css").unwrap());
|
||||
args.insert("trailing_slash".to_string(), to_value(true).unwrap());
|
||||
|
@ -263,7 +272,7 @@ title = "A title"
|
|||
#[test]
|
||||
fn can_link_to_some_static_file() {
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new());
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new(), PathBuf::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("app.css").unwrap());
|
||||
assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css");
|
||||
|
@ -273,11 +282,25 @@ title = "A title"
|
|||
assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/app.css");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_link_to_file_in_output_path() {
|
||||
let dir = create_temp_dir();
|
||||
let public = dir.path().join("public");
|
||||
create_dir(&public).expect("Failed to create output directory");
|
||||
create_file(&public.join("style.css"), "// Hello world")
|
||||
.expect("Failed to create file in output directory");
|
||||
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), Config::default(), HashMap::new(), public);
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("style.css").unwrap());
|
||||
assert_eq!(static_fn.call(&args).unwrap(), "http://a-website.com/style.css");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn error_when_language_not_available() {
|
||||
let config = Config::parse(CONFIG_DATA).unwrap();
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), config, HashMap::new());
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), config, HashMap::new(), PathBuf::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("@/a_section/a_page.md").unwrap());
|
||||
args.insert("lang".to_string(), to_value("it").unwrap());
|
||||
|
@ -301,7 +324,7 @@ title = "A title"
|
|||
);
|
||||
let config = Config::parse(CONFIG_DATA).unwrap();
|
||||
let dir = create_temp_dir();
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), config, permalinks);
|
||||
let static_fn = GetUrl::new(dir.path().to_path_buf(), config, permalinks, PathBuf::new());
|
||||
let mut args = HashMap::new();
|
||||
args.insert("path".to_string(), to_value("@/a_section/a_page.md").unwrap());
|
||||
args.insert("lang".to_string(), to_value("fr").unwrap());
|
||||
|
@@ -324,7 +347,7 @@ title = "A title"
"https://remplace-par-ton-url.fr/en/a_section/a_page/".to_string(),
);
let dir = create_temp_dir();
-let static_fn = GetUrl::new(dir.path().to_path_buf(), config, permalinks);
+let static_fn = GetUrl::new(dir.path().to_path_buf(), config, permalinks, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("@/a_section/a_page.md").unwrap());
args.insert("lang".to_string(), to_value("en").unwrap());
@@ -338,7 +361,7 @@ title = "A title"
fn can_get_feed_url_with_default_language() {
let config = Config::parse(CONFIG_DATA).unwrap();
let dir = create_temp_dir();
-let static_fn = GetUrl::new(dir.path().to_path_buf(), config.clone(), HashMap::new());
+let static_fn = GetUrl::new(dir.path().to_path_buf(), config.clone(), HashMap::new(), PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value(config.feed_filename).unwrap());
args.insert("lang".to_string(), to_value("fr").unwrap());
@@ -349,7 +372,7 @@ title = "A title"
fn can_get_feed_url_with_other_language() {
let config = Config::parse(CONFIG_DATA).unwrap();
let dir = create_temp_dir();
-let static_fn = GetUrl::new(dir.path().to_path_buf(), config.clone(), HashMap::new());
+let static_fn = GetUrl::new(dir.path().to_path_buf(), config.clone(), HashMap::new(), PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value(config.feed_filename).unwrap());
args.insert("lang".to_string(), to_value("en").unwrap());
@@ -359,7 +382,7 @@ title = "A title"
#[test]
fn can_get_file_hash_sha256_no_base64() {
let dir = create_temp_dir();
-let static_fn = GetFileHash::new(dir.into_path(), None);
+let static_fn = GetFileHash::new(dir.into_path(), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("app.css").unwrap());
args.insert("sha_type".to_string(), to_value(256).unwrap());
@@ -373,7 +396,7 @@ title = "A title"
#[test]
fn can_get_file_hash_sha256_base64() {
let dir = create_temp_dir();
-let static_fn = GetFileHash::new(dir.into_path(), None);
+let static_fn = GetFileHash::new(dir.into_path(), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("app.css").unwrap());
args.insert("sha_type".to_string(), to_value(256).unwrap());
@@ -384,7 +407,7 @@ title = "A title"
#[test]
fn can_get_file_hash_sha384_no_base64() {
let dir = create_temp_dir();
-let static_fn = GetFileHash::new(dir.into_path(), None);
+let static_fn = GetFileHash::new(dir.into_path(), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("app.css").unwrap());
args.insert("base64".to_string(), to_value(false).unwrap());
@@ -397,7 +420,7 @@ title = "A title"
#[test]
fn can_get_file_hash_sha384() {
let dir = create_temp_dir();
-let static_fn = GetFileHash::new(dir.into_path(), None);
+let static_fn = GetFileHash::new(dir.into_path(), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("app.css").unwrap());
assert_eq!(
@@ -409,7 +432,7 @@ title = "A title"
#[test]
fn can_get_file_hash_sha512_no_base64() {
let dir = create_temp_dir();
-let static_fn = GetFileHash::new(dir.into_path(), None);
+let static_fn = GetFileHash::new(dir.into_path(), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("app.css").unwrap());
args.insert("sha_type".to_string(), to_value(512).unwrap());
@@ -423,7 +446,7 @@ title = "A title"
#[test]
fn can_get_file_hash_sha512() {
let dir = create_temp_dir();
-let static_fn = GetFileHash::new(dir.into_path(), None);
+let static_fn = GetFileHash::new(dir.into_path(), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("app.css").unwrap());
args.insert("sha_type".to_string(), to_value(512).unwrap());
@@ -436,7 +459,7 @@ title = "A title"
#[test]
fn error_when_file_not_found_for_hash() {
let dir = create_temp_dir();
-let static_fn = GetFileHash::new(dir.into_path(), None);
+let static_fn = GetFileHash::new(dir.into_path(), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("doesnt-exist").unwrap());
let err = format!("{}", static_fn.call(&args).unwrap_err());

@@ -5,11 +5,12 @@ use errors::{bail, Result};
use utils::fs::is_path_in_directory;

/// This is used by a few Tera functions to search for files on the filesystem.
-/// This does try to find the file in 3 different spots:
+/// This does try to find the file in 5 different spots:
/// 1. base_path + path
/// 2. base_path + static + path
/// 3. base_path + content + path
-/// 4. base_path + themes + {current_theme} + static + path
+/// 4. base_path + {output dir} + path
+/// 5. base_path + themes + {current_theme} + static + path
/// A path starting with @/ will replace it with `content/` and a path starting with `/` will have
/// it removed.
/// It also returns the unified path so it can be used as unique hash for a given file.
@@ -18,8 +19,9 @@ pub fn search_for_file(
base_path: &Path,
path: &str,
theme: &Option<String>,
+output_path: &Path,
) -> Result<Option<(PathBuf, String)>> {
-let mut search_paths = vec![base_path.join("static"), base_path.join("content")];
+let mut search_paths = vec![base_path.join("static"), base_path.join("content"), base_path.join(output_path)];
if let Some(t) = theme {
search_paths.push(base_path.join("themes").join(t).join("static"));
}
@@ -37,7 +39,7 @@ pub fn search_for_file(
}

if !file_exists {
-// we need to search in both search folders now
+// we need to search in all search folders now
for dir in &search_paths {
let p = dir.join(&*actual_path);
if p.exists() {

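As a rough illustration of the five-location lookup order documented in the hunk above (not the actual zola helper, which also returns a unified path for hashing), a standalone sketch could look like this; the function name `first_existing` and its exact signature are hypothetical:

```rust
use std::path::{Path, PathBuf};

/// Simplified sketch of the lookup order described above; `rel` is assumed to
/// be already normalized (no `@/` prefix, no leading `/`).
fn first_existing(base: &Path, rel: &str, output_dir: &Path, theme: Option<&str>) -> Option<PathBuf> {
    let mut candidates = vec![
        base.join(rel),                  // 1. base_path + path
        base.join("static").join(rel),   // 2. base_path + static + path
        base.join("content").join(rel),  // 3. base_path + content + path
        base.join(output_dir).join(rel), // 4. base_path + {output dir} + path
    ];
    if let Some(t) = theme {
        // 5. base_path + themes + {current_theme} + static + path
        candidates.push(base.join("themes").join(t).join("static").join(rel));
    }
    candidates.into_iter().find(|p| p.exists())
}
```
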
@@ -12,6 +12,7 @@ pub struct ResizeImage {
base_path: PathBuf,
theme: Option<String>,
imageproc: Arc<Mutex<imageproc::Processor>>,
+output_path: PathBuf,
}

impl ResizeImage {
@@ -19,8 +20,9 @@ impl ResizeImage {
base_path: PathBuf,
imageproc: Arc<Mutex<imageproc::Processor>>,
theme: Option<String>,
+output_path: PathBuf,
) -> Self {
-Self { base_path, imageproc, theme }
+Self { base_path, imageproc, theme, output_path }
}
}

@@ -60,7 +62,7 @@ impl TeraFn for ResizeImage {
}

let mut imageproc = self.imageproc.lock().unwrap();
-let (file_path, unified_path) = match search_for_file(&self.base_path, &path, &self.theme)
+let (file_path, unified_path) = match search_for_file(&self.base_path, &path, &self.theme, &self.output_path)
.map_err(|e| format!("`resize_image`: {}", e))?
{
Some(f) => f,
@@ -83,11 +85,12 @@ pub struct GetImageMetadata {
base_path: PathBuf,
theme: Option<String>,
result_cache: Arc<Mutex<HashMap<String, Value>>>,
+output_path: PathBuf,
}

impl GetImageMetadata {
-pub fn new(base_path: PathBuf, theme: Option<String>) -> Self {
-Self { base_path, result_cache: Arc::new(Mutex::new(HashMap::new())), theme }
+pub fn new(base_path: PathBuf, theme: Option<String>, output_path: PathBuf) -> Self {
+Self { base_path, result_cache: Arc::new(Mutex::new(HashMap::new())), theme, output_path }
}
}

@@ -105,7 +108,7 @@ impl TeraFn for GetImageMetadata {
)
.unwrap_or(false);

-let (src_path, unified_path) = match search_for_file(&self.base_path, &path, &self.theme)
+let (src_path, unified_path) = match search_for_file(&self.base_path, &path, &self.theme, &self.output_path)
.map_err(|e| format!("`get_image_metadata`: {}", e))?
{
Some((f, p)) => (f, p),
@@ -139,7 +142,7 @@ mod tests {
use std::fs::{copy, create_dir_all};

use config::Config;
-use std::path::Path;
+use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use tempfile::{tempdir, TempDir};
use tera::{to_value, Function};
@@ -172,6 +175,7 @@ mod tests {
dir.path().to_path_buf(),
Arc::new(Mutex::new(imageproc)),
Some("name".to_owned()),
+PathBuf::new(),
);
let mut args = HashMap::new();
args.insert("height".to_string(), to_value(40).unwrap());
@@ -246,7 +250,7 @@ mod tests {
fn can_get_image_metadata() {
let dir = create_dir_with_image();

-let static_fn = GetImageMetadata::new(dir.path().to_path_buf(), None);
+let static_fn = GetImageMetadata::new(dir.path().to_path_buf(), None, PathBuf::new());

// Let's test a few scenarii
let mut args = HashMap::new();

@@ -89,13 +89,14 @@ impl DataSource {
url_arg: Option<String>,
base_path: &Path,
theme: &Option<String>,
+output_path: &Path,
) -> Result<Option<Self>> {
if path_arg.is_some() && url_arg.is_some() {
return Err(GET_DATA_ARGUMENT_ERROR_MESSAGE.into());
}

if let Some(path) = path_arg {
-return match search_for_file(&base_path, &path, &theme)
+return match search_for_file(&base_path, &path, &theme, &output_path)
.map_err(|e| format!("`load_data`: {}", e))?
{
Some((f, _)) => Ok(Some(DataSource::Path(f))),
@@ -136,7 +137,7 @@ impl Hash for DataSource {
DataSource::Url(url) => url.hash(state),
DataSource::Path(path) => {
path.hash(state);
-get_file_time(&path).expect("get file time").hash(state);
+get_file_time(path).expect("get file time").hash(state);
}
};
}
@@ -169,9 +170,10 @@ pub struct LoadData {
theme: Option<String>,
client: Arc<Mutex<Client>>,
result_cache: Arc<Mutex<HashMap<u64, Value>>>,
+output_path: PathBuf,
}
impl LoadData {
-pub fn new(base_path: PathBuf, theme: Option<String>) -> Self {
+pub fn new(base_path: PathBuf, theme: Option<String>, output_path: PathBuf) -> Self {
let client = Arc::new(Mutex::new(
Client::builder()
.user_agent(concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION")))
@@ -179,7 +181,7 @@ impl LoadData {
.expect("reqwest client build"),
));
let result_cache = Arc::new(Mutex::new(HashMap::new()));
-Self { base_path, client, result_cache, theme }
+Self { base_path, client, result_cache, theme, output_path }
}
}

@@ -215,7 +217,7 @@ impl TeraFn for LoadData {
);

let method = match method_arg {
-Some(ref method_str) => match Method::from_str(&method_str) {
+Some(ref method_str) => match Method::from_str(method_str) {
Ok(m) => m,
Err(e) => return Err(e),
},
@@ -224,7 +226,7 @@ impl TeraFn for LoadData {

// If the file doesn't exist, source is None
let data_source = match (
-DataSource::from_args(path_arg.clone(), url_arg, &self.base_path, &self.theme),
+DataSource::from_args(path_arg.clone(), url_arg, &self.base_path, &self.theme, &self.output_path),
required,
) {
// If the file was not required, return a Null value to the template
@@ -473,12 +475,12 @@ mod tests {

fn get_test_file(filename: &str) -> PathBuf {
let test_files = PathBuf::from("../utils/test-files").canonicalize().unwrap();
-return test_files.join(filename);
+test_files.join(filename)
}

#[test]
fn fails_illegal_method_parameter() {
-let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
+let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("url".to_string(), to_value("https://example.com").unwrap());
args.insert("format".to_string(), to_value("plain").unwrap());
@@ -505,7 +507,7 @@ mod tests {

let url = format!("{}{}", mockito::server_url(), "/kr1zdgbm4y");

-let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
+let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("url".to_string(), to_value(url).unwrap());
args.insert("format".to_string(), to_value("plain").unwrap());
@@ -533,7 +535,7 @@ mod tests {

let url = format!("{}{}", mockito::server_url(), "/kr1zdgbm4yw");

-let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
+let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("url".to_string(), to_value(url).unwrap());
args.insert("format".to_string(), to_value("plain").unwrap());
@@ -562,7 +564,7 @@ mod tests {

let url = format!("{}{}", mockito::server_url(), "/kr1zdgbm4y");

-let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
+let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("url".to_string(), to_value(url).unwrap());
args.insert("format".to_string(), to_value("plain").unwrap());
@@ -578,7 +580,7 @@ mod tests {

#[test]
fn fails_when_missing_file() {
-let static_fn = LoadData::new(PathBuf::from("../utils"), None);
+let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("../../../READMEE.md").unwrap());
let result = static_fn.call(&args);
@@ -588,7 +590,7 @@ mod tests {

#[test]
fn doesnt_fail_when_missing_file_is_not_required() {
-let static_fn = LoadData::new(PathBuf::from("../utils"), None);
+let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("../../../READMEE.md").unwrap());
args.insert("required".to_string(), to_value(false).unwrap());
@@ -607,7 +609,7 @@ mod tests {
.unwrap();
copy(get_test_file("test.css"), dir.path().join("static").join("test.css")).unwrap();

-let static_fn = LoadData::new(dir.path().to_path_buf(), None);
+let static_fn = LoadData::new(dir.path().to_path_buf(), None, PathBuf::new());
let mut args = HashMap::new();
let val = if cfg!(windows) { ".hello {}\r\n" } else { ".hello {}\n" };

@@ -634,7 +636,7 @@ mod tests {

#[test]
fn cannot_load_outside_base_dir() {
-let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
+let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("../../README.md").unwrap());
args.insert("format".to_string(), to_value("plain").unwrap());
@@ -711,7 +713,7 @@ mod tests {
.create();

let url = format!("{}{}", mockito::server_url(), "/zpydpkjj67");
-let static_fn = LoadData::new(PathBuf::new(), None);
+let static_fn = LoadData::new(PathBuf::new(), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("url".to_string(), to_value(&url).unwrap());
args.insert("format".to_string(), to_value("json").unwrap());
@@ -728,7 +730,7 @@ mod tests {
.create();

let url = format!("{}{}", mockito::server_url(), "/aazeow0kog");
-let static_fn = LoadData::new(PathBuf::new(), None);
+let static_fn = LoadData::new(PathBuf::new(), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("url".to_string(), to_value(&url).unwrap());
args.insert("format".to_string(), to_value("json").unwrap());
@@ -749,7 +751,7 @@ mod tests {
.create();

let url = format!("{}{}", mockito::server_url(), "/aazeow0kog");
-let static_fn = LoadData::new(PathBuf::new(), None);
+let static_fn = LoadData::new(PathBuf::new(), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("url".to_string(), to_value(&url).unwrap());
args.insert("format".to_string(), to_value("json").unwrap());
@@ -776,7 +778,7 @@ mod tests {
.create();

let url = format!("{}{}", mockito::server_url(), "/chu8aizahBiy");
-let static_fn = LoadData::new(PathBuf::new(), None);
+let static_fn = LoadData::new(PathBuf::new(), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("url".to_string(), to_value(&url).unwrap());
args.insert("format".to_string(), to_value("json").unwrap());
@@ -786,7 +788,7 @@ mod tests {

#[test]
fn can_load_toml() {
-let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
+let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("test.toml").unwrap());
let result = static_fn.call(&args.clone()).unwrap();
@@ -806,7 +808,7 @@ mod tests {

#[test]
fn unknown_extension_defaults_to_plain() {
-let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
+let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("test.css").unwrap());
let result = static_fn.call(&args.clone()).unwrap();
@@ -821,7 +823,7 @@ mod tests {

#[test]
fn can_override_known_extension_with_format() {
-let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
+let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("test.csv").unwrap());
args.insert("format".to_string(), to_value("plain").unwrap());
@@ -839,7 +841,7 @@ mod tests {

#[test]
fn will_use_format_on_unknown_extension() {
-let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
+let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("test.css").unwrap());
args.insert("format".to_string(), to_value("plain").unwrap());
@@ -854,7 +856,7 @@ mod tests {

#[test]
fn can_load_csv() {
-let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
+let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("test.csv").unwrap());
let result = static_fn.call(&args.clone()).unwrap();
@@ -874,7 +876,7 @@ mod tests {
// Test points to bad csv file with uneven row lengths
#[test]
fn bad_csv_should_result_in_error() {
-let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
+let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("uneven_rows.csv").unwrap());
let result = static_fn.call(&args.clone());
@@ -884,7 +886,7 @@ mod tests {
let error_kind = result.err().unwrap().kind;
match error_kind {
tera::ErrorKind::Msg(msg) => {
-if msg != String::from("Error encountered when parsing csv records") {
+if msg != *"Error encountered when parsing csv records" {
panic!("Error message is wrong. Perhaps wrong error is being returned?");
}
}
@@ -894,7 +896,7 @@ mod tests {

#[test]
fn bad_csv_should_result_in_error_even_when_not_required() {
-let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
+let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("uneven_rows.csv").unwrap());
args.insert("required".to_string(), to_value(false).unwrap());
@@ -905,7 +907,7 @@ mod tests {
let error_kind = result.err().unwrap().kind;
match error_kind {
tera::ErrorKind::Msg(msg) => {
-if msg != String::from("Error encountered when parsing csv records") {
+if msg != *"Error encountered when parsing csv records" {
panic!("Error message is wrong. Perhaps wrong error is being returned?");
}
}
@@ -915,7 +917,7 @@ mod tests {

#[test]
fn can_load_json() {
-let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None);
+let static_fn = LoadData::new(PathBuf::from("../utils/test-files"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("path".to_string(), to_value("test.json").unwrap());
let result = static_fn.call(&args.clone()).unwrap();
@@ -941,7 +943,7 @@ mod tests {
.create();
let url = format!("{}{}", mockito::server_url(), "/kr1zdgbm4y3");

-let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
+let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("url".to_string(), to_value(&url).unwrap());
args.insert("format".to_string(), to_value("plain").unwrap());
@@ -973,7 +975,7 @@ mod tests {
.create();
let url = format!("{}{}", mockito::server_url(), "/kr1zdgbm4y2");

-let static_fn = LoadData::new(PathBuf::from(PathBuf::from("../utils")), None);
+let static_fn = LoadData::new(PathBuf::from("../utils"), None, PathBuf::new());
let mut args = HashMap::new();
args.insert("url".to_string(), to_value(&url).unwrap());
args.insert("format".to_string(), to_value("plain").unwrap());

@@ -79,7 +79,7 @@ pub fn load_tera(path: &Path, config: &Config) -> Result<Tera> {
);
let mut tera_theme = Tera::parse(&theme_tpl_glob)
.map_err(|e| Error::chain("Error parsing templates from themes", e))?;
-rewrite_theme_paths(&mut tera_theme, &theme);
+rewrite_theme_paths(&mut tera_theme, theme);

if theme_path.join("templates").join("robots.txt").exists() {
tera_theme.add_template_file(theme_path.join("templates").join("robots.txt"), None)?;

@@ -14,7 +14,7 @@ serde = { version = "1.0", features = ["derive"] }
slug = "0.1"
percent-encoding = "2"
filetime = "0.2.12"
-minify-html = "0.4.2"
+minify-html = "0.6"

errors = { path = "../errors" }

@@ -1,7 +1,7 @@
use filetime::{set_file_mtime, FileTime};
use std::fs::{copy, create_dir_all, metadata, File};
use std::io::prelude::*;
-use std::path::{Path, PathBuf};
+use std::path::Path;
use std::time::SystemTime;
use walkdir::WalkDir;

@@ -53,7 +53,7 @@ pub fn read_file(path: &Path) -> Result<String> {
.read_to_string(&mut content)?;

// Remove utf-8 BOM if any.
-if content.starts_with("\u{feff}") {
+if content.starts_with('\u{feff}') {
content.drain(..3);
}

@@ -62,7 +62,7 @@ pub fn read_file(path: &Path) -> Result<String> {

/// Copy a file but takes into account where to start the copy as
/// there might be folders we need to create on the way.
-pub fn copy_file(src: &Path, dest: &PathBuf, base_path: &PathBuf, hard_link: bool) -> Result<()> {
+pub fn copy_file(src: &Path, dest: &Path, base_path: &Path, hard_link: bool) -> Result<()> {
let relative_path = src.strip_prefix(base_path).unwrap();
let target_path = dest.join(relative_path);

@@ -79,7 +79,7 @@ pub fn copy_file(src: &Path, dest: &PathBuf, base_path: &PathBuf, hard_link: boo
/// 1. A file with the same name already exists in the dest path.
/// 2. Its modification timestamp is identical to that of the src file.
/// 3. Its filesize is identical to that of the src file.
-pub fn copy_file_if_needed(src: &Path, dest: &PathBuf, hard_link: bool) -> Result<()> {
+pub fn copy_file_if_needed(src: &Path, dest: &Path, hard_link: bool) -> Result<()> {
if let Some(parent_directory) = dest.parent() {
create_dir_all(parent_directory).map_err(|e| {
Error::chain(format!("Was not able to create folder {}", parent_directory.display()), e)

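The three conditions listed in the `copy_file_if_needed` doc comment above amount to an mtime-and-size comparison against the destination. A minimal standalone sketch of that check (the helper name `can_skip_copy` is hypothetical and this is not zola's implementation):

```rust
use std::fs;
use std::path::Path;

/// Returns true when the copy can be skipped: the destination already exists
/// and has the same modification time and the same size as the source.
fn can_skip_copy(src: &Path, dest: &Path) -> std::io::Result<bool> {
    if !dest.exists() {
        return Ok(false);
    }
    let (src_meta, dest_meta) = (fs::metadata(src)?, fs::metadata(dest)?);
    Ok(src_meta.modified()? == dest_meta.modified()? && src_meta.len() == dest_meta.len())
}
```
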
@@ -120,7 +120,7 @@ pub fn copy_file_if_needed(src: &Path, dest: &PathBuf, hard_link: bool) -> Resul
Ok(())
}

-pub fn copy_directory(src: &PathBuf, dest: &PathBuf, hard_link: bool) -> Result<()> {
+pub fn copy_directory(src: &Path, dest: &Path, hard_link: bool) -> Result<()> {
for entry in WalkDir::new(src).into_iter().filter_map(std::result::Result::ok) {
let relative_path = entry.path().strip_prefix(src).unwrap();
let target_path = dest.join(relative_path);

@@ -1,23 +1,14 @@
use errors::{bail, Result};
-use minify_html::{with_friendly_error, Cfg};
+use minify_html::{minify, Cfg};

pub fn html(html: String) -> Result<String> {
-let cfg = &Cfg { minify_js: false, minify_css: false };
-let mut input_bytes = html.as_bytes().to_vec();
+let mut cfg = Cfg::spec_compliant();
+cfg.keep_html_and_head_opening_tags = true;

-match with_friendly_error(&mut input_bytes, cfg) {
-Ok(len) => match std::str::from_utf8(&input_bytes[..len]) {
-Ok(result) => Ok(result.to_string()),
-Err(err) => bail!("Failed to convert bytes to string : {}", err),
-},
-Err(minify_error) => {
-bail!(
-"Failed to truncate html at character {}: {} \n {}",
-minify_error.position,
-minify_error.message,
-minify_error.code_context
-);
-}
+let minified = minify(html.as_bytes(), &cfg);
+match std::str::from_utf8(&minified) {
+Ok(result) => Ok(result.to_string()),
+Err(err) => bail!("Failed to convert bytes to string : {}", err),
}
}

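For reference, a hypothetical standalone example of driving minify-html the same way as the rewritten helper above, assuming minify-html 0.6 as pinned in the Cargo.toml hunk earlier; only the calls that appear in the diff itself (`Cfg::spec_compliant`, `keep_html_and_head_opening_tags`, `minify`) are used:

```rust
use minify_html::{minify, Cfg};

fn main() {
    // Same configuration as the helper above: spec-compliant minification,
    // but keep the <html> and <head> opening tags.
    let mut cfg = Cfg::spec_compliant();
    cfg.keep_html_and_head_opening_tags = true;

    let input = "<html>  <head> </head>  <body>  <p>Hello   zola</p>  </body></html>";
    let minified = minify(input.as_bytes(), &cfg);
    println!("{}", String::from_utf8_lossy(&minified));
}
```
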
@@ -27,7 +27,7 @@ fn strip_invalid_paths_chars(s: &str) -> String {
// NTFS forbidden characters : https://gist.github.com/doctaphred/d01d05291546186941e1b7ddc02034d3
// Also we need to trim whitespaces and `.` from the end of filename
let trimmed = s.trim_end_matches(|c| c == ' ' || c == '.');
-strip_chars(&trimmed, r#"<>:"/\|?*"#)
+strip_chars(trimmed, r#"<>:"/\|?*"#)
}

pub fn slugify_paths(s: &str, strategy: SlugifyStrategy) -> String {

@@ -155,6 +155,12 @@ From a terminal, you can now run the following command:
$ cargo build --release
```

+If you encountered compilation errors like `error: failed to run custom build command for 'ring v0.16.20'`, you can try the command below instead:
+
+```sh
+$ cargo build --release --no-default-features --features=native-tls
+```
+
The binary will be available in the `target/release` directory. You can move it in your `$PATH` to have the
`zola` command available globally or in a directory if you want for example to have the binary in the
same repository as the site.

@@ -109,11 +109,12 @@ logic applies.

1. The base directory is the Zola root directory, where the `config.toml` is
2. For the given path: if it starts with `@/`, replace that with `content/` instead and trim any leading `/`
-3. Search in the following 3 or 4 locations in this order, returning the first where the file exists:
-a. $base_directory + $path
-b. $base_directory + "static/" + $path
-c. $base_directory + "content/" + $path
-d. $base_directory + "themes" + $theme + "static/" + $path only if using a theme
+3. Search in the following locations in this order, returning the first where the file exists:
+1. $base_directory + $path
+2. $base_directory + "static/" + $path
+3. $base_directory + "content/" + $path
+4. $base_directory + $output_path + $path
+5. $base_directory + "themes" + $theme + "static/" + $path (only if using a theme)

In practice this means that `@/some/image.jpg`, `/content/some/image.jpg` and `content/some/image.jpg` will point to the
same thing.

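The step-2 rewrite in the docs hunk above (`@/` becomes `content/`, a leading `/` is trimmed) is what makes the three spellings in the last paragraph equivalent. A hypothetical one-function sketch of that normalization, not the documented implementation itself:

```rust
/// Simplified sketch of step 2 above: `@/` maps to `content/` and a leading
/// `/` is trimmed before the lookup starts.
fn normalize(path: &str) -> String {
    if let Some(rest) = path.strip_prefix("@/") {
        format!("content/{}", rest)
    } else {
        path.trim_start_matches('/').to_string()
    }
}

fn main() {
    for p in ["@/some/image.jpg", "/content/some/image.jpg", "content/some/image.jpg"] {
        // All three print `content/some/image.jpg`.
        println!("{} -> {}", p, normalize(p));
    }
}
```
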
@@ -1,5 +1,5 @@
name: zola
-version: 0.14.0
+version: 0.14.1
summary: A fast static site generator in a single binary with everything built-in.
description: |
A fast static site generator in a single binary with everything built-in.
@@ -21,7 +21,7 @@ parts:
zola:
source-type: git
source: https://github.com/getzola/zola.git
-source-tag: v0.14.0
+source-tag: v0.14.1
plugin: rust
rust-channel: stable
build-packages:

@@ -76,7 +76,7 @@ pub fn create_new_project(name: &str, force: bool) -> Result<()> {
let path = Path::new(name);

// Better error message than the rust default
-if path.exists() && !is_directory_quasi_empty(&path)? && !force {
+if path.exists() && !is_directory_quasi_empty(path)? && !force {
if name == "." {
bail!("The current directory is not an empty folder (hidden files are ignored).");
} else {
@@ -103,7 +103,7 @@ pub fn create_new_project(name: &str, force: bool) -> Result<()> {
.replace("%SEARCH%", &format!("{}", search))
.replace("%HIGHLIGHT%", &format!("{}", highlight));

-populate(&path, compile_sass, &config)?;
+populate(path, compile_sass, &config)?;

println!();
console::success(&format!(
@@ -122,7 +122,7 @@ fn populate(path: &Path, compile_sass: bool, config: &str) -> Result<()> {
if !path.exists() {
create_dir(path)?;
}
-create_file(&path.join("config.toml"), &config)?;
+create_file(&path.join("config.toml"), config)?;
create_dir(path.join("content"))?;
create_dir(path.join("templates"))?;
create_dir(path.join("static"))?;
@@ -152,7 +152,7 @@ mod tests {
let allowed = is_directory_quasi_empty(&dir)
.expect("An error happened reading the directory's contents");
remove_dir(&dir).unwrap();
-assert_eq!(true, allowed);
+assert!(allowed);
}

#[test]
@@ -170,7 +170,7 @@ mod tests {
.expect("An error happened reading the directory's contents");
remove_dir(&content).unwrap();
remove_dir(&dir).unwrap();
-assert_eq!(false, allowed);
+assert!(!allowed);
}

#[test]
@@ -188,7 +188,7 @@ mod tests {
.expect("An error happened reading the directory's contents");
remove_dir(&git).unwrap();
remove_dir(&dir).unwrap();
-assert_eq!(true, allowed);
+assert!(allowed);
}

#[test]
@@ -201,12 +201,12 @@ mod tests {
create_dir(&dir).expect("Could not create test directory");
populate(&dir, true, "").expect("Could not populate zola directories");

-assert_eq!(true, dir.join("config.toml").exists());
-assert_eq!(true, dir.join("content").exists());
-assert_eq!(true, dir.join("templates").exists());
-assert_eq!(true, dir.join("static").exists());
-assert_eq!(true, dir.join("themes").exists());
-assert_eq!(true, dir.join("sass").exists());
+assert!(dir.join("config.toml").exists());
+assert!(dir.join("content").exists());
+assert!(dir.join("templates").exists());
+assert!(dir.join("static").exists());
+assert!(dir.join("themes").exists());
+assert!(dir.join("sass").exists());

remove_dir_all(&dir).unwrap();
}
@@ -220,13 +220,13 @@ mod tests {
}
populate(&dir, true, "").expect("Could not populate zola directories");

-assert_eq!(true, dir.exists());
-assert_eq!(true, dir.join("config.toml").exists());
-assert_eq!(true, dir.join("content").exists());
-assert_eq!(true, dir.join("templates").exists());
-assert_eq!(true, dir.join("static").exists());
-assert_eq!(true, dir.join("themes").exists());
-assert_eq!(true, dir.join("sass").exists());
+assert!(dir.exists());
+assert!(dir.join("config.toml").exists());
+assert!(dir.join("content").exists());
+assert!(dir.join("templates").exists());
+assert!(dir.join("static").exists());
+assert!(dir.join("themes").exists());
+assert!(dir.join("sass").exists());

remove_dir_all(&dir).unwrap();
}
@@ -241,7 +241,7 @@ mod tests {
create_dir(&dir).expect("Could not create test directory");
populate(&dir, false, "").expect("Could not populate zola directories");

-assert_eq!(false, dir.join("sass").exists());
+assert!(!dir.join("sass").exists());

remove_dir_all(&dir).unwrap();
}

@@ -111,7 +111,10 @@ async fn handle_request(req: Request<Body>, mut root: PathBuf) -> Result<Respons
// otherwise `PathBuf` will interpret it as an absolute path
root.push(&decoded[1..]);

-let metadata = tokio::fs::metadata(root.as_path()).await?;
+let metadata = match tokio::fs::metadata(root.as_path()).await {
+Err(err) => return Ok(io_error(err)),
+Ok(metadata) => metadata,
+};
if metadata.is_dir() {
// if root is a directory, append index.html to try to read that instead
root.push("index.html");
@@ -120,16 +123,7 @@
let result = tokio::fs::read(&root).await;

let contents = match result {
-Err(err) => match err.kind() {
-std::io::ErrorKind::NotFound => return Ok(not_found()),
-std::io::ErrorKind::PermissionDenied => {
-return Ok(Response::builder()
-.status(StatusCode::FORBIDDEN)
-.body(Body::empty())
-.unwrap())
-}
-_ => panic!("{}", err),
-},
+Err(err) => return Ok(io_error(err)),
Ok(contents) => contents,
};

@@ -176,6 +170,16 @@ fn method_not_allowed() -> Response<Body> {
.expect("Could not build Method Not Allowed response")
}

+fn io_error(err: std::io::Error) -> Response<Body> {
+match err.kind() {
+std::io::ErrorKind::NotFound => not_found(),
+std::io::ErrorKind::PermissionDenied => {
+Response::builder().status(StatusCode::FORBIDDEN).body(Body::empty()).unwrap()
+}
+_ => panic!("{}", err),
+}
+}
+
fn not_found() -> Response<Body> {
let not_found_path = RelativePath::new("404.html");
let content = SITE_CONTENT.read().unwrap().get(not_found_path).cloned();
@@ -218,6 +222,7 @@ fn rebuild_done_handling(broadcaster: &Sender, res: Result<()>, reload_path: &st
}
}

+#[allow(clippy::too_many_arguments)]
fn create_new_site(
root_dir: &Path,
interface: &str,
@@ -261,6 +266,7 @@ fn create_new_site(
Ok((site, address))
}

+#[allow(clippy::too_many_arguments)]
pub fn serve(
root_dir: &Path,
interface: &str,
@@ -294,12 +300,12 @@ pub fn serve(
return Err(format!("Cannot start server on address {}.", address).into());
}

-let config_filename = config_file.file_name().unwrap().to_str().unwrap_or("config.toml");
+let config_path = config_file.to_str().unwrap_or("config.toml");

// An array of (path, bool, bool) where the path should be watched for changes, and the boolean value
// indicates whether this file/folder must exist for zola serve to operate
let watch_this = vec![
-(config_filename, WatchMode::Required),
+(config_path, WatchMode::Required),
("content", WatchMode::Required),
("sass", WatchMode::Condition(site.config.compile_sass)),
("static", WatchMode::Optional),
@@ -459,12 +465,7 @@ pub fn serve(
} else {
rebuild_done_handling(
&broadcaster,
-copy_file(
-&path,
-&site.output_path,
-&site.static_path,
-site.config.hard_link_static,
-),
+copy_file(path, &site.output_path, &site.static_path, site.config.hard_link_static),
&partial_path.to_string_lossy(),
);
}
@@ -485,7 +486,7 @@ pub fn serve(
Some(s)
}
Err(e) => {
-console::error(&format!("{}", e));
+console::unravel_errors("Failed to build the site", &e);
None
}
};
@@ -517,7 +518,7 @@ pub fn serve(
);

let start = Instant::now();
-match detect_change_kind(&root_dir, &path, &config_filename) {
+match detect_change_kind(root_dir, &path, config_path) {
(ChangeKind::Content, _) => {
console::info(&format!("-> Content changed {}", path.display()));

@@ -672,9 +673,8 @@ fn detect_change_kind(pwd: &Path, path: &Path, config_filename: &str) -> (Change
/// Check if the directory at path contains any file
fn is_folder_empty(dir: &Path) -> bool {
// Can panic if we don't have the rights I guess?
-let files: Vec<_> =
-read_dir(dir).expect("Failed to read a directory to see if it was empty").collect();
-files.is_empty()
+read_dir(dir).expect("Failed to read a directory to see if it was empty").next().is_none()
}

#[cfg(test)]
@@ -698,7 +698,7 @@ mod tests {
];

for t in test_cases {
-assert!(is_temp_file(&t));
+assert!(is_temp_file(t));
}
}

@@ -750,7 +750,7 @@ mod tests {
];

for (expected, pwd, path, config_filename) in test_cases {
-assert_eq!(expected, detect_change_kind(&pwd, &path, &config_filename));
+assert_eq!(expected, detect_change_kind(pwd, path, config_filename));
}
}