From 0d9272ca4da28a04b28bdd72039dfeb779870318 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 2 Jan 2025 23:22:38 -0500 Subject: [PATCH 001/306] Create stubbed wws crate. --- wws/.gitignore | 2 ++ wws/Cargo.lock | 7 +++++++ wws/Cargo.toml | 15 +++++++++++++++ wws/src/main.rs | 3 +++ 4 files changed, 27 insertions(+) create mode 100644 wws/.gitignore create mode 100644 wws/Cargo.lock create mode 100644 wws/Cargo.toml create mode 100644 wws/src/main.rs diff --git a/wws/.gitignore b/wws/.gitignore new file mode 100644 index 0000000000..8cbdf2177b --- /dev/null +++ b/wws/.gitignore @@ -0,0 +1,2 @@ +# Artifacts +target/ diff --git a/wws/Cargo.lock b/wws/Cargo.lock new file mode 100644 index 0000000000..1fd0a6c676 --- /dev/null +++ b/wws/Cargo.lock @@ -0,0 +1,7 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "wws" +version = "2025.1.1" diff --git a/wws/Cargo.toml b/wws/Cargo.toml new file mode 100644 index 0000000000..e2100ea776 --- /dev/null +++ b/wws/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "wws" +description = "Wilson's Web Server - Serve a zoo of user content (files, code, etc)" +repository = "https://github.com/scpwiki/wikijump/tree/develop/wws" +readme = "README.md" +license = "AGPL-3.0-or-later" +keywords = ["wikijump", "api", "backend", "wiki"] +categories = ["asynchronous", "caching", "web-programming::http-server"] +exclude = [".gitignore", ".editorconfig"] + +version = "2025.1.1" +authors = ["Emmie Smith "] +edition = "2021" + +[dependencies] diff --git a/wws/src/main.rs b/wws/src/main.rs new file mode 100644 index 0000000000..51a2df474b --- /dev/null +++ b/wws/src/main.rs @@ -0,0 +1,3 @@ +// TODO + +fn main() {} From 4bb56dc94df402fc448060c6fa8421e09a64de0a Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 2 Jan 2025 23:26:36 -0500 Subject: [PATCH 002/306] Update dependabot settings for new crate. Also remove the old ftml dependabot entry. --- .github/dependabot.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml index e0100e0e23..67c91559ef 100644 --- a/.github/dependabot.yaml +++ b/.github/dependabot.yaml @@ -14,8 +14,8 @@ updates: schedule: interval: weekly - # FTML + # WWS - package-ecosystem: cargo - directory: "/ftml" + directory: "/ws" schedule: interval: weekly From 0f65163d6c69396e09efe26a92fda28bf0e25e58 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Fri, 3 Jan 2025 00:08:52 -0500 Subject: [PATCH 003/306] Add initial set of features. --- wws/Cargo.lock | 662 +++++++++++++++++++++++++++++++++++++++++++++++++ wws/Cargo.toml | 2 + 2 files changed, 664 insertions(+) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 1fd0a6c676..0368577d3a 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -2,6 +2,668 @@ # It is not intended for manual editing. 
version = 4 +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "axum" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d6fd624c75e18b3b4c6b9caf42b1afe24437daaee904069137d8bab077be8b8" +dependencies = [ + "axum-core", + "axum-macros", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-core" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1362f362fd16024ae199c1970ce98f9661bf5ef94b9808fee734bc3698b733" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-extra" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fc6f625a1f7705c6cf62d0d070794e94668988b1c38111baeec177c715f7b" +dependencies = [ + "axum", + "axum-core", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "serde", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-macros" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "604fde5e028fea851ce1d8570bbdc034bec850d157f7569d10f347d06808c05c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "backtrace" +version = "0.3.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets", +] + +[[package]] +name = "bytes" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-core", + "futures-task", + "pin-project-lite", + "pin-utils", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "h2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + +[[package]] +name = "http" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2", + "http", + "http-body", + "httpdate", + "pin-project-lite", + "smallvec", + "tokio", +] + +[[package]] +name = "hyper-util" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +dependencies = [ + "bytes", + 
"futures-util", + "http", + "http-body", + "hyper", + "pin-project-lite", + "tokio", + "tower-service", +] + +[[package]] +name = "indexmap" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "itoa" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" + +[[package]] +name = "libc" +version = "0.2.169" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" + +[[package]] +name = "log" +version = "0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" + +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ffbe83022cedc1d264172192511ae958937694cd57ce297164951b8b3568394" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +dependencies = [ + "libc", + "wasi", + "windows-sys", +] + +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project-lite" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "proc-macro2" +version = "1.0.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name 
= "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustversion" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" + +[[package]] +name = "serde" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "syn" +version = "2.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "987bc0be1cdea8b10216bd06e2ca407d40b9543468fafd3ddfb02f36e77f71f3" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" + +[[package]] +name = "tokio" +version = "1.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "pin-project-lite", + "socket2", + "tokio-macros", + "windows-sys", +] + +[[package]] +name = "tokio-macros" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-util" +version = "0.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "log", + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +dependencies = [ + "once_cell", +] + +[[package]] +name = "unicode-ident" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + [[package]] name = "wws" version = "2025.1.1" +dependencies = [ + "axum", + "axum-extra", +] diff --git a/wws/Cargo.toml 
b/wws/Cargo.toml index e2100ea776..f03505b2d6 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -13,3 +13,5 @@ authors = ["Emmie Smith "] edition = "2021" [dependencies] +axum = { version = "0.8", features = [ "http2", "macros", "tokio", "tower-log", "tracing" ], default-features = false } +axum-extra = { version = "0.10", features = [ "attachment" ] } From 94deaab0c6f920501efb8b19ed8273bf68e49da4 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Fri, 3 Jan 2025 01:21:16 -0500 Subject: [PATCH 004/306] Update dependencies. --- wws/Cargo.lock | 16 ++++++++++++++++ wws/Cargo.toml | 6 ++++-- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 0368577d3a..88562d38be 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -17,6 +17,12 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +[[package]] +name = "anyhow" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" + [[package]] name = "atomic-waker" version = "1.1.2" @@ -256,6 +262,12 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "httparse" +version = "1.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" + [[package]] name = "httpdate" version = "1.0.3" @@ -274,7 +286,9 @@ dependencies = [ "h2", "http", "http-body", + "httparse", "httpdate", + "itoa", "pin-project-lite", "smallvec", "tokio", @@ -664,6 +678,8 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" name = "wws" version = "2025.1.1" dependencies = [ + "anyhow", "axum", "axum-extra", + "tokio", ] diff --git a/wws/Cargo.toml b/wws/Cargo.toml index f03505b2d6..6f5cee90e6 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -6,12 +6,14 @@ readme = "README.md" license = "AGPL-3.0-or-later" keywords = ["wikijump", "api", "backend", "wiki"] categories = ["asynchronous", "caching", "web-programming::http-server"] -exclude = [".gitignore", ".editorconfig"] +exclude = [".gitignore"] version = "2025.1.1" authors = ["Emmie Smith "] edition = "2021" [dependencies] -axum = { version = "0.8", features = [ "http2", "macros", "tokio", "tower-log", "tracing" ], default-features = false } +anyhow = "1" +axum = { version = "0.8", features = [ "http1", "http2", "macros", "tokio", "tower-log", "tracing" ], default-features = false } axum-extra = { version = "0.10", features = [ "attachment" ] } +tokio = { version = "1", features = ["macros", "rt-multi-thread"] } From b3ea52f9b01176f2138c65aa7857c3e7c26719b7 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 01:00:06 -0500 Subject: [PATCH 005/306] Set initial main.rs --- wws/Cargo.lock | 1 + wws/Cargo.toml | 1 + wws/src/main.rs | 50 +++++++++++++++++++++++++++++++++++++++++++++++-- 3 files changed, 50 insertions(+), 2 deletions(-) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 88562d38be..2097987fb1 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -682,4 +682,5 @@ dependencies = [ "axum", "axum-extra", "tokio", + "tower", ] diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 6f5cee90e6..1b1e80094e 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -17,3 +17,4 @@ anyhow = "1" axum = { version = "0.8", features = [ "http1", "http2", "macros", "tokio", "tower-log", "tracing" ], default-features = false } axum-extra = { version = 
"0.10", features = [ "attachment" ] } tokio = { version = "1", features = ["macros", "rt-multi-thread"] } +tower = "0.5" diff --git a/wws/src/main.rs b/wws/src/main.rs index 51a2df474b..2275107f8b 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -1,3 +1,49 @@ -// TODO +use axum::{ + body::Body, + extract::{FromRequestParts, Path, Request}, + http::{request::Parts, StatusCode}, + response::{Html, IntoResponse, Response}, + routing::{any, get}, + RequestPartsExt, Router, +}; +use axum_extra::extract::Host; +use tower::util::ServiceExt; -fn main() {} +#[tokio::main] +async fn main() -> anyhow::Result<()> { + // Router that serves framerail + let main_router = Router::new().route("/_TODO", get(handler)); // handle wjfiles routes + + // Router that serves wjfiles + let file_router = Router::new() + .route("/local--files/{page_slug}/{filename}", get(handler)) + .route("/local--code/{page_slug}/{index}", get(handler)) + .route("/local--html/{page_slug}/{id}", get(handler)) + .route("/-/file/{page_slug}/{filename}", get(handler)) + .route("/-/download/{page_slug}/{filename}", get(handler)) + .route("/-/code/{page_slug}/{index}", get(handler)) + .route("/-/html/{page_slug}/{hash}", get(handler)) + .route("/{*path}", get(handler)); + + let app = Router::new().route( + "/{*path}", + any(|Host(hostname): Host, request: Request| async move { + match hostname.as_str() { + "api.mydomain.com" => file_router.oneshot(request).await, + _ => main_router.oneshot(request).await, + } + }), + ); + // TODO .layer(Extension(state)); + + // run it + let listener = tokio::net::TcpListener::bind("[::]:8080").await?; + + println!("listening on {}", listener.local_addr()?); + axum::serve(listener, app).await?; + Ok(()) +} + +async fn handler() -> Html<&'static str> { + Html("
<h1>Hello, World!</h1>
") +} From ea8a7afa0a50f288157f4e1b25a3723aa51eb35f Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 01:09:51 -0500 Subject: [PATCH 006/306] Add file header for wws. --- wws/Cargo.toml | 2 +- wws/misc/header.txt | 20 ++++++++++++++++++++ wws/src/main.rs | 20 ++++++++++++++++++++ 3 files changed, 41 insertions(+), 1 deletion(-) create mode 100644 wws/misc/header.txt diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 1b1e80094e..756766d973 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "wws" -description = "Wilson's Web Server - Serve a zoo of user content (files, code, etc)" +description = "Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc)" repository = "https://github.com/scpwiki/wikijump/tree/develop/wws" readme = "README.md" license = "AGPL-3.0-or-later" diff --git a/wws/misc/header.txt b/wws/misc/header.txt new file mode 100644 index 0000000000..1050e3b5da --- /dev/null +++ b/wws/misc/header.txt @@ -0,0 +1,20 @@ +/* + * (FILENAME) + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + diff --git a/wws/src/main.rs b/wws/src/main.rs index 2275107f8b..f2aa2583b8 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -1,3 +1,23 @@ +/* + * main.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + use axum::{ body::Body, extract::{FromRequestParts, Path, Request}, From ffbaa44ea4bbd1ef9712af6791fb175a996f1c51 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 01:33:39 -0500 Subject: [PATCH 007/306] Add crate doc. --- wws/src/main.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/wws/src/main.rs b/wws/src/main.rs index f2aa2583b8..cf9bf32ba1 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -18,6 +18,11 @@ * along with this program. If not, see . */ +//! A server to handle incoming web requests. +//! +//! Depending on the hostname, requests are routed to either framerail +//! or given to logic to serve wjfiles data. 
+ use axum::{ body::Body, extract::{FromRequestParts, Path, Request}, From c6b052a3cfdb3fd70e19e124244629c5a200d7f9 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 02:07:56 -0500 Subject: [PATCH 008/306] Split code out of main.rs --- wws/src/handler/mod.rs | 25 ++++++++++++++++ wws/src/main.rs | 43 ++++----------------------- wws/src/route.rs | 66 ++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 96 insertions(+), 38 deletions(-) create mode 100644 wws/src/handler/mod.rs create mode 100644 wws/src/route.rs diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs new file mode 100644 index 0000000000..328e23f297 --- /dev/null +++ b/wws/src/handler/mod.rs @@ -0,0 +1,25 @@ +/* + * handler/mod.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use axum::response::Html; + +pub async fn handle_hello_world() -> Html<&'static str> { + Html("
<h1>Hello, World!</h1>
") +} diff --git a/wws/src/main.rs b/wws/src/main.rs index cf9bf32ba1..bc77fa4eb6 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -23,43 +23,14 @@ //! Depending on the hostname, requests are routed to either framerail //! or given to logic to serve wjfiles data. -use axum::{ - body::Body, - extract::{FromRequestParts, Path, Request}, - http::{request::Parts, StatusCode}, - response::{Html, IntoResponse, Response}, - routing::{any, get}, - RequestPartsExt, Router, -}; -use axum_extra::extract::Host; -use tower::util::ServiceExt; +mod handler; +mod route; + +use self::route::build_router; #[tokio::main] async fn main() -> anyhow::Result<()> { - // Router that serves framerail - let main_router = Router::new().route("/_TODO", get(handler)); // handle wjfiles routes - - // Router that serves wjfiles - let file_router = Router::new() - .route("/local--files/{page_slug}/{filename}", get(handler)) - .route("/local--code/{page_slug}/{index}", get(handler)) - .route("/local--html/{page_slug}/{id}", get(handler)) - .route("/-/file/{page_slug}/{filename}", get(handler)) - .route("/-/download/{page_slug}/{filename}", get(handler)) - .route("/-/code/{page_slug}/{index}", get(handler)) - .route("/-/html/{page_slug}/{hash}", get(handler)) - .route("/{*path}", get(handler)); - - let app = Router::new().route( - "/{*path}", - any(|Host(hostname): Host, request: Request| async move { - match hostname.as_str() { - "api.mydomain.com" => file_router.oneshot(request).await, - _ => main_router.oneshot(request).await, - } - }), - ); - // TODO .layer(Extension(state)); + let app = build_router(); // run it let listener = tokio::net::TcpListener::bind("[::]:8080").await?; @@ -68,7 +39,3 @@ async fn main() -> anyhow::Result<()> { axum::serve(listener, app).await?; Ok(()) } - -async fn handler() -> Html<&'static str> { - Html("
<h1>Hello, World!</h1>
") -} diff --git a/wws/src/route.rs b/wws/src/route.rs new file mode 100644 index 0000000000..817cd4949a --- /dev/null +++ b/wws/src/route.rs @@ -0,0 +1,66 @@ +/* + * route.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use crate::handler::handle_hello_world; +use axum::{ + body::Body, + extract::{FromRequestParts, Path, Request}, + http::{request::Parts, StatusCode}, + response::{Html, IntoResponse, Response}, + routing::{any, get}, + RequestPartsExt, Router, +}; +use axum_extra::extract::Host; +use tower::util::ServiceExt; + +pub fn build_router() -> Router { + // Router that serves framerail + let main_router = Router::new().route("/_TODO", get(handle_hello_world)); // handle wjfiles routes + + // Router that serves wjfiles + let file_router = Router::new() + .route( + "/local--files/{page_slug}/{filename}", + get(handle_hello_world), + ) + .route("/local--code/{page_slug}/{index}", get(handle_hello_world)) + .route("/local--html/{page_slug}/{id}", get(handle_hello_world)) + .route("/-/file/{page_slug}/{filename}", get(handle_hello_world)) + .route( + "/-/download/{page_slug}/{filename}", + get(handle_hello_world), + ) + .route("/-/code/{page_slug}/{index}", get(handle_hello_world)) + .route("/-/html/{page_slug}/{hash}", get(handle_hello_world)) + .route("/{*path}", get(handle_hello_world)); + + let app = Router::new().route( + "/{*path}", + any(|Host(hostname): Host, request: Request| async move { + match hostname.as_str() { + "api.mydomain.com" => file_router.oneshot(request).await, + _ => main_router.oneshot(request).await, + } + }), + ); + // TODO .layer(Extension(state)); + + app +} From 97ff6adc564dabc4d454a7018d9709f5e9b99b52 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 02:22:42 -0500 Subject: [PATCH 009/306] Use consistent name for redis client. 
--- deepwell/src/redis.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deepwell/src/redis.rs b/deepwell/src/redis.rs index 00f1ba0302..0ce00000f1 100644 --- a/deepwell/src/redis.rs +++ b/deepwell/src/redis.rs @@ -23,7 +23,7 @@ use crate::services::job::{ }; use anyhow::Result; use bb8::{ErrorSink, Pool}; -use redis::{Client as RedisClient, IntoConnectionInfo, RedisError}; +use redis::{IntoConnectionInfo, RedisError}; use rsmq_async::{PooledRsmq, RedisConnectionManager, RsmqConnection}; const REDIS_POOL_SIZE: u32 = 12; @@ -33,7 +33,7 @@ pub async fn connect(redis_uri: &str) -> Result<(redis::Client, PooledRsmq)> { let redis = redis::Client::open(redis_uri)?; let mut rsmq = { let connection_info = redis_uri.into_connection_info()?; - let redis = RedisClient::open(connection_info)?; + let redis = redis::Client::open(connection_info)?; let redis_conn = RedisConnectionManager::from_client(redis)?; let pool = Pool::builder() .max_size(REDIS_POOL_SIZE) From 69fffae659a61b8954a843a3da88a014bdb1f904 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 02:46:11 -0500 Subject: [PATCH 010/306] Modify comment. --- deepwell/src/config/secrets.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deepwell/src/config/secrets.rs b/deepwell/src/config/secrets.rs index be2eb47f4d..8efc1caae0 100644 --- a/deepwell/src/config/secrets.rs +++ b/deepwell/src/config/secrets.rs @@ -65,7 +65,7 @@ impl Secrets { pub fn load() -> Self { dotenv().ok(); - // Essentially .expect(), but allows inserting the environment variable name. + // Essentially .expect(), but allows printing the environment variable name in the message. macro_rules! get_env { ($name:expr) => { match env::var($name) { From 9f8724507383e8e1c094361dc95b39aa9cfd4f46 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 02:46:18 -0500 Subject: [PATCH 011/306] Change bucket timeout to be a constant. --- deepwell/src/api.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/deepwell/src/api.rs b/deepwell/src/api.rs index 8317f2c7bb..4ccc5fa09a 100644 --- a/deepwell/src/api.rs +++ b/deepwell/src/api.rs @@ -47,6 +47,8 @@ use std::fmt::{self, Debug}; use std::sync::Arc; use std::time::Duration; +const BUCKET_REQUEST_TIMEOUT: Duration = Duration::from_millis(500); + pub type ServerState = Arc; pub struct ServerStateInner { @@ -105,7 +107,7 @@ pub async fn build_server_state( bucket = bucket.with_path_style(); } - bucket.request_timeout = Some(Duration::from_millis(500)); + bucket.request_timeout = Some(BUCKET_REQUEST_TIMEOUT); bucket }; From 0b18ffd726ce34ea2b142a943c612429545631ff Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 02:55:47 -0500 Subject: [PATCH 012/306] Change structure doc. --- deepwell/src/config/object.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deepwell/src/config/object.rs b/deepwell/src/config/object.rs index 48ff4574d6..c2b7ccce51 100644 --- a/deepwell/src/config/object.rs +++ b/deepwell/src/config/object.rs @@ -29,7 +29,7 @@ use std::path::PathBuf; use std::time::Duration as StdDuration; use time::Duration as TimeDuration; -/// Primary configuration structure. +/// The primary configuration structure for the DEEPWELL server. /// /// * See `config/file.rs` for the structure as parsed from disk. /// * See `config.example.toml` for an explanation of all these fields. 
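
Note on the host-based dispatch set up in wws/src/route.rs (PATCH 008/306): the combined
router can be exercised directly with tower's ServiceExt::oneshot, the same call
build_router() itself uses to hand a request to the chosen sub-router. The following is a
minimal sketch, not part of these patches; it assumes build_router() is called from inside
the wws crate, that "api.mydomain.com" is still the placeholder wjfiles hostname from the
patch, and the demo_host_dispatch name, request path, and printed status are illustrative
only.

    use axum::{body::Body, http::Request};
    use tower::util::ServiceExt; // provides `oneshot` on the router

    async fn demo_host_dispatch() -> anyhow::Result<()> {
        // Combined router from wws/src/route.rs (hypothetical call site).
        let router = crate::route::build_router();

        // The Host header is inspected before any path matching, so this request
        // is handed to the wjfiles sub-router and matched against
        // "/local--files/{page_slug}/{filename}".
        let request = Request::builder()
            .uri("/local--files/some-page/image.png")
            .header("host", "api.mydomain.com") // placeholder hostname from the patch
            .body(Body::empty())?;

        let response = router.oneshot(request).await?;
        println!("status: {}", response.status());
        Ok(())
    }

Since every route currently points at the hello-world handler, the status is expected to
be 200 OK whichever arm matches; the point of the sketch is only to show that the Host
split happens before path routing.
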
From 49df892f183c45037106a7539d554c226dcd6a30 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 03:10:11 -0500 Subject: [PATCH 013/306] Add new dependencies. --- wws/Cargo.lock | 1328 ++++++++++++++++++++++++++++++++++++++++++++++-- wws/Cargo.toml | 7 +- 2 files changed, 1302 insertions(+), 33 deletions(-) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 2097987fb1..42ca71df0e 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -23,18 +23,76 @@ version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +[[package]] +name = "arc-swap" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + +[[package]] +name = "async-trait" +version = "0.1.84" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b1244b10dcd56c92219da4e14caa97e312079e185f04ba3eea25061561dc0a0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "atomic-waker" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" +[[package]] +name = "attohttpc" +version = "0.28.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "412b79ce053cef36eda52c25664b45ec92a21769488e20d5a8bf0b3c9e1a28cb" +dependencies = [ + "http 1.2.0", + "log", + "rustls 0.23.20", + "serde", + "serde_json", + "url", + "webpki-roots", +] + [[package]] name = "autocfg" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" +[[package]] +name = "aws-creds" +version = "0.37.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f84143206b9c72b3c5cb65415de60c7539c79cd1559290fddec657939131be0" +dependencies = [ + "attohttpc", + "home", + "log", + "quick-xml", + "rust-ini", + "serde", + "thiserror", + "time", + "url", +] + +[[package]] +name = "aws-region" +version = "0.25.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9aed3f9c7eac9be28662fdb3b0f4d1951e812f7c64fed4f0327ba702f459b3b" +dependencies = [ + "thiserror", +] + [[package]] name = "axum" version = "0.8.1" @@ -45,10 +103,10 @@ dependencies = [ "axum-macros", "bytes", "futures-util", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", - "hyper", + "hyper 1.5.2", "hyper-util", "itoa", "matchit", @@ -74,8 +132,8 @@ checksum = "df1362f362fd16024ae199c1970ce98f9661bf5ef94b9808fee734bc3698b733" dependencies = [ "bytes", "futures-util", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", @@ -96,8 +154,8 @@ dependencies = [ "axum-core", "bytes", "futures-util", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", @@ -134,18 +192,182 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "2.6.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + [[package]] name = "bytes" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +[[package]] +name = "cc" +version = "1.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a012a0df96dd6d06ba9a1b29d6402d1a5d77c6befd2566afdc26e10603dc93d7" +dependencies = [ + "shlex", +] + [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "const-random" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87e00182fe74b066627d63b85fd550ac2998d4b0bd86bfed477a0ae4c7c71359" +dependencies = [ + "const-random-macro", +] + +[[package]] +name = "const-random-macro" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" +dependencies = [ + "getrandom", + "once_cell", + "tiny-keccak", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +dependencies = [ + "libc", +] + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "dlv-list" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "442039f5147480ba31067cb00ada1adae6892028e40e45fc5de7b7df6dcc1b5f" +dependencies = [ + "const-random", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + [[package]] name = "equivalent" version = "1.0.1" @@ -158,6 +380,30 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + [[package]] name = "futures-channel" version = "0.3.31" @@ -165,6 +411,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", + "futures-sink", ] [[package]] @@ -173,6 +420,34 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "futures-sink" version = "0.3.31" @@ -191,10 +466,37 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ + "futures-channel", "futures-core", + "futures-io", + "futures-macro", + "futures-sink", "futures-task", + "memchr", "pin-project-lite", "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + 
+[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi", ] [[package]] @@ -214,7 +516,7 @@ dependencies = [ "fnv", "futures-core", "futures-sink", - "http", + "http 1.2.0", "indexmap", "slab", "tokio", @@ -222,12 +524,53 @@ dependencies = [ "tracing", ] +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" + [[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http" version = "1.2.0" @@ -239,6 +582,17 @@ dependencies = [ "itoa", ] +[[package]] +name = "http-body" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +dependencies = [ + "bytes", + "http 0.2.12", + "pin-project-lite", +] + [[package]] name = "http-body" version = "1.0.1" @@ -246,7 +600,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http", + "http 1.2.0", ] [[package]] @@ -257,8 +611,8 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "pin-project-lite", ] @@ -274,6 +628,29 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" +[[package]] +name = "hyper" +version = "0.14.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 0.2.12", + "http-body 0.4.6", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + [[package]] name = "hyper" version = "1.5.2" @@ -284,8 +661,8 @@ dependencies = [ "futures-channel", "futures-util", "h2", - "http", - "http-body", + "http 1.2.0", + "http-body 1.0.1", "httparse", "httpdate", "itoa", @@ -294,6 +671,20 @@ dependencies = [ "tokio", ] +[[package]] +name = "hyper-rustls" +version = 
"0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.32", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + [[package]] name = "hyper-util" version = "0.1.10" @@ -302,36 +693,181 @@ checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", "futures-util", - "http", - "http-body", - "hyper", + "http 1.2.0", + "http-body 1.0.1", + "hyper 1.5.2", "pin-project-lite", "tokio", "tower-service", ] [[package]] -name = "indexmap" -version = "2.7.0" +name = "icu_collections" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" dependencies = [ - "equivalent", - "hashbrown", + "displaydoc", + "yoke", + "zerofrom", + "zerovec", ] [[package]] -name = "itoa" -version = "1.0.14" +name = "icu_locid" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] [[package]] -name = "libc" -version = "0.2.169" +name = "icu_locid_transform" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" - +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + 
"stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" +dependencies = [ + "equivalent", + "hashbrown 0.15.2", +] + +[[package]] +name = "itoa" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" + +[[package]] +name = "libc" +version = "0.2.169" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" + +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + [[package]] name = "log" version = "0.4.22" @@ -344,6 +880,23 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" +[[package]] +name = "maybe-async" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "md5" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" + [[package]] name = "memchr" version = "2.7.4" @@ -373,9 +926,15 @@ checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ "libc", "wasi", - "windows-sys", + "windows-sys 0.52.0", ] +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + [[package]] name = "object" version = "0.36.7" @@ -391,12 +950,48 @@ version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "ordered-multimap" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49203cdcae0030493bad186b28da2fa25645fa276a51b6fec8010d281e02ef79" +dependencies = [ + "dlv-list", + "hashbrown 0.14.5", +] + [[package]] name = 
"percent-encoding" version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +[[package]] +name = "pin-project" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "pin-project-lite" version = "0.2.15" @@ -409,6 +1004,21 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] + [[package]] name = "proc-macro2" version = "1.0.92" @@ -418,6 +1028,16 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "quick-xml" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d3a6e5838b60e0e8fa7a43f22ade549a37d61f8bdbe636d0d7816191de969c2" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "quote" version = "1.0.38" @@ -427,18 +1047,298 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "redis" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0d7a6955c7511f60f3ba9e86c6d02b3c3f144f8c24b288d1f4e18074ab8bbec" +dependencies = [ + "arc-swap", + "async-trait", + "bytes", + "combine", + "futures", + "futures-util", + "itoa", + "percent-encoding", + "pin-project-lite", + "rustls 0.22.4", + "rustls-native-certs 0.7.3", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + "ryu", + "sha1_smol", + "socket2", + "tokio", + "tokio-retry", + "tokio-rustls 0.25.0", + "tokio-util", + "url", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rust-ini" +version = "0.21.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e310ef0e1b6eeb79169a1171daf9abcb87a2e17c03bee2c4bb100b55c75409f" +dependencies = [ + "cfg-if", + "ordered-multimap", + "trim-in-place", +] + +[[package]] +name = "rust-s3" +version = "0.35.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3df3f353b1f4209dcf437d777cda90279c397ab15a0cd6fd06bd32c88591533" +dependencies = [ + "async-trait", + "aws-creds", + "aws-region", + "base64 0.22.1", + "bytes", + "cfg-if", + "futures", + "hex", + "hmac", + "http 0.2.12", + "hyper 0.14.32", + "hyper-rustls", + "log", + "maybe-async", + "md5", + "percent-encoding", + "quick-xml", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", + "serde", + "serde_derive", + "serde_json", + "sha2", + "thiserror", + "time", + "tokio", + "tokio-rustls 0.24.1", + "tokio-stream", + "url", +] + [[package]] name = "rustc-demangle" version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring", + "rustls-webpki 0.101.7", + "sct", +] + +[[package]] +name = "rustls" +version = "0.22.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432" +dependencies = [ + "log", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls" +version = "0.23.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" +dependencies = [ + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile 1.0.4", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +dependencies = [ + "base64 0.21.7", +] + +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2bf47e6ff922db3825eb750c4e2ff784c6ff8fb9e13046ef6a1d1c5401b0b37" + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + 
"untrusted", +] + +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "schannel" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1863fd3768cd83c56a7f60faa4dc0d403f1b6df0a38c3c25f44b7894e45370d5" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "serde" version = "1.0.217" @@ -459,6 +1359,41 @@ dependencies = [ "syn", ] +[[package]] +name = "serde_json" +version = "1.0.134" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d00f4175c42ee48b15416f6193a959ba3a0d67fc699a0db9ad12df9f83991c7d" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "sha1_smol" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "slab" version = "0.4.9" @@ -481,9 +1416,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.52.0", ] +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "subtle" +version = "2.6.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + [[package]] name = "syn" version = "2.0.94" @@ -501,6 +1454,87 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "time" +version = "0.3.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tiny-keccak" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" +dependencies = [ + "crunchy", +] + +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + [[package]] name = "tokio" version = "1.42.0" @@ -514,7 +1548,7 @@ dependencies = [ "pin-project-lite", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.52.0", ] [[package]] @@ -528,6 +1562,49 @@ dependencies = [ "syn", ] +[[package]] +name = "tokio-retry" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" +dependencies = [ + "pin-project", + "rand", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +dependencies = [ + "rustls 0.22.4", + "rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.13" @@ -589,18 +1666,89 @@ dependencies = [ "once_cell", ] +[[package]] +name = "trim-in-place" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "343e926fc669bc8cde4fa3129ab681c63671bae288b1f1081ceee6d9d37904fc" + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + [[package]] name = "unicode-ident" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "webpki-roots" +version = "0.26.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "windows-sys" version = "0.52.0" @@ -610,6 +1758,15 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + [[package]] name = "windows-targets" version = "0.52.6" @@ -674,6 +1831,18 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + [[package]] name = "wws" version = "2025.1.1" @@ -681,6 +1850,103 @@ dependencies = [ "anyhow", "axum", "axum-extra", + "dotenvy", + "redis", + "rust-s3", "tokio", "tower", ] + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 756766d973..6f320f4aeb 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -14,7 +14,10 @@ edition = "2021" [dependencies] anyhow = "1" -axum = { version = "0.8", features = [ "http1", "http2", "macros", "tokio", "tower-log", "tracing" ], default-features = false } -axum-extra = { version = "0.10", features = [ "attachment" ] } +axum = { version = "0.8", features = ["http1", "http2", "macros", "tokio", "tower-log", "tracing"], default-features = false } +axum-extra = { version = "0.10", features = ["attachment"] } +dotenvy = "0.15" +redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "tokio-comp", "tokio-rustls-comp"] } +rust-s3 = { version = "0.35", features = ["with-tokio", "tokio-rustls-tls"], 
default-features = false } tokio = { version = "1", features = ["macros", "rt-multi-thread"] } tower = "0.5" From 06bb01514ac02d3152847c7f3ab2d80d28289bda Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 04:03:06 -0500 Subject: [PATCH 014/306] Change deepwell address logging to be on serve, not in config section. --- deepwell/src/config/object.rs | 1 - deepwell/src/main.rs | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/deepwell/src/config/object.rs b/deepwell/src/config/object.rs index c2b7ccce51..fb8cfb8a1f 100644 --- a/deepwell/src/config/object.rs +++ b/deepwell/src/config/object.rs @@ -241,7 +241,6 @@ impl Config { } info!("Configuration details:"); - info!("Serving on {}", self.address); info!( "Auto-restart on config change: {}", bool_str(self.watch_files), diff --git a/deepwell/src/main.rs b/deepwell/src/main.rs index 79d75f2483..1a66457027 100644 --- a/deepwell/src/main.rs +++ b/deepwell/src/main.rs @@ -69,6 +69,7 @@ use std::process; async fn main() -> Result<()> { // Load the configuration so we can set up let SetupConfig { secrets, config } = SetupConfig::load(); + let address = config.address; let run_seeder = config.run_seeder; // Configure the logger @@ -119,7 +120,7 @@ async fn main() -> Result<()> { info!("Building server..."); let server = api::build_server(app_state).await?; - info!("Listening to connections..."); + info!("Listening to connections on {address}..."); server.stopped().await; // block until end Ok(()) } From b26849f304140d0483247655f4b664c2f9bc9155 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 04:35:00 -0500 Subject: [PATCH 015/306] Set up basic server structure. --- wws/Cargo.lock | 389 ++++++++++++++++++++++++++++++++++++++ wws/Cargo.toml | 9 + wws/build.rs | 4 + wws/secrets.rs | 148 +++++++++++++++ wws/src/config/args.rs | 92 +++++++++ wws/src/config/mod.rs | 134 +++++++++++++ wws/src/config/object.rs | 31 +++ wws/src/config/secrets.rs | 54 ++++++ wws/src/handler/mod.rs | 2 + wws/src/info.rs | 29 +++ wws/src/main.rs | 33 +++- wws/src/route.rs | 2 +- wws/src/state.rs | 55 ++++++ wws/src/trace.rs | 38 ++++ 14 files changed, 1014 insertions(+), 6 deletions(-) create mode 100644 wws/build.rs create mode 100644 wws/secrets.rs create mode 100644 wws/src/config/args.rs create mode 100644 wws/src/config/mod.rs create mode 100644 wws/src/config/object.rs create mode 100644 wws/src/config/secrets.rs create mode 100644 wws/src/info.rs create mode 100644 wws/src/state.rs create mode 100644 wws/src/trace.rs diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 42ca71df0e..d3b21ac2a6 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -17,6 +17,64 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "anstream" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" + +[[package]] +name = "anstyle-parse" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" +dependencies = [ + "anstyle", + "windows-sys 0.59.0", +] + [[package]] name = "anyhow" version = "1.0.95" @@ -219,6 +277,15 @@ dependencies = [ "generic-array", ] +[[package]] +name = "built" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c360505aed52b7ec96a3636c3f039d99103c37d1d9b4f7a8c743d3ea9ffcd03b" +dependencies = [ + "git2", +] + [[package]] name = "byteorder" version = "1.5.0" @@ -237,6 +304,8 @@ version = "1.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a012a0df96dd6d06ba9a1b29d6402d1a5d77c6befd2566afdc26e10603dc93d7" dependencies = [ + "jobserver", + "libc", "shlex", ] @@ -246,6 +315,49 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "clap" +version = "4.5.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.5.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_lex" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" + +[[package]] +name = "color-backtrace" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "150fd80a270c0671379f388c8204deb6a746bb4eac8a6c03fe2460b2c0127ea0" +dependencies = [ + "backtrace", + "termcolor", +] + +[[package]] +name = "colorchoice" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" + [[package]] name = "combine" version = "4.6.7" @@ -505,6 +617,19 @@ version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +[[package]] +name = "git2" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724" +dependencies = [ + "bitflags", + "libc", + "libgit2-sys", + "log", + "url", +] + [[package]] name = "h2" version = "0.4.7" @@ -850,18 +975,63 @@ dependencies = [ "hashbrown 0.15.2", ] +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + [[package]] name = "itoa" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" +[[package]] +name = "jobserver" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +dependencies = [ + "libc", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + [[package]] name = "libc" version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +[[package]] +name = "libgit2-sys" +version = "0.17.0+1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10472326a8a6477c3c20a64547b0059e4b0d086869eee31e6d7da728a8eb7224" +dependencies = [ + "cc", + "libc", + "libz-sys", + "pkg-config", +] + +[[package]] +name = "libz-sys" +version = "1.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "litemap" version = "0.7.4" @@ -874,6 +1044,15 @@ version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + [[package]] name = "matchit" version = "0.8.4" @@ -929,6 +1108,16 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + [[package]] name = "num-conv" version = "0.1.0" @@ -966,6 +1155,12 @@ dependencies = [ "hashbrown 0.14.5", ] +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + [[package]] name = "percent-encoding" version = "2.3.1" @@ -1004,6 +1199,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "pkg-config" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" + [[package]] name = "powerfmt" version = "0.2.0" @@ -1106,6 +1307,56 @@ dependencies = [ "url", ] +[[package]] +name = "ref-map" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d22b73985e369f260445a5e08ad470117b30e522c91b4820585baa2e0cbf7075" + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + 
"regex-automata 0.4.9", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.8.5", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + [[package]] name = "ring" version = "0.17.8" @@ -1388,6 +1639,15 @@ dependencies = [ "digest", ] +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + [[package]] name = "shlex" version = "1.3.0" @@ -1431,6 +1691,18 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +[[package]] +name = "str-macro" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b7514866270741c7b03dd36e2c68f71cf064b230e8217c81bbd4d619d564864" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + [[package]] name = "subtle" version = "2.6.1" @@ -1465,6 +1737,15 @@ dependencies = [ "syn", ] +[[package]] +name = "termcolor" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" +dependencies = [ + "winapi-util", +] + [[package]] name = "thiserror" version = "1.0.69" @@ -1485,6 +1766,16 @@ dependencies = [ "syn", ] +[[package]] +name = "thread_local" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +dependencies = [ + "cfg-if", + "once_cell", +] + [[package]] name = "time" version = "0.3.37" @@ -1654,9 +1945,21 @@ checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", + "tracing-attributes", "tracing-core", ] +[[package]] +name = "tracing-attributes" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "tracing-core" version = "0.1.33" @@ -1664,6 +1967,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", ] [[package]] @@ -1719,6 +2052,24 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + [[package]] name = "version_check" version = "0.9.5" @@ -1749,6 +2100,37 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + [[package]] name = "windows-sys" version = "0.52.0" @@ -1850,11 +2232,18 @@ dependencies = [ "anyhow", "axum", "axum-extra", + "built", + "clap", + "color-backtrace", "dotenvy", "redis", + "ref-map", "rust-s3", + "str-macro", "tokio", "tower", + "tracing", + "tracing-subscriber", ] [[package]] diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 6f320f4aeb..bdee9388ad 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -16,8 +16,17 @@ edition = "2021" anyhow = "1" axum = { version = "0.8", features = ["http1", "http2", "macros", "tokio", "tower-log", "tracing"], default-features = false } axum-extra = { version = "0.10", features = ["attachment"] } +clap = "4" +color-backtrace = "0.6" dotenvy = "0.15" redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "tokio-comp", "tokio-rustls-comp"] } +ref-map = "0.1" rust-s3 = { version = "0.35", features = ["with-tokio", "tokio-rustls-tls"], default-features = false } +str-macro = "1" tokio = { version = "1", features = ["macros", "rt-multi-thread"] } tower = "0.5" +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter"] } + +[build-dependencies] +built = { 
version = "0.7", features = ["git2"] } diff --git a/wws/build.rs b/wws/build.rs new file mode 100644 index 0000000000..5aed94002c --- /dev/null +++ b/wws/build.rs @@ -0,0 +1,4 @@ +fn main() { + // Compile-time build information. + built::write_built_file().expect("Failed to write build information"); +} diff --git a/wws/secrets.rs b/wws/secrets.rs new file mode 100644 index 0000000000..4ebe3d5243 --- /dev/null +++ b/wws/secrets.rs @@ -0,0 +1,148 @@ +/* + * config/secrets.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use dotenvy::dotenv; +use ref_map::*; +use s3::{creds::Credentials, region::Region}; +use std::{env, process}; + +#[derive(Debug, Clone)] +pub struct Secrets { + /// The URL of the Redis database to connect to. + /// + /// Set using environment variable `REDIS_URL`. + pub redis_url: String, + + /// The name of the S3 bucket that file blobs are kept in. + /// The bucket must already exist prior to program invocation. + /// + /// Set using environment variable `S3_BUCKET`. + pub s3_bucket: String, + + /// The region to use for S3. + /// + /// Set using environment variable `S3_AWS_REGION` if standard, + /// or `S3_REGION_NAME` and `S3_CUSTOM_ENDPOINT` if custom. + pub s3_region: Region, + + /// Whether to use path style for S3. + /// + /// Set using environment variable `S3_PATH_STYLE`. + pub s3_path_style: bool, + + /// The credentials to use for S3. + /// + /// Set using environment variable `S3_ACCESS_KEY_ID` and `S3_SECRET_ACCESS_KEY`. + /// + /// Alternatively you can have it read from the AWS credentials file. + /// The profile to read from can be set in the `AWS_PROFILE_NAME` environment variable. + pub s3_credentials: Credentials, +} + +impl Secrets { + pub fn load() -> Self { + dotenv().ok(); + + // Essentially .expect(), but allows printing the environment variable name in the message. + macro_rules! get_env { + ($name:expr) => { + match env::var($name) { + Ok(value) => value, + Err(error) => { + eprintln!( + "Unable to read environment variable {}: {}", + $name, error, + ); + process::exit(1); + } + } + }; + } + + let redis_url = get_env!("REDIS_URL"); + + let s3_bucket = get_env!("S3_BUCKET"); + let s3_region = match env::var("S3_AWS_REGION") { + // Standard AWS S3 region, parse out into enum. + Ok(value) => { + match value.parse() { + Ok(region) => region, + Err(error) => { + eprintln!("S3_AWS_REGION variable is not a valid AWS region ID: {error}"); + process::exit(1); + } + } + } + + // Custom region, with a specific S3 endpoint. 
+ Err(_) => { + let region = get_env!("S3_REGION_NAME"); + let endpoint = get_env!("S3_CUSTOM_ENDPOINT"); + + Region::Custom { region, endpoint } + } + }; + + let s3_path_style = match get_env!("S3_PATH_STYLE").parse() { + Ok(path_style) => path_style, + Err(_) => { + eprintln!("S3_PATH_STYLE variable is not a valid boolean"); + process::exit(1); + } + }; + + let s3_credentials = { + // Try to read from environment + // Reads from S3_ACCESS_KEY_ID and S3_SECRET_ACCESS_KEY + let env_creds = Credentials::from_env_specific( + Some("S3_ACCESS_KEY_ID"), + Some("S3_SECRET_ACCESS_KEY"), + None, + None, + ); + + match env_creds { + Ok(credentials) => credentials, + Err(_) => { + // Try to read from profile + let profile_name = env::var("AWS_PROFILE_NAME").ok(); + let profile_name = profile_name.ref_map(|s| s.as_str()); + + match Credentials::from_profile(profile_name) { + Ok(credentials) => credentials, + Err(error) => { + eprintln!("Unable to read AWS credentials file: {error}"); + process::exit(1); + } + } + } + } + }; + + // Build and return + Secrets { + redis_url, + s3_bucket, + s3_region, + s3_path_style, + s3_credentials, + } + } +} diff --git a/wws/src/config/args.rs b/wws/src/config/args.rs new file mode 100644 index 0000000000..0c75b10b96 --- /dev/null +++ b/wws/src/config/args.rs @@ -0,0 +1,92 @@ +/* + * config/args.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ */
+
+use crate::info;
+use clap::builder::{BoolishValueParser, NonEmptyStringValueParser};
+use clap::{value_parser, Arg, ArgAction, Command};
+use std::net::{IpAddr, SocketAddr};
+
+#[derive(Debug, Clone)]
+pub struct Arguments {
+    pub enable_trace: bool,
+    pub address: SocketAddr,
+}
+
+impl Default for Arguments {
+    fn default() -> Arguments {
+        Arguments {
+            enable_trace: true,
+            address: "[::]:80".parse().unwrap(),
+        }
+    }
+}
+
+impl Arguments {
+    pub fn parse() -> Self {
+        let mut matches = Command::new("wws")
+            .author(info::PKG_AUTHORS)
+            .version(info::PKG_VERSION)
+            .about(info::PKG_DESCRIPTION)
+            .arg(
+                Arg::new("disable-trace")
+                    .short('q')
+                    .long("quiet")
+                    .long("disable-trace")
+                    .action(ArgAction::SetTrue)
+                    .help("Disable trace output."),
+            )
+            .arg(
+                Arg::new("host")
+                    .short('H')
+                    .long("host")
+                    .long("hostname")
+                    .value_name("HOST")
+                    .value_parser(value_parser!(IpAddr))
+                    .action(ArgAction::Set)
+                    .help("What host to listen on."),
+            )
+            .arg(
+                Arg::new("port")
+                    .short('p')
+                    .long("port")
+                    .value_name("PORT")
+                    .value_parser(value_parser!(u16))
+                    .action(ArgAction::Set)
+                    .help("What port to listen on."),
+            )
+            .get_matches();
+
+        let mut args = Arguments::default();
+
+        if matches.remove_one::<bool>("disable-trace") == Some(true) {
+            args.enable_trace = false;
+        }
+
+        if let Some(value) = matches.remove_one::<IpAddr>("host") {
+            args.address.set_ip(value);
+        }
+
+        if let Some(value) = matches.remove_one::<u16>("port") {
+            args.address.set_port(value);
+        }
+
+        args
+    }
+}
diff --git a/wws/src/config/mod.rs b/wws/src/config/mod.rs
new file mode 100644
index 0000000000..8529be97c0
--- /dev/null
+++ b/wws/src/config/mod.rs
@@ -0,0 +1,134 @@
+/*
+ * config/mod.rs
+ *
+ * DEEPWELL - Wikijump API provider and database manager
+ * Copyright (C) 2019-2025 Wikijump Team
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see .
+ */
+
+mod args;
+mod object;
+mod secrets;
+
+pub use self::object::Config;
+pub use self::secrets::Secrets;
+
+use self::args::Arguments;
+use dotenvy::dotenv;
+use ref_map::*;
+use s3::{creds::Credentials, region::Region};
+use std::{env, process};
+
+pub fn load_config() -> (Config, Secrets) {
+    dotenv().ok();
+
+    // Essentially .expect(), but allows printing the environment variable name in the message.
+    macro_rules! get_env {
+        ($name:expr) => {
+            match env::var($name) {
+                Ok(value) => value,
+                Err(error) => {
+                    eprintln!("Unable to read environment variable {}: {}", $name, error);
+                    process::exit(1);
+                }
+            }
+        };
+    }
+
+    // Process arguments and overrides
+    let Arguments {
+        enable_trace,
+        mut address,
+    } = Arguments::parse();
+
+    if let Ok(value) = env::var("ADDRESS") {
+        address = value.parse().expect("Unable to parse socket address");
+    }
+
+    // Process secrets
+    let redis_url = get_env!("REDIS_URL");
+
+    let s3_bucket = get_env!("S3_BUCKET");
+    let s3_region = match env::var("S3_AWS_REGION") {
+        // Standard AWS S3 region, parse out into enum.
+ Ok(value) => match value.parse() { + Ok(region) => region, + Err(error) => { + eprintln!("S3_AWS_REGION variable is not a valid AWS region ID: {error}"); + process::exit(1); + } + }, + + // Custom region, with a specific S3 endpoint. + Err(_) => { + let region = get_env!("S3_REGION_NAME"); + let endpoint = get_env!("S3_CUSTOM_ENDPOINT"); + + Region::Custom { region, endpoint } + } + }; + + let s3_path_style = match get_env!("S3_PATH_STYLE").parse() { + Ok(path_style) => path_style, + Err(_) => { + eprintln!("S3_PATH_STYLE variable is not a valid boolean"); + process::exit(1); + } + }; + + let s3_credentials = { + // Try to read from environment + // Reads from S3_ACCESS_KEY_ID and S3_SECRET_ACCESS_KEY + let env_creds = Credentials::from_env_specific( + Some("S3_ACCESS_KEY_ID"), + Some("S3_SECRET_ACCESS_KEY"), + None, + None, + ); + + match env_creds { + Ok(credentials) => credentials, + Err(_) => { + // Try to read from profile + let profile_name = env::var("AWS_PROFILE_NAME").ok(); + let profile_name = profile_name.ref_map(|s| s.as_str()); + + match Credentials::from_profile(profile_name) { + Ok(credentials) => credentials, + Err(error) => { + eprintln!("Unable to read AWS credentials file: {error}"); + process::exit(1); + } + } + } + } + }; + + // Build and return + let config = Config { + enable_trace, + address, + }; + + let secrets = Secrets { + redis_url, + s3_bucket, + s3_region, + s3_path_style, + s3_credentials, + }; + + (config, secrets) +} diff --git a/wws/src/config/object.rs b/wws/src/config/object.rs new file mode 100644 index 0000000000..09dca516a3 --- /dev/null +++ b/wws/src/config/object.rs @@ -0,0 +1,31 @@ +/* + * config/object.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use std::net::SocketAddr; + +/// The runtime configuration structure for the web server. +#[derive(Debug, Clone)] +pub struct Config { + /// Whether to enable tracing and colored backtrace. + pub enable_trace: bool, + + /// The address the server will be hosted on. + pub address: SocketAddr, +} diff --git a/wws/src/config/secrets.rs b/wws/src/config/secrets.rs new file mode 100644 index 0000000000..c22918065f --- /dev/null +++ b/wws/src/config/secrets.rs @@ -0,0 +1,54 @@ +/* + * config/secrets.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use s3::{creds::Credentials, region::Region}; + +#[derive(Debug, Clone)] +pub struct Secrets { + /// The URL of the Redis cache to connect to. + /// + /// Set using environment variable `REDIS_URL`. + pub redis_url: String, + + /// The name of the S3 bucket that file blobs are kept in. + /// The bucket must already exist prior to program invocation. + /// + /// Set using environment variable `S3_BUCKET`. + pub s3_bucket: String, + + /// The region to use for S3. + /// + /// Set using environment variable `S3_AWS_REGION` if standard, + /// or `S3_REGION_NAME` and `S3_CUSTOM_ENDPOINT` if custom. + pub s3_region: Region, + + /// Whether to use path style for S3. + /// + /// Set using environment variable `S3_PATH_STYLE`. + pub s3_path_style: bool, + + /// The credentials to use for S3. + /// + /// Set using environment variable `S3_ACCESS_KEY_ID` and `S3_SECRET_ACCESS_KEY`. + /// + /// Alternatively you can have it read from the AWS credentials file. + /// The profile to read from can be set in the `AWS_PROFILE_NAME` environment variable. + pub s3_credentials: Credentials, +} diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 328e23f297..e77ae9800e 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -18,6 +18,8 @@ * along with this program. If not, see . */ +// TODO + use axum::response::Html; pub async fn handle_hello_world() -> Html<&'static str> { diff --git a/wws/src/info.rs b/wws/src/info.rs new file mode 100644 index 0000000000..750e805212 --- /dev/null +++ b/wws/src/info.rs @@ -0,0 +1,29 @@ +/* + * info.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +mod build { + include!(concat!(env!("OUT_DIR"), "/built.rs")); +} + +#[allow(unused_imports)] +pub use self::build::{ + CFG_ENDIAN, GIT_COMMIT_HASH, NUM_JOBS, PKG_AUTHORS, PKG_DESCRIPTION, PKG_LICENSE, PKG_NAME, + PKG_REPOSITORY, PKG_VERSION, RUSTC_VERSION, TARGET, +}; diff --git a/wws/src/main.rs b/wws/src/main.rs index bc77fa4eb6..91f74c8248 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -23,19 +23,42 @@ //! Depending on the hostname, requests are routed to either framerail //! or given to logic to serve wjfiles data. 
+#[macro_use] +extern crate str_macro; + +#[macro_use] +extern crate tracing; + +mod config; mod handler; +mod info; mod route; +mod state; +mod trace; +use self::config::{load_config, Config, Secrets}; use self::route::build_router; +use self::state::ServerState; +use self::trace::setup_tracing; +use anyhow::Result; +use tokio::net::TcpListener; #[tokio::main] -async fn main() -> anyhow::Result<()> { - let app = build_router(); +async fn main() -> Result<()> { + let (config, secrets) = load_config(); + if config.enable_trace { + setup_tracing(); + } - // run it - let listener = tokio::net::TcpListener::bind("[::]:8080").await?; + let domains = (); + let state = ServerState::build(secrets)?; + let app = build_router(domains); + let listener = TcpListener::bind(config.address).await?; - println!("listening on {}", listener.local_addr()?); + info!( + address = str!(config.address), + "Listening to connections...", + ); axum::serve(listener, app).await?; Ok(()) } diff --git a/wws/src/route.rs b/wws/src/route.rs index 817cd4949a..192c5320a4 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -30,7 +30,7 @@ use axum::{ use axum_extra::extract::Host; use tower::util::ServiceExt; -pub fn build_router() -> Router { +pub fn build_router(domains: ()) -> Router { // Router that serves framerail let main_router = Router::new().route("/_TODO", get(handle_hello_world)); // handle wjfiles routes diff --git a/wws/src/state.rs b/wws/src/state.rs new file mode 100644 index 0000000000..b906e544bc --- /dev/null +++ b/wws/src/state.rs @@ -0,0 +1,55 @@ +/* + * state.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ */
+
+use crate::Secrets;
+use anyhow::Result;
+use redis::{Client as RedisClient, IntoConnectionInfo, RedisError};
+use s3::bucket::Bucket;
+use std::time::Duration;
+
+const BUCKET_REQUEST_TIMEOUT: Duration = Duration::from_millis(200);
+
+#[derive(Debug)]
+pub struct ServerState {
+    redis: redis::Client,
+    s3_bucket: Box<Bucket>,
+}
+
+impl ServerState {
+    pub fn build(secrets: Secrets) -> Result<Self> {
+        let redis = redis::Client::open(secrets.redis_url)?;
+        let s3_bucket = {
+            let mut bucket = Bucket::new(
+                &secrets.s3_bucket,
+                secrets.s3_region.clone(),
+                secrets.s3_credentials.clone(),
+            )?;
+
+            if secrets.s3_path_style {
+                bucket = bucket.with_path_style();
+            }
+
+            bucket.request_timeout = Some(BUCKET_REQUEST_TIMEOUT);
+            bucket
+        };
+
+        Ok(ServerState { redis, s3_bucket })
+    }
+}
diff --git a/wws/src/trace.rs b/wws/src/trace.rs
new file mode 100644
index 0000000000..cdae623d39
--- /dev/null
+++ b/wws/src/trace.rs
@@ -0,0 +1,38 @@
+/*
+ * trace.rs
+ *
+ * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc)
+ * Copyright (C) 2019-2025 Wikijump Team
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see .
+ */
+
+use tracing_subscriber::{fmt, layer::SubscriberExt, registry, util::SubscriberInitExt, EnvFilter};
+
+pub fn setup_tracing() {
+    registry()
+        .with(EnvFilter::try_from_default_env().unwrap_or_else(|_| {
+            // axum logs rejections from built-in extractors with the `axum::rejection`
+            // target, at `TRACE` level. `axum::rejection=trace` enables showing those events
+            concat!(
+                env!("CARGO_CRATE_NAME"),
+                "=debug,tower_http=debug,axum::rejection=trace",
+            )
+            .into()
+        }))
+        .with(fmt::layer())
+        .init();
+
+    color_backtrace::install();
+}

From 4aeef57eaca40acb3a7bf87940307b017a7a185f Mon Sep 17 00:00:00 2001
From: Emmie Maeda
Date: Sat, 4 Jan 2025 04:36:48 -0500
Subject: [PATCH 016/306] Remove some unused imports.

--- wws/src/config/args.rs | 1 - wws/src/main.rs | 2 +- wws/src/state.rs | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/wws/src/config/args.rs b/wws/src/config/args.rs index 0c75b10b96..1cc4aebe4e 100644 --- a/wws/src/config/args.rs +++ b/wws/src/config/args.rs @@ -19,7 +19,6 @@ */ use crate::info; -use clap::builder::{BoolishValueParser, NonEmptyStringValueParser}; use clap::{value_parser, Arg, ArgAction, Command}; use std::net::{IpAddr, SocketAddr}; diff --git a/wws/src/main.rs b/wws/src/main.rs index 91f74c8248..eb582c4953 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -36,7 +36,7 @@ mod route; mod state; mod trace; -use self::config::{load_config, Config, Secrets}; +use self::config::{load_config, Secrets}; use self::route::build_router; use self::state::ServerState; use self::trace::setup_tracing; diff --git a/wws/src/state.rs b/wws/src/state.rs index b906e544bc..daa6886d8b 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -20,7 +20,6 @@ use crate::Secrets; use anyhow::Result; -use redis::{Client as RedisClient, IntoConnectionInfo, RedisError}; use s3::bucket::Bucket; use std::time::Duration; From 9fd9009d1fab4ca519fb8acef57488ca8b3f8d5d Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 04:40:03 -0500 Subject: [PATCH 017/306] Create deepwell interface stub. --- wws/src/config/mod.rs | 3 +++ wws/src/config/secrets.rs | 5 +++++ wws/src/deepwell.rs | 24 ++++++++++++++++++++++++ wws/src/main.rs | 2 ++ 4 files changed, 34 insertions(+) create mode 100644 wws/src/deepwell.rs diff --git a/wws/src/config/mod.rs b/wws/src/config/mod.rs index 8529be97c0..f00effdb34 100644 --- a/wws/src/config/mod.rs +++ b/wws/src/config/mod.rs @@ -58,6 +58,8 @@ pub fn load_config() -> (Config, Secrets) { } // Process secrets + let deepwell_url = get_env!("DEEPWELL_URL"); + let redis_url = get_env!("REDIS_URL"); let s3_bucket = get_env!("S3_BUCKET"); @@ -123,6 +125,7 @@ pub fn load_config() -> (Config, Secrets) { }; let secrets = Secrets { + deepwell_url, redis_url, s3_bucket, s3_region, diff --git a/wws/src/config/secrets.rs b/wws/src/config/secrets.rs index c22918065f..5dcf1ea4ce 100644 --- a/wws/src/config/secrets.rs +++ b/wws/src/config/secrets.rs @@ -22,6 +22,11 @@ use s3::{creds::Credentials, region::Region}; #[derive(Debug, Clone)] pub struct Secrets { + /// The URL of the DEEPWELL backend server. + /// + /// Set using environment variable `DEEPWELL_URL`. + pub deepwell_url: String, + /// The URL of the Redis cache to connect to. /// /// Set using environment variable `REDIS_URL`. diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs new file mode 100644 index 0000000000..aa0c32c515 --- /dev/null +++ b/wws/src/deepwell.rs @@ -0,0 +1,24 @@ +/* + * deepwell.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ */ + +// TODO + +#[derive(Debug)] +pub struct Deepwell; diff --git a/wws/src/main.rs b/wws/src/main.rs index eb582c4953..f759a52cea 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -30,6 +30,7 @@ extern crate str_macro; extern crate tracing; mod config; +mod deepwell; mod handler; mod info; mod route; @@ -37,6 +38,7 @@ mod state; mod trace; use self::config::{load_config, Secrets}; +use self::deepwell::Deepwell; use self::route::build_router; use self::state::ServerState; use self::trace::setup_tracing; From 9c6806708135521131f86e694c6605f0381091f0 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 06:13:00 -0500 Subject: [PATCH 018/306] Add tower-http middleware. --- wws/Cargo.lock | 122 ++++++++++++++++++++++++++++++++++++++++++++++ wws/Cargo.toml | 3 ++ wws/src/info.rs | 27 ++++++++++ wws/src/macros.rs | 33 +++++++++++++ wws/src/main.rs | 11 +++-- wws/src/route.rs | 40 ++++++++++++++- wws/src/state.rs | 43 ++++++++-------- 7 files changed, 252 insertions(+), 27 deletions(-) create mode 100644 wws/src/macros.rs diff --git a/wws/Cargo.lock b/wws/Cargo.lock index d3b21ac2a6..c304e4f8b0 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -26,6 +26,21 @@ dependencies = [ "memchr", ] +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + [[package]] name = "anstream" version = "0.6.18" @@ -87,6 +102,22 @@ version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" +[[package]] +name = "async-compression" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df895a515f70646414f4b45c0b79082783b80552b373a68283012928df56f522" +dependencies = [ + "brotli", + "flate2", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "zstd", + "zstd-safe", +] + [[package]] name = "async-trait" version = "0.1.84" @@ -277,6 +308,27 @@ dependencies = [ "generic-array", ] +[[package]] +name = "brotli" +version = "7.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "4.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a45bd2e4095a8b518033b128020dd4a55aab1c0a381ba4404a472630f4bc362" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + [[package]] name = "built" version = "0.7.5" @@ -417,6 +469,15 @@ dependencies = [ "libc", ] +[[package]] +name = "crc32fast" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + [[package]] name = "crunchy" version = "0.2.2" @@ -486,6 +547,16 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +[[package]] +name = "flate2" +version = "1.0.35" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + [[package]] name = "fnv" version = "1.0.7" @@ -1925,6 +1996,26 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower-http" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" +dependencies = [ + "async-compression", + "bitflags", + "bytes", + "futures-core", + "http 1.2.0", + "http-body 1.0.1", + "pin-project-lite", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -2236,12 +2327,15 @@ dependencies = [ "clap", "color-backtrace", "dotenvy", + "http 1.2.0", + "once_cell", "redis", "ref-map", "rust-s3", "str-macro", "tokio", "tower", + "tower-http", "tracing", "tracing-subscriber", ] @@ -2339,3 +2433,31 @@ dependencies = [ "quote", "syn", ] + +[[package]] +name = "zstd" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcf2b778a664581e31e389454a7072dab1647606d44f7feea22cd5abb9c9f3f9" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54a3ab4db68cea366acc5c897c7b4d4d1b8994a9cd6e6f841f8964566a419059" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.13+zstd.1.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" +dependencies = [ + "cc", + "pkg-config", +] diff --git a/wws/Cargo.toml b/wws/Cargo.toml index bdee9388ad..73ab5f3cbe 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -19,12 +19,15 @@ axum-extra = { version = "0.10", features = ["attachment"] } clap = "4" color-backtrace = "0.6" dotenvy = "0.15" +http = "1" +once_cell = "1" redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "tokio-comp", "tokio-rustls-comp"] } ref-map = "0.1" rust-s3 = { version = "0.35", features = ["with-tokio", "tokio-rustls-tls"], default-features = false } str-macro = "1" tokio = { version = "1", features = ["macros", "rt-multi-thread"] } tower = "0.5" +tower-http = { version = "0.6.1", features = ["add-extension", "compression-br", "compression-deflate", "compression-gzip", "compression-zstd", "normalize-path", "set-header", "trace"] } tracing = "0.1" tracing-subscriber = { version = "0.3", features = ["env-filter"] } diff --git a/wws/src/info.rs b/wws/src/info.rs index 750e805212..918f52d1ad 100644 --- a/wws/src/info.rs +++ b/wws/src/info.rs @@ -22,8 +22,35 @@ mod build { include!(concat!(env!("OUT_DIR"), "/built.rs")); } +use once_cell::sync::Lazy; + #[allow(unused_imports)] pub use self::build::{ CFG_ENDIAN, GIT_COMMIT_HASH, NUM_JOBS, PKG_AUTHORS, PKG_DESCRIPTION, PKG_LICENSE, PKG_NAME, PKG_REPOSITORY, PKG_VERSION, RUSTC_VERSION, TARGET, }; + +pub static VERSION_INFO: Lazy = Lazy::new(|| { + let mut version = format!("v{PKG_VERSION}"); + + if let Some(commit_hash) = *GIT_COMMIT_HASH_SHORT { + str_write!(&mut version, " [{commit_hash}]"); + } + + version +}); + +pub static VERSION: Lazy = Lazy::new(|| format!("{PKG_NAME} {}", *VERSION_INFO)); + +pub static GIT_COMMIT_HASH_SHORT: Lazy> = + Lazy::new(|| build::GIT_COMMIT_HASH.map(|s| &s[..8])); + +#[test] +fn info() { + 
assert!(VERSION.starts_with(PKG_NAME)); + assert!(VERSION.ends_with(&*VERSION_INFO)); + + if let Some(hash) = *GIT_COMMIT_HASH_SHORT { + assert_eq!(hash.len(), 8); + } +} diff --git a/wws/src/macros.rs b/wws/src/macros.rs new file mode 100644 index 0000000000..34a5ec29a2 --- /dev/null +++ b/wws/src/macros.rs @@ -0,0 +1,33 @@ +/* + * macros.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +/// Like `std::write!()`, except it asserts the writing succeeded. +/// +/// This is done because the only failure mode for writing to a `String` +/// would be insufficient memory, which would cause an abort anyways. +/// +/// # See also +/// * [`str_writeln!`](macro.str_writeln.html) +macro_rules! str_write { + ($dest:expr, $($arg:tt)*) => {{ + use std::fmt::Write; + write!($dest, $($arg)*).expect("Writing to string failed"); + }}; +} diff --git a/wws/src/main.rs b/wws/src/main.rs index f759a52cea..a9c91660f7 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -29,6 +29,9 @@ extern crate str_macro; #[macro_use] extern crate tracing; +#[macro_use] +mod macros; + mod config; mod deepwell; mod handler; @@ -40,7 +43,7 @@ mod trace; use self::config::{load_config, Secrets}; use self::deepwell::Deepwell; use self::route::build_router; -use self::state::ServerState; +use self::state::build_server_state; use self::trace::setup_tracing; use anyhow::Result; use tokio::net::TcpListener; @@ -52,9 +55,9 @@ async fn main() -> Result<()> { setup_tracing(); } - let domains = (); - let state = ServerState::build(secrets)?; - let app = build_router(domains); + let deepwell_info = (); + let state = build_server_state(secrets)?; + let app = build_router(state, deepwell_info); let listener = TcpListener::bind(config.address).await?; info!( diff --git a/wws/src/route.rs b/wws/src/route.rs index 192c5320a4..a4e291565f 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -19,6 +19,8 @@ */ use crate::handler::handle_hello_world; +use crate::info; +use crate::state::ServerState; use axum::{ body::Body, extract::{FromRequestParts, Path, Request}, @@ -28,9 +30,14 @@ use axum::{ RequestPartsExt, Router, }; use axum_extra::extract::Host; +use http::header::{HeaderName, HeaderValue}; use tower::util::ServiceExt; +use tower_http::{ + add_extension::AddExtensionLayer, compression::CompressionLayer, + normalize_path::NormalizePathLayer, set_header::SetResponseHeaderLayer, trace::TraceLayer, +}; -pub fn build_router(domains: ()) -> Router { +pub fn build_router(state: ServerState, deepwell_info: ()) -> Router { // Router that serves framerail let main_router = Router::new().route("/_TODO", get(handle_hello_world)); // handle wjfiles routes @@ -60,7 +67,36 @@ pub fn build_router(domains: ()) -> Router { } }), ); - // TODO .layer(Extension(state)); + + macro_rules! 
header_value { + ($value:expr) => { + HeaderValue::from_str($value).expect("Version is not a valid header value") + }; + } + + let app = app + .layer(TraceLayer::new_for_http()) + .layer(NormalizePathLayer::trim_trailing_slash()) + .layer( + CompressionLayer::new() + .gzip(true) + .deflate(true) + .br(true) + .zstd(true), + ) + .layer(AddExtensionLayer::new(state)) + .layer(SetResponseHeaderLayer::overriding( + HeaderName::from_static("x-wikijump"), + Some(HeaderValue::from_static("1")), + )) + .layer(SetResponseHeaderLayer::overriding( + HeaderName::from_static("x-wikijump-wws-ver"), + Some(header_value!(&*info::VERSION_INFO)), + )) + .layer(SetResponseHeaderLayer::overriding( + HeaderName::from_static("x-wikijump-deepwell-ver"), + Some(header_value!(todo!())), + )); app } diff --git a/wws/src/state.rs b/wws/src/state.rs index daa6886d8b..ccbb1d86ef 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -21,34 +21,35 @@ use crate::Secrets; use anyhow::Result; use s3::bucket::Bucket; +use std::sync::Arc; use std::time::Duration; const BUCKET_REQUEST_TIMEOUT: Duration = Duration::from_millis(200); +pub type ServerState = Arc; + #[derive(Debug)] -pub struct ServerState { +pub struct ServerStateInner { redis: redis::Client, s3_bucket: Box, } -impl ServerState { - pub fn build(secrets: Secrets) -> Result { - let redis = redis::Client::open(secrets.redis_url)?; - let s3_bucket = { - let mut bucket = Bucket::new( - &secrets.s3_bucket, - secrets.s3_region.clone(), - secrets.s3_credentials.clone(), - )?; - - if secrets.s3_path_style { - bucket = bucket.with_path_style(); - } - - bucket.request_timeout = Some(BUCKET_REQUEST_TIMEOUT); - bucket - }; - - Ok(ServerState { redis, s3_bucket }) - } +pub fn build_server_state(secrets: Secrets) -> Result { + let redis = redis::Client::open(secrets.redis_url)?; + let s3_bucket = { + let mut bucket = Bucket::new( + &secrets.s3_bucket, + secrets.s3_region.clone(), + secrets.s3_credentials.clone(), + )?; + + if secrets.s3_path_style { + bucket = bucket.with_path_style(); + } + + bucket.request_timeout = Some(BUCKET_REQUEST_TIMEOUT); + bucket + }; + + Ok(Arc::new(ServerStateInner { redis, s3_bucket })) } From 3252cd21e0bc7fd79fade1604ddc47bfedd06d4b Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 06:46:04 -0500 Subject: [PATCH 019/306] Set up JSONRPC client. 
--- wws/Cargo.lock | 241 +++++++++++++++++++++++++++++++++++++++++++- wws/Cargo.toml | 1 + wws/src/deepwell.rs | 22 +++- 3 files changed, 258 insertions(+), 6 deletions(-) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index c304e4f8b0..ec9ab2e1ff 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -207,7 +207,7 @@ dependencies = [ "serde", "sync_wrapper", "tokio", - "tower", + "tower 0.5.2", "tower-layer", "tower-service", "tracing", @@ -249,7 +249,7 @@ dependencies = [ "mime", "pin-project-lite", "serde", - "tower", + "tower 0.5.2", "tower-layer", "tower-service", "tracing", @@ -361,6 +361,12 @@ dependencies = [ "shlex", ] +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + [[package]] name = "cfg-if" version = "1.0.0" @@ -643,6 +649,12 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" + [[package]] name = "futures-util" version = "0.3.31" @@ -865,6 +877,7 @@ dependencies = [ "pin-project-lite", "smallvec", "tokio", + "want", ] [[package]] @@ -881,6 +894,24 @@ dependencies = [ "tokio-rustls 0.24.1", ] +[[package]] +name = "hyper-rustls" +version = "0.27.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" +dependencies = [ + "futures-util", + "http 1.2.0", + "hyper 1.5.2", + "hyper-util", + "log", + "rustls 0.23.20", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.1", + "tower-service", +] + [[package]] name = "hyper-util" version = "0.1.10" @@ -888,13 +919,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" dependencies = [ "bytes", + "futures-channel", "futures-util", "http 1.2.0", "http-body 1.0.1", "hyper 1.5.2", "pin-project-lite", + "socket2", "tokio", "tower-service", + "tracing", ] [[package]] @@ -1058,6 +1092,26 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" +[[package]] +name = "jni" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" +dependencies = [ + "cesu8", + "combine", + "jni-sys", + "log", + "thiserror", + "walkdir", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + [[package]] name = "jobserver" version = "0.1.32" @@ -1067,6 +1121,77 @@ dependencies = [ "libc", ] +[[package]] +name = "jsonrpsee" +version = "0.24.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5c71d8c1a731cc4227c2f698d377e7848ca12c8a48866fc5e6951c43a4db843" +dependencies = [ + "jsonrpsee-core", + "jsonrpsee-http-client", +] + +[[package]] +name = "jsonrpsee-core" +version = "0.24.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2882f6f8acb9fdaec7cefc4fd607119a9bd709831df7d7672a1d3b644628280" +dependencies = [ + 
"async-trait", + "bytes", + "futures-timer", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "jsonrpsee-types", + "pin-project", + "rustc-hash", + "serde", + "serde_json", + "thiserror", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "jsonrpsee-http-client" +version = "0.24.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3638bc4617f96675973253b3a45006933bde93c2fd8a6170b33c777cc389e5b" +dependencies = [ + "async-trait", + "base64 0.22.1", + "http-body 1.0.1", + "hyper 1.5.2", + "hyper-rustls 0.27.5", + "hyper-util", + "jsonrpsee-core", + "jsonrpsee-types", + "rustls 0.23.20", + "rustls-platform-verifier", + "serde", + "serde_json", + "thiserror", + "tokio", + "tower 0.4.13", + "tracing", + "url", +] + +[[package]] +name = "jsonrpsee-types" +version = "0.24.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a178c60086f24cc35bb82f57c651d0d25d99c4742b4d335de04e97fa1f08a8a1" +dependencies = [ + "http 1.2.0", + "serde", + "serde_json", + "thiserror", +] + [[package]] name = "lazy_static" version = "1.5.0" @@ -1189,12 +1314,40 @@ dependencies = [ "winapi", ] +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + [[package]] name = "num-conv" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + [[package]] name = "object" version = "0.36.7" @@ -1471,7 +1624,7 @@ dependencies = [ "hmac", "http 0.2.12", "hyper 0.14.32", - "hyper-rustls", + "hyper-rustls 0.24.2", "log", "maybe-async", "md5", @@ -1497,6 +1650,12 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +[[package]] +name = "rustc-hash" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" + [[package]] name = "rustls" version = "0.21.12" @@ -1529,6 +1688,7 @@ version = "0.23.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" dependencies = [ + "log", "once_cell", "ring", "rustls-pki-types", @@ -1586,6 +1746,33 @@ version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2bf47e6ff922db3825eb750c4e2ff784c6ff8fb9e13046ef6a1d1c5401b0b37" +[[package]] +name = "rustls-platform-verifier" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afbb878bdfdf63a336a5e63561b1835e7a8c91524f51621db870169eac84b490" +dependencies = [ + "core-foundation", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls 0.23.20", + "rustls-native-certs 0.7.3", + 
"rustls-platform-verifier-android", + "rustls-webpki 0.102.8", + "security-framework", + "security-framework-sys", + "webpki-roots", + "winapi", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + [[package]] name = "rustls-webpki" version = "0.101.7" @@ -1619,6 +1806,15 @@ version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + [[package]] name = "schannel" version = "0.1.27" @@ -1648,6 +1844,7 @@ dependencies = [ "core-foundation", "core-foundation-sys", "libc", + "num-bigint", "security-framework-sys", ] @@ -1956,6 +2153,16 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" +dependencies = [ + "rustls 0.23.20", + "tokio", +] + [[package]] name = "tokio-stream" version = "0.1.17" @@ -1980,6 +2187,21 @@ dependencies = [ "tokio", ] +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower" version = "0.5.2" @@ -2167,6 +2389,16 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "want" version = "0.3.1" @@ -2328,13 +2560,14 @@ dependencies = [ "color-backtrace", "dotenvy", "http 1.2.0", + "jsonrpsee", "once_cell", "redis", "ref-map", "rust-s3", "str-macro", "tokio", - "tower", + "tower 0.5.2", "tower-http", "tracing", "tracing-subscriber", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 73ab5f3cbe..97994ca958 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -20,6 +20,7 @@ clap = "4" color-backtrace = "0.6" dotenvy = "0.15" http = "1" +jsonrpsee = { version = "0.24", features = ["async-client", "jsonrpsee-http-client"] } once_cell = "1" redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "tokio-comp", "tokio-rustls-comp"] } ref-map = "0.1" diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index aa0c32c515..5a866b1880 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -18,7 +18,25 @@ * along with this program. If not, see . 
*/ -// TODO +use anyhow::Result; +use jsonrpsee::{http_client::HttpClient, rpc_params}; +use std::time::Duration; + +const JSONRPC_MAX_REQUEST: u32 = 16 * 1024; +const JSONRPC_TIMEOUT: Duration = Duration::from_millis(200); #[derive(Debug)] -pub struct Deepwell; +pub struct Deepwell { + client: HttpClient, +} + +impl Deepwell { + pub fn new(deepwell_url: &str) -> Result { + let client = HttpClient::builder() + .max_request_size(JSONRPC_MAX_REQUEST) + .request_timeout(JSONRPC_TIMEOUT) + .build(deepwell_url)?; + + Ok(Deepwell { client }) + } +} From 60055bc2164d2a4de0267231ffa754174b0ca470 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 06:57:54 -0500 Subject: [PATCH 020/306] Add pid-file item to configuration. --- wws/src/config/args.rs | 16 ++++++++++++++++ wws/src/config/mod.rs | 7 +++++++ wws/src/config/object.rs | 4 ++++ 3 files changed, 27 insertions(+) diff --git a/wws/src/config/args.rs b/wws/src/config/args.rs index 1cc4aebe4e..b0b28912ea 100644 --- a/wws/src/config/args.rs +++ b/wws/src/config/args.rs @@ -20,11 +20,14 @@ use crate::info; use clap::{value_parser, Arg, ArgAction, Command}; +use std::ffi::OsString; use std::net::{IpAddr, SocketAddr}; +use std::path::PathBuf; #[derive(Debug, Clone)] pub struct Arguments { pub enable_trace: bool, + pub pid_file: Option, pub address: SocketAddr, } @@ -32,6 +35,7 @@ impl Default for Arguments { fn default() -> Arguments { Arguments { enable_trace: true, + pid_file: None, address: "[::]:80".parse().unwrap(), } } @@ -51,6 +55,14 @@ impl Arguments { .action(ArgAction::SetTrue) .help("Disable trace output."), ) + .arg( + Arg::new("pid-file") + .short('P') + .long("pid") + .long("pid-file") + .value_name("PATH") + .help("The PID file to write to on boot."), + ) .arg( Arg::new("host") .short('H') @@ -78,6 +90,10 @@ impl Arguments { args.enable_trace = false; } + if let Some(value) = matches.remove_one::("pid-file") { + args.pid_file = Some(PathBuf::from(value)); + } + if let Some(value) = matches.remove_one::("host") { args.address.set_ip(value); } diff --git a/wws/src/config/mod.rs b/wws/src/config/mod.rs index f00effdb34..2cb96bd773 100644 --- a/wws/src/config/mod.rs +++ b/wws/src/config/mod.rs @@ -29,6 +29,7 @@ use self::args::Arguments; use dotenvy::dotenv; use ref_map::*; use s3::{creds::Credentials, region::Region}; +use std::path::PathBuf; use std::{env, process}; pub fn load_config() -> (Config, Secrets) { @@ -50,9 +51,14 @@ pub fn load_config() -> (Config, Secrets) { // Process arguments and overrides let Arguments { enable_trace, + mut pid_file, mut address, } = Arguments::parse(); + if let Some(value) = env::var_os("PID_FILE") { + pid_file = Some(PathBuf::from(value)); + } + if let Ok(value) = env::var("ADDRESS") { address = value.parse().expect("Unable to parse socket address"); } @@ -121,6 +127,7 @@ pub fn load_config() -> (Config, Secrets) { // Build and return let config = Config { enable_trace, + pid_file, address, }; diff --git a/wws/src/config/object.rs b/wws/src/config/object.rs index 09dca516a3..f0d13dd967 100644 --- a/wws/src/config/object.rs +++ b/wws/src/config/object.rs @@ -19,6 +19,7 @@ */ use std::net::SocketAddr; +use std::path::PathBuf; /// The runtime configuration structure for the web server. #[derive(Debug, Clone)] @@ -26,6 +27,9 @@ pub struct Config { /// Whether to enable tracing and colored backtrace. pub enable_trace: bool, + /// The PID file (if any) to write to on boot. + pub pid_file: Option, + /// The address the server will be hosted on. 
pub address: SocketAddr, } From 102a1bb94d5a1059196992953139a4f1b787b530 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 06:58:37 -0500 Subject: [PATCH 021/306] Adjust comment in main.rs --- deepwell/src/main.rs | 2 +- wws/src/main.rs | 14 +++++++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/deepwell/src/main.rs b/deepwell/src/main.rs index 1a66457027..daf5e8c068 100644 --- a/deepwell/src/main.rs +++ b/deepwell/src/main.rs @@ -81,7 +81,7 @@ async fn main() -> Result<()> { color_backtrace::install(); } - // Write PID file, if enabled + // Write PID file if let Some(ref path) = config.pid_file { info!( "Writing process ID ({}) to {}", diff --git a/wws/src/main.rs b/wws/src/main.rs index a9c91660f7..d6c5576d71 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -46,17 +46,29 @@ use self::route::build_router; use self::state::build_server_state; use self::trace::setup_tracing; use anyhow::Result; +use std::fs::File; +use std::io::Write; +use std::process; use tokio::net::TcpListener; #[tokio::main] async fn main() -> Result<()> { let (config, secrets) = load_config(); + + // Set up tracing if config.enable_trace { setup_tracing(); } - let deepwell_info = (); + // Write PID file + if let Some(ref path) = config.pid_file { + debug!(pid = process::id(), "Writing PID file"); + let mut file = File::create(path)?; + writeln!(&mut file, "{}", process::id())?; + } + let state = build_server_state(secrets)?; + let deepwell_info = (); let app = build_router(state, deepwell_info); let listener = TcpListener::bind(config.address).await?; From 0dd752281d7026aac519a7608909633a1738b024 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:02:25 -0500 Subject: [PATCH 022/306] Add DEEPWELL JSONRPC client instance to server state. --- wws/src/deepwell.rs | 2 +- wws/src/state.rs | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 5a866b1880..99a89c3837 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -31,7 +31,7 @@ pub struct Deepwell { } impl Deepwell { - pub fn new(deepwell_url: &str) -> Result { + pub fn open(deepwell_url: &str) -> Result { let client = HttpClient::builder() .max_request_size(JSONRPC_MAX_REQUEST) .request_timeout(JSONRPC_TIMEOUT) diff --git a/wws/src/state.rs b/wws/src/state.rs index ccbb1d86ef..96fdbab526 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -18,7 +18,8 @@ * along with this program. If not, see . */ -use crate::Secrets; +use crate::config::Secrets; +use crate::deepwell::Deepwell; use anyhow::Result; use s3::bucket::Bucket; use std::sync::Arc; @@ -30,11 +31,13 @@ pub type ServerState = Arc; #[derive(Debug)] pub struct ServerStateInner { + deepwell: Deepwell, redis: redis::Client, s3_bucket: Box, } pub fn build_server_state(secrets: Secrets) -> Result { + let deepwell = Deepwell::open(&secrets.deepwell_url)?; let redis = redis::Client::open(secrets.redis_url)?; let s3_bucket = { let mut bucket = Bucket::new( @@ -51,5 +54,5 @@ pub fn build_server_state(secrets: Secrets) -> Result { bucket }; - Ok(Arc::new(ServerStateInner { redis, s3_bucket })) + Ok(Arc::new(ServerStateInner { deepwell, redis, s3_bucket })) } From 3231b5f130f777755b36b7a0acdb0d2d1e3634ab Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:02:44 -0500 Subject: [PATCH 023/306] Make server state fields public. 
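Making these fields public lets request handlers reach them through the
AddExtensionLayer registered in route.rs. A minimal sketch of the intended
access pattern (the handler itself is hypothetical):

    use axum::Extension;

    use crate::state::ServerState;

    // ServerState is the Arc alias from state.rs, injected into every request
    // as an extension by AddExtensionLayer.
    async fn handle_state_debug(Extension(state): Extension<ServerState>) -> String {
        let _redis = &state.redis; // fields are now reachable directly
        format!("s3 bucket config: {:?}", state.s3_bucket)
    }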
--- wws/src/state.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/wws/src/state.rs b/wws/src/state.rs index 96fdbab526..2e37f7fdec 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -31,9 +31,9 @@ pub type ServerState = Arc; #[derive(Debug)] pub struct ServerStateInner { - deepwell: Deepwell, - redis: redis::Client, - s3_bucket: Box, + pub deepwell: Deepwell, + pub redis: redis::Client, + pub s3_bucket: Box, } pub fn build_server_state(secrets: Secrets) -> Result { From cb3821527c097dbe20c3487ecaabc07ba6f8ba83 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:08:59 -0500 Subject: [PATCH 024/306] Prepare to use file domain fields. --- deepwell/src/config/object.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/deepwell/src/config/object.rs b/deepwell/src/config/object.rs index fb8cfb8a1f..417b81e6d0 100644 --- a/deepwell/src/config/object.rs +++ b/deepwell/src/config/object.rs @@ -65,11 +65,9 @@ pub struct Config { /// The files domain to serve user-generated content from. /// /// Always starts with a `.` - #[allow(dead_code)] // TEMP pub files_domain: String, /// The files domain, but without a leading `.` - #[allow(dead_code)] // TEMP pub files_domain_no_dot: String, /// Whether to auto-restart on configuration file change. From 1f5859a6cfe52170cb95f3620c71afdf9e478c24 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:09:10 -0500 Subject: [PATCH 025/306] Use files domain for missing site localization. Probably not actually used, but we can add it anyways. --- deepwell/src/services/view/service.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index f1acf5401a..24548804da 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -562,6 +562,7 @@ impl ViewService { let mut args = FluentArgs::new(); args.set("slug", fluent_str!(site_slug)); args.set("domain", fluent_str!(config.main_domain_no_dot)); + args.set("files-domain", fluent_str!(config.files_domain_no_dot)); let html = ctx.localization().translate( locales, @@ -577,6 +578,7 @@ impl ViewService { let mut args = FluentArgs::new(); args.set("custom_domain", fluent_str!(domain)); args.set("domain", fluent_str!(config.main_domain_no_dot)); + args.set("files-domain", fluent_str!(config.files_domain_no_dot)); let html = ctx.localization().translate( locales, From 7b42e07bdc084c0e28353a85c48e6ed58acdb9f9 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:13:41 -0500 Subject: [PATCH 026/306] Mark unused field to suppress warning. --- deepwell/src/config/object.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/deepwell/src/config/object.rs b/deepwell/src/config/object.rs index 417b81e6d0..a8939089d7 100644 --- a/deepwell/src/config/object.rs +++ b/deepwell/src/config/object.rs @@ -65,6 +65,7 @@ pub struct Config { /// The files domain to serve user-generated content from. /// /// Always starts with a `.` + #[allow(dead_code)] // TEMP pub files_domain: String, /// The files domain, but without a leading `.` From a17ec36fad1575ed65e465c29ee4b12183cb8559 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:14:26 -0500 Subject: [PATCH 027/306] Add new method to provide domain information to wws. 
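For context, wws is expected to consume this new method roughly as follows.
Sketch only: it assumes serde's Deserialize derive is available on the wws side,
the field names mirror the response built in endpoints/info.rs, and the method
name matches what the wws client calls.

    use jsonrpsee::core::client::ClientT;
    use jsonrpsee::http_client::HttpClient;
    use jsonrpsee::rpc_params;
    use serde::Deserialize;

    // Wire format of the "domains" method as serialized by DEEPWELL.
    #[derive(Debug, Deserialize)]
    struct DomainsResponse {
        main_domain_no_dot: String,
        files_domain_no_dot: String,
        deepwell_version: String,
    }

    async fn fetch_domains(client: &HttpClient) -> anyhow::Result<DomainsResponse> {
        let domains: DomainsResponse = client.request("domains", rpc_params![]).await?;
        Ok(domains)
    }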
--- deepwell/src/api.rs | 1 + deepwell/src/endpoints/info.rs | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/deepwell/src/api.rs b/deepwell/src/api.rs index 4ccc5fa09a..ccf8acd952 100644 --- a/deepwell/src/api.rs +++ b/deepwell/src/api.rs @@ -183,6 +183,7 @@ async fn build_module(app_state: ServerState) -> anyhow::Result, + _params: Params<'static>, +) -> Result { + let config = ctx.config(); + + info!("Building server domain information response"); + Ok(Domains { + main_domain_no_dot: config.main_domain_no_dot.clone(), + files_domain_no_dot: config.files_domain_no_dot.clone(), + deepwell_version: &*info::VERSION_INFO, + }) +} From fe505f7d95f9077e9d4607c77cb04bbffd3a059e Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:20:09 -0500 Subject: [PATCH 028/306] Address deepwell clippy lint. --- deepwell/src/endpoints/info.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deepwell/src/endpoints/info.rs b/deepwell/src/endpoints/info.rs index 2801dfdcbb..e5b558736c 100644 --- a/deepwell/src/endpoints/info.rs +++ b/deepwell/src/endpoints/info.rs @@ -103,6 +103,6 @@ pub async fn server_domains( Ok(Domains { main_domain_no_dot: config.main_domain_no_dot.clone(), files_domain_no_dot: config.files_domain_no_dot.clone(), - deepwell_version: &*info::VERSION_INFO, + deepwell_version: &info::VERSION_INFO, }) } From 94bc44713718e3d1a370cbf94825ea5ceff7e91a Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:25:11 -0500 Subject: [PATCH 029/306] Add first deepwell client method impl. --- wws/Cargo.lock | 1 + wws/Cargo.toml | 1 + wws/src/deepwell.rs | 21 ++++++++++++++++++++- wws/src/main.rs | 2 +- wws/src/route.rs | 6 ++++-- 5 files changed, 27 insertions(+), 4 deletions(-) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index ec9ab2e1ff..a5502bd05d 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -2565,6 +2565,7 @@ dependencies = [ "redis", "ref-map", "rust-s3", + "serde", "str-macro", "tokio", "tower 0.5.2", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 97994ca958..5e94c35d45 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -25,6 +25,7 @@ once_cell = "1" redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "tokio-comp", "tokio-rustls-comp"] } ref-map = "0.1" rust-s3 = { version = "0.35", features = ["with-tokio", "tokio-rustls-tls"], default-features = false } +serde = { version = "1", features = ["derive"] } str-macro = "1" tokio = { version = "1", features = ["macros", "rt-multi-thread"] } tower = "0.5" diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 99a89c3837..cf169ed91f 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -19,7 +19,7 @@ */ use anyhow::Result; -use jsonrpsee::{http_client::HttpClient, rpc_params}; +use jsonrpsee::{core::client::ClientT, http_client::HttpClient, rpc_params}; use std::time::Duration; const JSONRPC_MAX_REQUEST: u32 = 16 * 1024; @@ -39,4 +39,23 @@ impl Deepwell { Ok(Deepwell { client }) } + + pub async fn ping(&self) -> Result<()> { + let response: String = self.client.request("ping", rpc_params![]).await?; + assert!(!response.is_empty()); + Ok(()) + } + + pub async fn info(&self) -> Result { + todo!() + } +} + +#[derive(Debug, Clone)] +pub struct DeepwellInfo { + pub main_domain: String, + pub main_domain_no_dot: String, + pub file_domain: String, + pub file_domain_no_dot: String, + pub deepwell_version: String, } diff --git a/wws/src/main.rs b/wws/src/main.rs index d6c5576d71..762e04e4a0 100644 --- a/wws/src/main.rs +++ 
b/wws/src/main.rs @@ -68,7 +68,7 @@ async fn main() -> Result<()> { } let state = build_server_state(secrets)?; - let deepwell_info = (); + let deepwell_info = state.deepwell.info().await?; let app = build_router(state, deepwell_info); let listener = TcpListener::bind(config.address).await?; diff --git a/wws/src/route.rs b/wws/src/route.rs index a4e291565f..830ce572e8 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -21,6 +21,7 @@ use crate::handler::handle_hello_world; use crate::info; use crate::state::ServerState; +use crate::deepwell::DeepwellInfo; use axum::{ body::Body, extract::{FromRequestParts, Path, Request}, @@ -37,7 +38,7 @@ use tower_http::{ normalize_path::NormalizePathLayer, set_header::SetResponseHeaderLayer, trace::TraceLayer, }; -pub fn build_router(state: ServerState, deepwell_info: ()) -> Router { +pub fn build_router(state: ServerState, deepwell_info: DeepwellInfo) -> Router { // Router that serves framerail let main_router = Router::new().route("/_TODO", get(handle_hello_world)); // handle wjfiles routes @@ -61,6 +62,7 @@ pub fn build_router(state: ServerState, deepwell_info: ()) -> Router { let app = Router::new().route( "/{*path}", any(|Host(hostname): Host, request: Request| async move { + // TODO match hostname.as_str() { "api.mydomain.com" => file_router.oneshot(request).await, _ => main_router.oneshot(request).await, @@ -95,7 +97,7 @@ pub fn build_router(state: ServerState, deepwell_info: ()) -> Router { )) .layer(SetResponseHeaderLayer::overriding( HeaderName::from_static("x-wikijump-deepwell-ver"), - Some(header_value!(todo!())), + Some(header_value!(&deepwell_info.deepwell_version)), )); app From 615bd2df0a79b9dec7347166097b7285aaf4d0fa Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:32:15 -0500 Subject: [PATCH 030/306] Add impl for domains request, including adding dots. 
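Why keep both forms: the dotted variant is what gets stripped off a hostname to
recover the site slug, while the undotted variant matches the bare domain. A
small illustration with made-up values:

    fn main() {
        // Hypothetical domain; DEEPWELL returns the undotted form, wws adds the dot.
        let file_domain_no_dot = String::from("wjfiles.example.com");
        let file_domain = format!(".{file_domain_no_dot}");

        // Dotted form: strip it as a suffix to get the site slug subdomain.
        let hostname = "scp-sandbox.wjfiles.example.com";
        assert_eq!(hostname.strip_suffix(&file_domain), Some("scp-sandbox"));

        // Undotted form: only matches the bare files domain (no site slug).
        assert!("wjfiles.example.com".strip_suffix(&file_domain).is_none());
        assert_eq!("wjfiles.example.com", file_domain_no_dot);
    }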
--- wws/src/deepwell.rs | 38 +++++++++++++++++++++++++++++++++++--- wws/src/main.rs | 4 ++-- wws/src/route.rs | 6 +++--- wws/src/state.rs | 6 +++++- 4 files changed, 45 insertions(+), 9 deletions(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index cf169ed91f..3f8e59de21 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -20,6 +20,7 @@ use anyhow::Result; use jsonrpsee::{core::client::ClientT, http_client::HttpClient, rpc_params}; +use serde::Deserialize; use std::time::Duration; const JSONRPC_MAX_REQUEST: u32 = 16 * 1024; @@ -46,13 +47,44 @@ impl Deepwell { Ok(()) } - pub async fn info(&self) -> Result { - todo!() + pub async fn domains(&self) -> Result { + #[derive(Deserialize, Debug)] + struct Response { + main_domain_no_dot: String, + file_domain_no_dot: String, + deepwell_version: String, + } + + let Response { + main_domain_no_dot, + file_domain_no_dot, + deepwell_version, + } = self.client.request("domains", rpc_params![]).await?; + + assert!( + !main_domain_no_dot.starts_with('.'), + "Main domain returned from DEEPWELL starts with '.': {main_domain_no_dot:?}", + ); + let main_domain = format!(".{main_domain_no_dot}"); + + assert!( + !file_domain_no_dot.starts_with('.'), + "File domain returned from DEEPWELL starts with '.': {file_domain_no_dot:?}", + ); + let file_domain = format!(".{file_domain_no_dot}"); + + Ok(Domains { + main_domain, + main_domain_no_dot, + file_domain, + file_domain_no_dot, + deepwell_version, + }) } } #[derive(Debug, Clone)] -pub struct DeepwellInfo { +pub struct Domains { pub main_domain: String, pub main_domain_no_dot: String, pub file_domain: String, diff --git a/wws/src/main.rs b/wws/src/main.rs index 762e04e4a0..0e96af1118 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -68,8 +68,8 @@ async fn main() -> Result<()> { } let state = build_server_state(secrets)?; - let deepwell_info = state.deepwell.info().await?; - let app = build_router(state, deepwell_info); + let domains = state.deepwell.domains().await?; + let app = build_router(state, domains); let listener = TcpListener::bind(config.address).await?; info!( diff --git a/wws/src/route.rs b/wws/src/route.rs index 830ce572e8..803c917568 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -18,10 +18,10 @@ * along with this program. If not, see . 
*/ +use crate::deepwell::Domains; use crate::handler::handle_hello_world; use crate::info; use crate::state::ServerState; -use crate::deepwell::DeepwellInfo; use axum::{ body::Body, extract::{FromRequestParts, Path, Request}, @@ -38,7 +38,7 @@ use tower_http::{ normalize_path::NormalizePathLayer, set_header::SetResponseHeaderLayer, trace::TraceLayer, }; -pub fn build_router(state: ServerState, deepwell_info: DeepwellInfo) -> Router { +pub fn build_router(state: ServerState, info: Domains) -> Router { // Router that serves framerail let main_router = Router::new().route("/_TODO", get(handle_hello_world)); // handle wjfiles routes @@ -97,7 +97,7 @@ pub fn build_router(state: ServerState, deepwell_info: DeepwellInfo) -> Router { )) .layer(SetResponseHeaderLayer::overriding( HeaderName::from_static("x-wikijump-deepwell-ver"), - Some(header_value!(&deepwell_info.deepwell_version)), + Some(header_value!(&info.deepwell_version)), )); app diff --git a/wws/src/state.rs b/wws/src/state.rs index 2e37f7fdec..d47beaa043 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -54,5 +54,9 @@ pub fn build_server_state(secrets: Secrets) -> Result { bucket }; - Ok(Arc::new(ServerStateInner { deepwell, redis, s3_bucket })) + Ok(Arc::new(ServerStateInner { + deepwell, + redis, + s3_bucket, + })) } From 4181a03e6ce735fc3fdf610d68096574158bef4b Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:34:58 -0500 Subject: [PATCH 031/306] Create stub cache struct. --- wws/src/cache.rs | 33 +++++++++++++++++++++++++++++++++ wws/src/deepwell.rs | 2 +- wws/src/main.rs | 1 + wws/src/state.rs | 9 +++++---- 4 files changed, 40 insertions(+), 5 deletions(-) create mode 100644 wws/src/cache.rs diff --git a/wws/src/cache.rs b/wws/src/cache.rs new file mode 100644 index 0000000000..5bad99208d --- /dev/null +++ b/wws/src/cache.rs @@ -0,0 +1,33 @@ +/* + * cache.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ */ + +use anyhow::Result; + +#[derive(Debug)] +pub struct Cache { + client: redis::Client, +} + +impl Cache { + pub fn connect(redis_url: &str) -> Result { + let client = redis::Client::open(redis_url)?; + Ok(Cache { client }) + } +} diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 3f8e59de21..a652b09645 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -32,7 +32,7 @@ pub struct Deepwell { } impl Deepwell { - pub fn open(deepwell_url: &str) -> Result { + pub fn connect(deepwell_url: &str) -> Result { let client = HttpClient::builder() .max_request_size(JSONRPC_MAX_REQUEST) .request_timeout(JSONRPC_TIMEOUT) diff --git a/wws/src/main.rs b/wws/src/main.rs index 0e96af1118..8171d39513 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -32,6 +32,7 @@ extern crate tracing; #[macro_use] mod macros; +mod cache; mod config; mod deepwell; mod handler; diff --git a/wws/src/state.rs b/wws/src/state.rs index d47beaa043..69108013f4 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -18,6 +18,7 @@ * along with this program. If not, see . */ +use crate::cache::Cache; use crate::config::Secrets; use crate::deepwell::Deepwell; use anyhow::Result; @@ -32,13 +33,13 @@ pub type ServerState = Arc; #[derive(Debug)] pub struct ServerStateInner { pub deepwell: Deepwell, - pub redis: redis::Client, + pub cache: Cache, pub s3_bucket: Box, } pub fn build_server_state(secrets: Secrets) -> Result { - let deepwell = Deepwell::open(&secrets.deepwell_url)?; - let redis = redis::Client::open(secrets.redis_url)?; + let deepwell = Deepwell::connect(&secrets.deepwell_url)?; + let cache = Cache::connect(&secrets.redis_url)?; let s3_bucket = { let mut bucket = Bucket::new( &secrets.s3_bucket, @@ -56,7 +57,7 @@ pub fn build_server_state(secrets: Secrets) -> Result { Ok(Arc::new(ServerStateInner { deepwell, - redis, + cache, s3_bucket, })) } From b0d8e52a9e8464c265f4e48967c8fece77da4be6 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:37:20 -0500 Subject: [PATCH 032/306] Remove unused info constant. --- wws/src/info.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/wws/src/info.rs b/wws/src/info.rs index 918f52d1ad..a208913029 100644 --- a/wws/src/info.rs +++ b/wws/src/info.rs @@ -40,8 +40,6 @@ pub static VERSION_INFO: Lazy = Lazy::new(|| { version }); -pub static VERSION: Lazy = Lazy::new(|| format!("{PKG_NAME} {}", *VERSION_INFO)); - pub static GIT_COMMIT_HASH_SHORT: Lazy> = Lazy::new(|| build::GIT_COMMIT_HASH.map(|s| &s[..8])); From 5ed37782944c47f201472e9b384f2c1adcc7f3da Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:37:45 -0500 Subject: [PATCH 033/306] Remove unused imports in main.rs --- wws/src/main.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/wws/src/main.rs b/wws/src/main.rs index 8171d39513..a1b18f85ae 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -41,8 +41,7 @@ mod route; mod state; mod trace; -use self::config::{load_config, Secrets}; -use self::deepwell::Deepwell; +use self::config::load_config; use self::route::build_router; use self::state::build_server_state; use self::trace::setup_tracing; From c6ec370f370b2859f5d2ad07d3b0444614ce8a13 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:49:58 -0500 Subject: [PATCH 034/306] Rename deepwell secret variable. 
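For reference, the renamed secret flows from the DEEPWELL_HOST environment
variable into Secrets::deepwell_host and then into Deepwell::connect(). A
trimmed standalone sketch of the first step (the real loader goes through
get_env! in config/mod.rs; the fallback value here is just a placeholder):

    use std::env;

    fn main() {
        let deepwell_host =
            env::var("DEEPWELL_HOST").unwrap_or_else(|_| String::from("localhost"));
        println!("DEEPWELL backend host: {deepwell_host}");
    }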
--- wws/src/config/mod.rs | 4 ++-- wws/src/config/secrets.rs | 6 +++--- wws/src/state.rs | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/wws/src/config/mod.rs b/wws/src/config/mod.rs index 2cb96bd773..05f841f1fe 100644 --- a/wws/src/config/mod.rs +++ b/wws/src/config/mod.rs @@ -64,7 +64,7 @@ pub fn load_config() -> (Config, Secrets) { } // Process secrets - let deepwell_url = get_env!("DEEPWELL_URL"); + let deepwell_host = get_env!("DEEPWELL_HOST"); let redis_url = get_env!("REDIS_URL"); @@ -132,7 +132,7 @@ pub fn load_config() -> (Config, Secrets) { }; let secrets = Secrets { - deepwell_url, + deepwell_host, redis_url, s3_bucket, s3_region, diff --git a/wws/src/config/secrets.rs b/wws/src/config/secrets.rs index 5dcf1ea4ce..d180d663b1 100644 --- a/wws/src/config/secrets.rs +++ b/wws/src/config/secrets.rs @@ -22,10 +22,10 @@ use s3::{creds::Credentials, region::Region}; #[derive(Debug, Clone)] pub struct Secrets { - /// The URL of the DEEPWELL backend server. + /// The hostname of the DEEPWELL backend server. /// - /// Set using environment variable `DEEPWELL_URL`. - pub deepwell_url: String, + /// Set using environment variable `DEEPWELL_HOST`. + pub deepwell_host: String, /// The URL of the Redis cache to connect to. /// diff --git a/wws/src/state.rs b/wws/src/state.rs index 69108013f4..d5fb7f5cb8 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -38,7 +38,7 @@ pub struct ServerStateInner { } pub fn build_server_state(secrets: Secrets) -> Result { - let deepwell = Deepwell::connect(&secrets.deepwell_url)?; + let deepwell = Deepwell::connect(&secrets.deepwell_host)?; let cache = Cache::connect(&secrets.redis_url)?; let s3_bucket = { let mut bucket = Bucket::new( From b20186cc0c66ddbe5b1f615ec642ed4c0cf3726b Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:54:18 -0500 Subject: [PATCH 035/306] Add example .env file. --- wws/.env.example | 34 ++++++++++++++++++++++++++++++++++ wws/.gitignore | 3 +++ 2 files changed, 37 insertions(+) create mode 100644 wws/.env.example diff --git a/wws/.env.example b/wws/.env.example new file mode 100644 index 0000000000..d86f510317 --- /dev/null +++ b/wws/.env.example @@ -0,0 +1,34 @@ +# Definition of secrets, passed via environment variable. +# There are no defaults, if an item is missing then the server will not start. +# +# If you're using docker-compose, these are already set in the container as appropriate. + +# DEEPWELL hostname +DEEPWELL_HOST=localhost + +# Redis URL +# Includes password (if needed) to connect. +REDIS_URL=redis://localhost + +# S3 configuration settings +S3_BUCKET=deepwell-files + +# Region, either specify: +S3_AWS_REGION=us-east-2 +# Or a custom region: +# But don't include both. +S3_REGION_NAME=local +S3_CUSTOM_ENDPOINT=http://localhost:9000 + +# If true, use path-style (i.e. http://s3host/bucket/path, e.g. local minio) +# If false, use domain-style (i.e. http://bucket.s3host/path, e.g. AWS S3) +S3_PATH_STYLE=false + +# Credentials, either specify: +S3_ACCESS_KEY_ID= +S3_SECRET_ACCESS_KEY= +# Or the profile name, with secrets located in an AWS profile file. +# But don't include both. 
+AWS_PROFILE_NAME=wikijump + +# vim: set ft=sh: diff --git a/wws/.gitignore b/wws/.gitignore index 8cbdf2177b..6f920f5c01 100644 --- a/wws/.gitignore +++ b/wws/.gitignore @@ -1,2 +1,5 @@ # Artifacts target/ + +# Secrets +.env From 3dcc8915d17eabf64f5757d8a1391f687b7c19c2 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 07:56:59 -0500 Subject: [PATCH 036/306] Add comments to wws main.rs --- wws/src/main.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/wws/src/main.rs b/wws/src/main.rs index a1b18f85ae..7843a89d0f 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -67,11 +67,15 @@ async fn main() -> Result<()> { writeln!(&mut file, "{}", process::id())?; } + // Connect to external services and fetch DEEPWELL data let state = build_server_state(secrets)?; let domains = state.deepwell.domains().await?; + + // Build HTTP server let app = build_router(state, domains); let listener = TcpListener::bind(config.address).await?; + // Begin listening info!( address = str!(config.address), "Listening to connections...", From da574be4c35de5b7dd947252246a82e0b6e859a4 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 08:02:52 -0500 Subject: [PATCH 037/306] Remove some more unused imports. --- wws/src/route.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/wws/src/route.rs b/wws/src/route.rs index 803c917568..a2bf467d62 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -24,11 +24,9 @@ use crate::info; use crate::state::ServerState; use axum::{ body::Body, - extract::{FromRequestParts, Path, Request}, - http::{request::Parts, StatusCode}, - response::{Html, IntoResponse, Response}, + extract::Request, routing::{any, get}, - RequestPartsExt, Router, + Router, }; use axum_extra::extract::Host; use http::header::{HeaderName, HeaderValue}; From 871805d6b3857b017b9f83df2947ac23a70dd8f3 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 08:27:49 -0500 Subject: [PATCH 038/306] Use premade no_dot variant of domain. --- deepwell/src/services/domain/service.rs | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index 78f4deeefb..1b7584db97 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -181,21 +181,11 @@ impl DomainService { /// If this domain is canonical domain, extract the site slug. pub fn parse_canonical<'a>(config: &Config, domain: &'a str) -> Option<&'a str> { let main_domain = &config.main_domain; + let main_domain_no_dot = &config.main_domain_no_dot; // Special case, see if it's the root domain (i.e. 'wikijump.com') - { - // This slice is safe, we know the first character of 'main_domain' - // is always '.', then we compare to the passed domain to see if - // it's the root domain. - // - // We are not slicing 'domain' at all, which is user-provided and - // has no guarantees about character composition. 
- // - // See config/file.rs prefix_domain() - let root_domain = &main_domain[1..]; - if domain == root_domain { - return Some("www"); - } + if domain == main_domain_no_dot { + return Some("www"); } // Remove the '.wikijump.com' suffix, get slug From 2775e691be746f5149f5f24294b3f4255f293ce9 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 16:09:03 -0500 Subject: [PATCH 039/306] Bump deepwell version to v2025.1.3 --- deepwell/Cargo.lock | 2 +- deepwell/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/deepwell/Cargo.lock b/deepwell/Cargo.lock index 90bb538044..eff725499a 100644 --- a/deepwell/Cargo.lock +++ b/deepwell/Cargo.lock @@ -782,7 +782,7 @@ dependencies = [ [[package]] name = "deepwell" -version = "2024.12.30" +version = "2025.1.3" dependencies = [ "anyhow", "argon2", diff --git a/deepwell/Cargo.toml b/deepwell/Cargo.toml index 1425f7adc6..ec9d308366 100644 --- a/deepwell/Cargo.toml +++ b/deepwell/Cargo.toml @@ -8,7 +8,7 @@ keywords = ["wikijump", "api", "backend", "wiki"] categories = ["asynchronous", "database", "web-programming::http-server"] exclude = [".gitignore", ".editorconfig"] -version = "2024.12.30" +version = "2025.1.3" authors = ["Emmie Smith "] edition = "2021" From bcebad1a783e5ee81651a4f584892129da406793 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 16:09:33 -0500 Subject: [PATCH 040/306] Add initial host processing code. --- wws/src/route.rs | 43 +++++++++++++++++++++++++++++++++++++------ 1 file changed, 37 insertions(+), 6 deletions(-) diff --git a/wws/src/route.rs b/wws/src/route.rs index a2bf467d62..9c2db57a8f 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -25,6 +25,7 @@ use crate::state::ServerState; use axum::{ body::Body, extract::Request, + response::Redirect, routing::{any, get}, Router, }; @@ -36,11 +37,13 @@ use tower_http::{ normalize_path::NormalizePathLayer, set_header::SetResponseHeaderLayer, trace::TraceLayer, }; -pub fn build_router(state: ServerState, info: Domains) -> Router { +pub fn build_router(state: ServerState, Domains { file_domain, file_domain_no_dot, deepwell_version, .. }: Domains) -> Router { // Router that serves framerail + // TODO let main_router = Router::new().route("/_TODO", get(handle_hello_world)); // handle wjfiles routes // Router that serves wjfiles + // TODO let file_router = Router::new() .route( "/local--files/{page_slug}/{filename}", @@ -60,11 +63,39 @@ pub fn build_router(state: ServerState, info: Domains) -> Router { let app = Router::new().route( "/{*path}", any(|Host(hostname): Host, request: Request| async move { - // TODO - match hostname.as_str() { - "api.mydomain.com" => file_router.oneshot(request).await, - _ => main_router.oneshot(request).await, + // Determine if it's a files domain. + if let Some(site_slug) = hostname.strip_suffix(&file_domain) { + // TODO + println!("DEBUG files (site {site_slug})"); + return file_router.oneshot(request).await; } + + // Next, check if it's the files domain by itself. + // + // This is weird, wjfiles should always a site slug subdomain, + // so in this case we just XXX + if hostname == file_domain_no_dot { + // TODO + println!("DEBUG files no site"); + return todo!(); + } + + // If it's anything else, it is a canonical domain or a custom domain. + // In either case, it goes to framerail as-is. + // + // NOTE: Do not include code to massage requests to the framerail web server. 
+ // We shouldn't spread around logic throughout the stack since this makes + // debugging and later maintenance and development more difficult. + // + // If you need to adjust web server processing in general, modify framerail. + // + // If you need to adjust how custom domains work or how site information + // is fetched from the database, modify DomainService in DEEPWELL. + // + // The only exception are the fixed redirects which would be + // included in an nginx configuration or used for wjfiles + // compatibility. See the definition of main_router above. + main_router.oneshot(request).await }), ); @@ -95,7 +126,7 @@ pub fn build_router(state: ServerState, info: Domains) -> Router { )) .layer(SetResponseHeaderLayer::overriding( HeaderName::from_static("x-wikijump-deepwell-ver"), - Some(header_value!(&info.deepwell_version)), + Some(header_value!(&deepwell_version)), )); app From 42e6c6dffc317e056e4c619d934f9ca5ca213d23 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 16:15:45 -0500 Subject: [PATCH 041/306] Suppress unused warning for ping. --- wws/src/deepwell.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index a652b09645..c58849e9bc 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -41,6 +41,7 @@ impl Deepwell { Ok(Deepwell { client }) } + #[allow(dead_code)] // We currently don't have a direct place to use this pub async fn ping(&self) -> Result<()> { let response: String = self.client.request("ping", rpc_params![]).await?; assert!(!response.is_empty()); From d614e546ead0a48047e8e46fdaf9c328b83dc688 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 16:37:45 -0500 Subject: [PATCH 042/306] Move domains struct to ServerState. --- wws/src/main.rs | 9 +++------ wws/src/route.rs | 19 +++++++++++++++---- wws/src/state.rs | 7 +++++-- 3 files changed, 23 insertions(+), 12 deletions(-) diff --git a/wws/src/main.rs b/wws/src/main.rs index 7843a89d0f..ae118bfb0c 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -67,12 +67,9 @@ async fn main() -> Result<()> { writeln!(&mut file, "{}", process::id())?; } - // Connect to external services and fetch DEEPWELL data - let state = build_server_state(secrets)?; - let domains = state.deepwell.domains().await?; - - // Build HTTP server - let app = build_router(state, domains); + // Connect to services, build server state and then run + let state = build_server_state(secrets).await?; + let app = build_router(state); let listener = TcpListener::bind(config.address).await?; // Begin listening diff --git a/wws/src/route.rs b/wws/src/route.rs index 9c2db57a8f..d301a2a0d0 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -31,13 +31,17 @@ use axum::{ }; use axum_extra::extract::Host; use http::header::{HeaderName, HeaderValue}; +use std::sync::Arc; use tower::util::ServiceExt; use tower_http::{ add_extension::AddExtensionLayer, compression::CompressionLayer, normalize_path::NormalizePathLayer, set_header::SetResponseHeaderLayer, trace::TraceLayer, }; -pub fn build_router(state: ServerState, Domains { file_domain, file_domain_no_dot, deepwell_version, .. 
}: Domains) -> Router { +pub fn build_router(state: ServerState) -> Router { + let host_state = Arc::clone(&state); + let header_state = Arc::clone(&state); + // Router that serves framerail // TODO let main_router = Router::new().route("/_TODO", get(handle_hello_world)); // handle wjfiles routes @@ -60,11 +64,18 @@ pub fn build_router(state: ServerState, Domains { file_domain, file_domain_no_do .route("/-/html/{page_slug}/{hash}", get(handle_hello_world)) .route("/{*path}", get(handle_hello_world)); + // Domain delegation logic let app = Router::new().route( "/{*path}", any(|Host(hostname): Host, request: Request| async move { + let Domains { + ref file_domain, + ref file_domain_no_dot, + .. + } = host_state.domains; + // Determine if it's a files domain. - if let Some(site_slug) = hostname.strip_suffix(&file_domain) { + if let Some(site_slug) = hostname.strip_suffix(file_domain) { // TODO println!("DEBUG files (site {site_slug})"); return file_router.oneshot(request).await; @@ -74,7 +85,7 @@ pub fn build_router(state: ServerState, Domains { file_domain, file_domain_no_do // // This is weird, wjfiles should always a site slug subdomain, // so in this case we just XXX - if hostname == file_domain_no_dot { + if &hostname == file_domain_no_dot { // TODO println!("DEBUG files no site"); return todo!(); @@ -126,7 +137,7 @@ pub fn build_router(state: ServerState, Domains { file_domain, file_domain_no_do )) .layer(SetResponseHeaderLayer::overriding( HeaderName::from_static("x-wikijump-deepwell-ver"), - Some(header_value!(&deepwell_version)), + Some(header_value!(&header_state.domains.deepwell_version)), )); app diff --git a/wws/src/state.rs b/wws/src/state.rs index d5fb7f5cb8..fccf49dbe8 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -20,7 +20,7 @@ use crate::cache::Cache; use crate::config::Secrets; -use crate::deepwell::Deepwell; +use crate::deepwell::{Deepwell, Domains}; use anyhow::Result; use s3::bucket::Bucket; use std::sync::Arc; @@ -32,13 +32,15 @@ pub type ServerState = Arc; #[derive(Debug)] pub struct ServerStateInner { + pub domains: Domains, pub deepwell: Deepwell, pub cache: Cache, pub s3_bucket: Box, } -pub fn build_server_state(secrets: Secrets) -> Result { +pub async fn build_server_state(secrets: Secrets) -> Result { let deepwell = Deepwell::connect(&secrets.deepwell_host)?; + let domains = deepwell.domains().await?; let cache = Cache::connect(&secrets.redis_url)?; let s3_bucket = { let mut bucket = Bucket::new( @@ -56,6 +58,7 @@ pub fn build_server_state(secrets: Secrets) -> Result { }; Ok(Arc::new(ServerStateInner { + domains, deepwell, cache, s3_bucket, From fce01763323b01074b51c992dfce7c4836bb3df2 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 16:48:03 -0500 Subject: [PATCH 043/306] Use URL for DEEPWELL instead of host. We need to specify the port and the client wants a string. --- wws/.env.example | 4 ++-- wws/src/config/mod.rs | 4 ++-- wws/src/config/secrets.rs | 6 +++--- wws/src/state.rs | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/wws/.env.example b/wws/.env.example index d86f510317..e5cce909aa 100644 --- a/wws/.env.example +++ b/wws/.env.example @@ -3,8 +3,8 @@ # # If you're using docker-compose, these are already set in the container as appropriate. -# DEEPWELL hostname -DEEPWELL_HOST=localhost +# DEEPWELL URL +DEEPWELL_URL=http://localhost:2747 # Redis URL # Includes password (if needed) to connect. 
diff --git a/wws/src/config/mod.rs b/wws/src/config/mod.rs index 05f841f1fe..2cb96bd773 100644 --- a/wws/src/config/mod.rs +++ b/wws/src/config/mod.rs @@ -64,7 +64,7 @@ pub fn load_config() -> (Config, Secrets) { } // Process secrets - let deepwell_host = get_env!("DEEPWELL_HOST"); + let deepwell_url = get_env!("DEEPWELL_URL"); let redis_url = get_env!("REDIS_URL"); @@ -132,7 +132,7 @@ pub fn load_config() -> (Config, Secrets) { }; let secrets = Secrets { - deepwell_host, + deepwell_url, redis_url, s3_bucket, s3_region, diff --git a/wws/src/config/secrets.rs b/wws/src/config/secrets.rs index d180d663b1..fbc8aa77b2 100644 --- a/wws/src/config/secrets.rs +++ b/wws/src/config/secrets.rs @@ -22,10 +22,10 @@ use s3::{creds::Credentials, region::Region}; #[derive(Debug, Clone)] pub struct Secrets { - /// The hostname of the DEEPWELL backend server. + /// The URL to the DEEPWELL server to connect to. /// - /// Set using environment variable `DEEPWELL_HOST`. - pub deepwell_host: String, + /// Set using environment variable `DEEPWELL_URL`. + pub deepwell_url: String, /// The URL of the Redis cache to connect to. /// diff --git a/wws/src/state.rs b/wws/src/state.rs index fccf49dbe8..2f3191a22e 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -39,7 +39,7 @@ pub struct ServerStateInner { } pub async fn build_server_state(secrets: Secrets) -> Result { - let deepwell = Deepwell::connect(&secrets.deepwell_host)?; + let deepwell = Deepwell::connect(&secrets.deepwell_url)?; let domains = deepwell.domains().await?; let cache = Cache::connect(&secrets.redis_url)?; let s3_bucket = { From 6ca52e1ffd170496ccc0db8ef4f51b77b6b8cf17 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 17:01:30 -0500 Subject: [PATCH 044/306] Add check for deepwell, improved error message. --- wws/src/deepwell.rs | 6 +++++- wws/src/state.rs | 1 + 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index c58849e9bc..ecbac1fa58 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -41,7 +41,11 @@ impl Deepwell { Ok(Deepwell { client }) } - #[allow(dead_code)] // We currently don't have a direct place to use this + /// Attempt to ping DEEPWELL, panicking if connecting failed. + pub async fn check(&self) { + self.ping().await.expect("Unable to connect to DEEPWELL"); + } + pub async fn ping(&self) -> Result<()> { let response: String = self.client.request("ping", rpc_params![]).await?; assert!(!response.is_empty()); diff --git a/wws/src/state.rs b/wws/src/state.rs index 2f3191a22e..6884d95e46 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -40,6 +40,7 @@ pub struct ServerStateInner { pub async fn build_server_state(secrets: Secrets) -> Result { let deepwell = Deepwell::connect(&secrets.deepwell_url)?; + deepwell.check().await; let domains = deepwell.domains().await?; let cache = Cache::connect(&secrets.redis_url)?; let s3_bucket = { From ad65336b08f79d0682221d669d62a66bfb9bea90 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 23:18:16 -0500 Subject: [PATCH 045/306] Remove newline. 
--- deepwell/src/config/mod.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/deepwell/src/config/mod.rs b/deepwell/src/config/mod.rs index 95ce22a09e..72fa93e49b 100644 --- a/deepwell/src/config/mod.rs +++ b/deepwell/src/config/mod.rs @@ -41,7 +41,6 @@ impl SetupConfig { run_special_action(); let secrets = Secrets::load(); let config = parse_args(); - SetupConfig { secrets, config } } } From ef11cd287fc73f3e5c1185424a77d7642d32ca8b Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 23:23:58 -0500 Subject: [PATCH 046/306] Fix field name from DEEPWELL. --- wws/src/deepwell.rs | 18 +++++++++--------- wws/src/route.rs | 8 ++++---- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index ecbac1fa58..7f71c739a2 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -56,13 +56,13 @@ impl Deepwell { #[derive(Deserialize, Debug)] struct Response { main_domain_no_dot: String, - file_domain_no_dot: String, + files_domain_no_dot: String, deepwell_version: String, } let Response { main_domain_no_dot, - file_domain_no_dot, + files_domain_no_dot, deepwell_version, } = self.client.request("domains", rpc_params![]).await?; @@ -73,16 +73,16 @@ impl Deepwell { let main_domain = format!(".{main_domain_no_dot}"); assert!( - !file_domain_no_dot.starts_with('.'), - "File domain returned from DEEPWELL starts with '.': {file_domain_no_dot:?}", + !files_domain_no_dot.starts_with('.'), + "Files domain returned from DEEPWELL starts with '.': {files_domain_no_dot:?}", ); - let file_domain = format!(".{file_domain_no_dot}"); + let files_domain = format!(".{files_domain_no_dot}"); Ok(Domains { main_domain, main_domain_no_dot, - file_domain, - file_domain_no_dot, + files_domain, + files_domain_no_dot, deepwell_version, }) } @@ -92,7 +92,7 @@ impl Deepwell { pub struct Domains { pub main_domain: String, pub main_domain_no_dot: String, - pub file_domain: String, - pub file_domain_no_dot: String, + pub files_domain: String, + pub files_domain_no_dot: String, pub deepwell_version: String, } diff --git a/wws/src/route.rs b/wws/src/route.rs index d301a2a0d0..577358d9e2 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -69,13 +69,13 @@ pub fn build_router(state: ServerState) -> Router { "/{*path}", any(|Host(hostname): Host, request: Request| async move { let Domains { - ref file_domain, - ref file_domain_no_dot, + ref files_domain, + ref files_domain_no_dot, .. } = host_state.domains; // Determine if it's a files domain. - if let Some(site_slug) = hostname.strip_suffix(file_domain) { + if let Some(site_slug) = hostname.strip_suffix(files_domain) { // TODO println!("DEBUG files (site {site_slug})"); return file_router.oneshot(request).await; @@ -85,7 +85,7 @@ pub fn build_router(state: ServerState) -> Router { // // This is weird, wjfiles should always a site slug subdomain, // so in this case we just XXX - if &hostname == file_domain_no_dot { + if &hostname == files_domain_no_dot { // TODO println!("DEBUG files no site"); return todo!(); From fb736ff63b9aa5e25d22c9cec6202e02bda4652d Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 23:29:24 -0500 Subject: [PATCH 047/306] Add log line to show domain information. 
--- wws/src/deepwell.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 7f71c739a2..3db855eb36 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -78,6 +78,12 @@ impl Deepwell { ); let files_domain = format!(".{files_domain_no_dot}"); + info!( + main_domain = main_domain_no_dot, + files_domain = files_domain_no_dot, + "Got domain information from DEEPWELL {deepwell_version}", + ); + Ok(Domains { main_domain, main_domain_no_dot, From a04d3e6c496e47ea8ecb5b7ef96e3104fbf618ba Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 4 Jan 2025 23:30:35 -0500 Subject: [PATCH 048/306] Add trace line for DEEPWELL ping. --- wws/src/deepwell.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 3db855eb36..022652a742 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -49,6 +49,7 @@ impl Deepwell { pub async fn ping(&self) -> Result<()> { let response: String = self.client.request("ping", rpc_params![]).await?; assert!(!response.is_empty()); + debug!("Successfully pinged DEEPWELL"); Ok(()) } From f25b41facd0d5e55dddc7383d96991591df4d6ba Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Mon, 6 Jan 2025 23:42:20 -0500 Subject: [PATCH 049/306] Fix middleware and compilation. --- wws/Cargo.lock | 1 - wws/Cargo.toml | 1 - wws/src/handler/framerail.rs | 44 ++++++++++++ wws/src/handler/misc.rs | 29 ++++++++ wws/src/handler/mod.rs | 13 +++- wws/src/handler/redirect.rs | 37 ++++++++++ wws/src/main.rs | 1 + wws/src/path.rs | 33 +++++++++ wws/src/route.rs | 132 +++++++++++++++++++---------------- 9 files changed, 227 insertions(+), 64 deletions(-) create mode 100644 wws/src/handler/framerail.rs create mode 100644 wws/src/handler/misc.rs create mode 100644 wws/src/handler/redirect.rs create mode 100644 wws/src/path.rs diff --git a/wws/Cargo.lock b/wws/Cargo.lock index a5502bd05d..8db3836afa 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -2559,7 +2559,6 @@ dependencies = [ "clap", "color-backtrace", "dotenvy", - "http 1.2.0", "jsonrpsee", "once_cell", "redis", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 5e94c35d45..96b651143e 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -19,7 +19,6 @@ axum-extra = { version = "0.10", features = ["attachment"] } clap = "4" color-backtrace = "0.6" dotenvy = "0.15" -http = "1" jsonrpsee = { version = "0.24", features = ["async-client", "jsonrpsee-http-client"] } once_cell = "1" redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "tokio-comp", "tokio-rustls-comp"] } diff --git a/wws/src/handler/framerail.rs b/wws/src/handler/framerail.rs new file mode 100644 index 0000000000..2db57c9b67 --- /dev/null +++ b/wws/src/handler/framerail.rs @@ -0,0 +1,44 @@ +/* + * handler/framerail.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. 
+ * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use crate::path::get_path; +use crate::state::ServerState; +use axum::{ + extract::{Request, State}, + http::{status::StatusCode, Uri}, + response::Html, +}; + +pub async fn proxy_framerail( + State(state): State, + mut req: Request, +) -> Html<&'static str> { + // Get path and query + let path = get_path(req.uri()); + + // Create and set framerail URL + let framerail_host = "framerail"; // TODO + let framerail_port = 3000; // TODO + let uri = format!("http://{framerail_host}:{framerail_port}{path}"); + *req.uri_mut() = Uri::try_from(uri).expect("Internal framerail URI is invalid"); + + // TODO + todo!() +} diff --git a/wws/src/handler/misc.rs b/wws/src/handler/misc.rs new file mode 100644 index 0000000000..7343b2922c --- /dev/null +++ b/wws/src/handler/misc.rs @@ -0,0 +1,29 @@ +/* + * handler/misc.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use axum::{body::Body, http::status::StatusCode, response::Response}; + +pub async fn handle_teapot() -> Response { + Response::builder() + .status(StatusCode::IM_A_TEAPOT) + .header("Content-Type", "text/html; charset=utf-8") + .body(Body::from("🫖")) + .expect("Unable to convert response data") +} diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index e77ae9800e..0a942b4bd5 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -18,9 +18,18 @@ * along with this program. If not, see . */ -// TODO +mod framerail; +mod misc; +mod redirect; -use axum::response::Html; +pub use self::framerail::*; +pub use self::misc::*; +pub use self::redirect::*; + +use axum::{ + http::status::StatusCode, + response::{Html, Response}, +}; pub async fn handle_hello_world() -> Html<&'static str> { Html("
         Hello, World!
") diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs new file mode 100644 index 0000000000..f3ae0fb508 --- /dev/null +++ b/wws/src/handler/redirect.rs @@ -0,0 +1,37 @@ +/* + * handler/redirect.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use crate::path::get_path; +use crate::state::ServerState; +use axum::{ + extract::{Request, State}, + response::Html, +}; +use axum_extra::extract::Host; + +pub async fn redirect_to_files(Host(hostname): Host, req: Request) -> Html<&'static str> { + let path = get_path(req.uri()); + + // xyz.wikijump.com -> xyz.wjfiles.com + // customdomain.com -> xyz.wjfiles.com + + let uri = format!("https://{hostname}{path}"); + todo!() +} diff --git a/wws/src/main.rs b/wws/src/main.rs index ae118bfb0c..c7f46b9ca4 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -37,6 +37,7 @@ mod config; mod deepwell; mod handler; mod info; +mod path; mod route; mod state; mod trace; diff --git a/wws/src/path.rs b/wws/src/path.rs new file mode 100644 index 0000000000..f42b216579 --- /dev/null +++ b/wws/src/path.rs @@ -0,0 +1,33 @@ +/* + * path.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use axum::http::Uri; + +/// Extracts the path and query from a URI. +/// +/// Since `Uri::path_and_query()` returns an `Option`, +/// we need a match statement to get the path if there +/// is no query string portion. 
+pub fn get_path(uri: &Uri) -> &str { + match uri.path_and_query() { + Some(path_and_query) => path_and_query.as_str(), + None => uri.path(), + } +} diff --git a/wws/src/route.rs b/wws/src/route.rs index 577358d9e2..ee916e0882 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -19,32 +19,49 @@ */ use crate::deepwell::Domains; -use crate::handler::handle_hello_world; +use crate::handler::*; use crate::info; use crate::state::ServerState; use axum::{ body::Body, extract::Request, + http::header::{HeaderName, HeaderValue}, response::Redirect, routing::{any, get}, Router, }; use axum_extra::extract::Host; -use http::header::{HeaderName, HeaderValue}; use std::sync::Arc; use tower::util::ServiceExt; use tower_http::{ - add_extension::AddExtensionLayer, compression::CompressionLayer, - normalize_path::NormalizePathLayer, set_header::SetResponseHeaderLayer, trace::TraceLayer, + compression::CompressionLayer, normalize_path::NormalizePathLayer, + set_header::SetResponseHeaderLayer, trace::TraceLayer, }; pub fn build_router(state: ServerState) -> Router { + let main_state = Arc::clone(&state); + let file_state = Arc::clone(&state); let host_state = Arc::clone(&state); let header_state = Arc::clone(&state); + macro_rules! header_value { + ($value:expr) => { + HeaderValue::from_str($value).expect("Version is not a valid header value") + }; + } + // Router that serves framerail // TODO - let main_router = Router::new().route("/_TODO", get(handle_hello_world)); // handle wjfiles routes + let main_router = Router::new() + .route("/local--files/{*rest}", any(redirect_to_files)) + .route("/local--code/{*rest}", any(redirect_to_files)) + .route("/local--html/{*rest}", any(redirect_to_files)) + .route("/-/file/{*rest}", any(redirect_to_files)) + .route("/-/download/{*rest}", any(redirect_to_files)) + .route("/-/code/{*rest}", any(redirect_to_files)) + .route("/-/html/{*rest}", any(redirect_to_files)) + .route("/", any(proxy_framerail)) + .with_state(main_state); // Router that serves wjfiles // TODO @@ -62,61 +79,59 @@ pub fn build_router(state: ServerState) -> Router { ) .route("/-/code/{page_slug}/{index}", get(handle_hello_world)) .route("/-/html/{page_slug}/{hash}", get(handle_hello_world)) - .route("/{*path}", get(handle_hello_world)); - - // Domain delegation logic - let app = Router::new().route( - "/{*path}", - any(|Host(hostname): Host, request: Request| async move { - let Domains { - ref files_domain, - ref files_domain_no_dot, - .. - } = host_state.domains; + .route("/", get(handle_hello_world)) + .with_state(file_state); - // Determine if it's a files domain. - if let Some(site_slug) = hostname.strip_suffix(files_domain) { - // TODO - println!("DEBUG files (site {site_slug})"); - return file_router.oneshot(request).await; - } + Router::new() + // Domain delegation logic + .route( + "/", + any(|Host(hostname): Host, request: Request| async move { + let Domains { + ref files_domain, + ref files_domain_no_dot, + .. + } = host_state.domains; - // Next, check if it's the files domain by itself. - // - // This is weird, wjfiles should always a site slug subdomain, - // so in this case we just XXX - if &hostname == files_domain_no_dot { - // TODO - println!("DEBUG files no site"); - return todo!(); - } + // Determine if it's a files domain. + if let Some(site_slug) = hostname.strip_suffix(files_domain) { + // TODO + println!("DEBUG files (site {site_slug})"); + return file_router.oneshot(request).await; + } - // If it's anything else, it is a canonical domain or a custom domain. 
- // In either case, it goes to framerail as-is. - // - // NOTE: Do not include code to massage requests to the framerail web server. - // We shouldn't spread around logic throughout the stack since this makes - // debugging and later maintenance and development more difficult. - // - // If you need to adjust web server processing in general, modify framerail. - // - // If you need to adjust how custom domains work or how site information - // is fetched from the database, modify DomainService in DEEPWELL. - // - // The only exception are the fixed redirects which would be - // included in an nginx configuration or used for wjfiles - // compatibility. See the definition of main_router above. - main_router.oneshot(request).await - }), - ); + // Next, check if it's the files domain by itself. + // + // This is weird, wjfiles should always a site slug subdomain, + // so in this case we just XXX + if &hostname == files_domain_no_dot { + // TODO + println!("DEBUG files no site"); + return todo!(); + } - macro_rules! header_value { - ($value:expr) => { - HeaderValue::from_str($value).expect("Version is not a valid header value") - }; - } - - let app = app + // If it's anything else, it is a canonical domain or a custom domain. + // In either case, it goes to framerail as-is. + // + // NOTE: Do not include code to massage requests to the framerail web server. + // We shouldn't spread around logic throughout the stack since this makes + // debugging and later maintenance and development more difficult. + // + // If you need to adjust web server processing in general, modify framerail. + // + // If you need to adjust how custom domains work or how site information + // is fetched from the database, modify DomainService in DEEPWELL. + // + // The only exception are the fixed redirects which would be + // included in an nginx configuration or used for wjfiles + // compatibility. See the definition of main_router above. + println!("DEBUG main {hostname}"); + main_router.oneshot(request).await + }), + ) + // Easter egg + .route("/-/teapot", any(handle_teapot)) + // Middleware .layer(TraceLayer::new_for_http()) .layer(NormalizePathLayer::trim_trailing_slash()) .layer( @@ -126,7 +141,6 @@ pub fn build_router(state: ServerState) -> Router { .br(true) .zstd(true), ) - .layer(AddExtensionLayer::new(state)) .layer(SetResponseHeaderLayer::overriding( HeaderName::from_static("x-wikijump"), Some(HeaderValue::from_static("1")), @@ -138,7 +152,5 @@ pub fn build_router(state: ServerState) -> Router { .layer(SetResponseHeaderLayer::overriding( HeaderName::from_static("x-wikijump-deepwell-ver"), Some(header_value!(&header_state.domains.deepwell_version)), - )); - - app + )) } From e92d1e4f5c83e89cc524a9f33d18ea16e93a940c Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Mon, 6 Jan 2025 23:53:21 -0500 Subject: [PATCH 050/306] Add state to redirect_to_files. 
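
The handler now receives the shared ServerState through axum's `State`
extractor, alongside `Host`; the `Request` parameter stays last because it
consumes the request body. For reference, the redirect this handler is
building toward could look roughly like the sketch below. The slug
derivation, the placeholder slug value, and the temporary status code are
assumptions, not part of this patch:

    // Sketch only: send the request to the equivalent URL on the files domain.
    let site_slug = "example"; // hypothetical; derived from the hostname in practice
    let files_domain = &state.domains.files_domain; // e.g. ".wjfiles.com"
    let target = format!("https://{site_slug}{files_domain}{path}");
    axum::response::Redirect::temporary(&target)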
--- wws/src/handler/redirect.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs index f3ae0fb508..738e4889c2 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -26,7 +26,11 @@ use axum::{ }; use axum_extra::extract::Host; -pub async fn redirect_to_files(Host(hostname): Host, req: Request) -> Html<&'static str> { +pub async fn redirect_to_files( + State(state): State, + Host(hostname): Host, + req: Request, +) -> Html<&'static str> { let path = get_path(req.uri()); // xyz.wikijump.com -> xyz.wjfiles.com From 281892c0ef3f8bc9e60165a08eb9ca9717f8b1eb Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 9 Jan 2025 00:32:17 -0500 Subject: [PATCH 051/306] Add assertion for main and files domains. --- wws/src/deepwell.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 022652a742..edd7219262 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -79,6 +79,8 @@ impl Deepwell { ); let files_domain = format!(".{files_domain_no_dot}"); + assert_ne!(main_domain, files_domain, "Cannot set domain for main and files service!"); + info!( main_domain = main_domain_no_dot, files_domain = files_domain_no_dot, From ccea13a0d7497e40f8b887047bdcdeff153c69e0 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 9 Jan 2025 00:36:27 -0500 Subject: [PATCH 052/306] Run rustfmt. --- wws/src/deepwell.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index edd7219262..39e87c2088 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -79,7 +79,10 @@ impl Deepwell { ); let files_domain = format!(".{files_domain_no_dot}"); - assert_ne!(main_domain, files_domain, "Cannot set domain for main and files service!"); + assert_ne!( + main_domain, files_domain, + "Cannot set domain for main and files service!", + ); info!( main_domain = main_domain_no_dot, From 910fd51d0046e72069f3ee1e69d9a315ce0f1818 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 9 Jan 2025 00:42:18 -0500 Subject: [PATCH 053/306] Start new domain processing logic. --- wws/src/route.rs | 36 ++++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/wws/src/route.rs b/wws/src/route.rs index ee916e0882..3316d9f1fd 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -88,11 +88,27 @@ pub fn build_router(state: ServerState) -> Router { "/", any(|Host(hostname): Host, request: Request| async move { let Domains { + ref main_domain, + ref main_domain_no_dot, ref files_domain, ref files_domain_no_dot, .. } = host_state.domains; + // Determine if it's the main domain. + if let Some(site_slug) = hostname.strip_suffix(main_domain) { + // TODO + println!("DEBUG main (main {site_slug})"); + return main_router.oneshot(request).await; + } + + // Next, check if it's the main domain by itself. + if &hostname == main_domain_no_dot { + // TODO + println!("DEBUG main (main default)"); + return main_router.oneshot(request).await; + } + // Determine if it's a files domain. if let Some(site_slug) = hostname.strip_suffix(files_domain) { // TODO @@ -103,28 +119,16 @@ pub fn build_router(state: ServerState) -> Router { // Next, check if it's the files domain by itself. // // This is weird, wjfiles should always a site slug subdomain, - // so in this case we just XXX + // so in this case we just temporary redirect to the main domain, + // stripping the path. 
if &hostname == files_domain_no_dot { // TODO println!("DEBUG files no site"); return todo!(); } - // If it's anything else, it is a canonical domain or a custom domain. - // In either case, it goes to framerail as-is. - // - // NOTE: Do not include code to massage requests to the framerail web server. - // We shouldn't spread around logic throughout the stack since this makes - // debugging and later maintenance and development more difficult. - // - // If you need to adjust web server processing in general, modify framerail. - // - // If you need to adjust how custom domains work or how site information - // is fetched from the database, modify DomainService in DEEPWELL. - // - // The only exception are the fixed redirects which would be - // included in an nginx configuration or used for wjfiles - // compatibility. See the definition of main_router above. + // If it's anything else, it must be a custom domain. + // Do a lookup, then set the site data as appropriate. println!("DEBUG main {hostname}"); main_router.oneshot(request).await }), From 51faea1a8eab9cd9e6b6f834046d7163e9d502b9 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 9 Jan 2025 00:59:04 -0500 Subject: [PATCH 054/306] Reorder hostname checks. --- wws/src/route.rs | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/wws/src/route.rs b/wws/src/route.rs index 3316d9f1fd..d050384987 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -86,7 +86,7 @@ pub fn build_router(state: ServerState) -> Router { // Domain delegation logic .route( "/", - any(|Host(hostname): Host, request: Request| async move { + any(|Host(ref hostname): Host, request: Request| async move { let Domains { ref main_domain, ref main_domain_no_dot, @@ -95,17 +95,17 @@ pub fn build_router(state: ServerState) -> Router { .. } = host_state.domains; - // Determine if it's the main domain. - if let Some(site_slug) = hostname.strip_suffix(main_domain) { + // First, check if it's the main domain by itself. + if hostname = main_domain_no_dot { // TODO - println!("DEBUG main (main {site_slug})"); + println!("DEBUG main default"); return main_router.oneshot(request).await; } - // Next, check if it's the main domain by itself. - if &hostname == main_domain_no_dot { + // Determine if it's the main domain. + if let Some(site_slug) = hostname.strip_suffix(main_domain) { // TODO - println!("DEBUG main (main default)"); + println!("DEBUG main ({site_slug})"); return main_router.oneshot(request).await; } @@ -116,15 +116,18 @@ pub fn build_router(state: ServerState) -> Router { return file_router.oneshot(request).await; } - // Next, check if it's the files domain by itself. + // Finally, check if it's the files domain by itself. // // This is weird, wjfiles should always a site slug subdomain, // so in this case we just temporary redirect to the main domain, // stripping the path. - if &hostname == files_domain_no_dot { + // + // Since this is expected to be uncommon, we're putting it after + // the site files check. + if hostname = files_domain_no_dot { // TODO - println!("DEBUG files no site"); - return todo!(); + println!("DEBUG files default"); + return file_router.oneshot(request).await; } // If it's anything else, it must be a custom domain. From 4a847ee26926f920e67e8510e7f3bf4076985267 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 9 Jan 2025 01:00:45 -0500 Subject: [PATCH 055/306] Use if / else if chain. 
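
Folding the checks into one `if` / `else if` expression keeps the whole
hostname classification in a single place and makes the precedence explicit,
in particular that a per-site files subdomain is matched before the bare
files domain. The same classification can be sketched as a standalone
function (illustrative only; `Domains` is the struct fetched from DEEPWELL,
and the enum exists purely for the sketch):

    enum Route { MainDefault, MainSite, FilesSite, FilesDefault, Custom }

    fn classify(hostname: &str, domains: &Domains) -> Route {
        if hostname == domains.main_domain_no_dot {
            Route::MainDefault
        } else if hostname.strip_suffix(&domains.main_domain).is_some() {
            Route::MainSite
        } else if hostname.strip_suffix(&domains.files_domain).is_some() {
            Route::FilesSite
        } else if hostname == domains.files_domain_no_dot {
            Route::FilesDefault
        } else {
            Route::Custom
        }
    }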
--- wws/src/route.rs | 58 ++++++++++++++++++++++-------------------------- 1 file changed, 26 insertions(+), 32 deletions(-) diff --git a/wws/src/route.rs b/wws/src/route.rs index d050384987..f14bc7f0f2 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -86,7 +86,7 @@ pub fn build_router(state: ServerState) -> Router { // Domain delegation logic .route( "/", - any(|Host(ref hostname): Host, request: Request| async move { + any(|Host(hostname): Host, request: Request| async move { let Domains { ref main_domain, ref main_domain_no_dot, @@ -95,45 +95,39 @@ pub fn build_router(state: ServerState) -> Router { .. } = host_state.domains; - // First, check if it's the main domain by itself. - if hostname = main_domain_no_dot { - // TODO + if &hostname == main_domain_no_dot { + // First, check if it's the main domain by itself. println!("DEBUG main default"); - return main_router.oneshot(request).await; - } - - // Determine if it's the main domain. - if let Some(site_slug) = hostname.strip_suffix(main_domain) { // TODO + main_router.oneshot(request).await + } else if let Some(site_slug) = hostname.strip_suffix(main_domain) { + // Determine if it's the main domain. println!("DEBUG main ({site_slug})"); - return main_router.oneshot(request).await; - } - - // Determine if it's a files domain. - if let Some(site_slug) = hostname.strip_suffix(files_domain) { // TODO + main_router.oneshot(request).await + } else if let Some(site_slug) = hostname.strip_suffix(files_domain) { + // Determine if it's a files domain. println!("DEBUG files (site {site_slug})"); - return file_router.oneshot(request).await; - } - - // Finally, check if it's the files domain by itself. - // - // This is weird, wjfiles should always a site slug subdomain, - // so in this case we just temporary redirect to the main domain, - // stripping the path. - // - // Since this is expected to be uncommon, we're putting it after - // the site files check. - if hostname = files_domain_no_dot { // TODO + file_router.oneshot(request).await + } else if &hostname == files_domain_no_dot { + // Finally, check if it's the files domain by itself. + // + // This is weird, wjfiles should always a site slug subdomain, + // so in this case we just temporary redirect to the main domain, + // stripping the path. + // + // Since this is expected to be uncommon, we're putting it after + // the site files check. println!("DEBUG files default"); - return file_router.oneshot(request).await; + // TODO + file_router.oneshot(request).await + } else { + // If it's anything else, it must be a custom domain. + // Do a lookup, then set the site data as appropriate. + println!("DEBUG main {hostname}"); + main_router.oneshot(request).await } - - // If it's anything else, it must be a custom domain. - // Do a lookup, then set the site data as appropriate. - println!("DEBUG main {hostname}"); - main_router.oneshot(request).await }), ) // Easter egg From b8fbacc20c88ccbf85f37f74363e89e08c25302a Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Wed, 15 Jan 2025 01:45:58 -0500 Subject: [PATCH 056/306] Start implementation of data fetching and caching. 
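
The new helpers on ServerStateInner follow a cache-aside pattern: check the
Redis hash first, fall back to a DEEPWELL call on a miss, then write the
result back so the next lookup is served from cache. From a handler the
intended usage is roughly the following sketch (the slug is a placeholder
value):

    // Sketch only: consults the "site_slug:<slug>" hash in Redis first,
    // then DEEPWELL, repopulating the cache entry on the way out.
    let site_id: i64 = state.get_site_slug("some-site").await?;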
--- wws/src/cache.rs | 35 +++++++++++++++++++++++++++++++++++ wws/src/deepwell.rs | 34 ++++++++++++++++++++++++++++++++++ wws/src/state.rs | 36 +++++++++++++++++++++++++++++++++++- 3 files changed, 104 insertions(+), 1 deletion(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 5bad99208d..49dc737f4d 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -19,6 +19,7 @@ */ use anyhow::Result; +use redis::Commands; #[derive(Debug)] pub struct Cache { @@ -26,8 +27,42 @@ pub struct Cache { } impl Cache { + /// Connect to the Redis cluster. pub fn connect(redis_url: &str) -> Result { let client = redis::Client::open(redis_url)?; Ok(Cache { client }) } + + /// Retrieve the site ID from the slug from the cache. + pub fn get_site_slug(&self, site_slug: &str) -> Result> { + let mut conn = self.client.get_connection()?; + let key = format!("site_slug:{site_slug}"); + let value = conn.hget(key, "id")?; + Ok(value) + } + + /// Set the site ID for a site slug. + pub fn set_site_slug(&self, site_slug: &str, site_id: i64) -> Result<()> { + let mut conn = self.client.get_connection()?; + let key = format!("site_slug:{site_slug}"); + conn.hset(key, "id", site_id)?; + Ok(()) + } + + /// Retrieve the site slug and ID from a custom domain from the cache. + pub fn get_site_domain(&self, domain: &str) -> Result> { + let mut conn = self.client.get_connection()?; + let key = format!("site_domain:{domain}"); + let value = conn.hget(key, &["id", "slug"])?; + Ok(value) + } + + /// Set the site slug and ID for a custom domain. + pub fn set_site_domain(&self, domain: &str, site_id: i64, site_slug: &str) -> Result<()> { + let mut conn = self.client.get_connection()?; + let key = format!("site_domain:{domain}"); + conn.hset(&key, "id", site_id)?; + conn.hset(&key, "slug", site_slug)?; + Ok(()) + } } diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 39e87c2088..9886df3a5e 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -26,6 +26,23 @@ use std::time::Duration; const JSONRPC_MAX_REQUEST: u32 = 16 * 1024; const JSONRPC_TIMEOUT: Duration = Duration::from_millis(200); +/// Macro to create `ObjectParams` instances. +/// This is the object equivalent to `rpc_params!`, which creates `ArrayParams` instances. +macro_rules! rpc_object { + ($($key:expr => $value:expr,)+) => { rpc_object!($($key => $value),+) }; + ($($key:expr => $value:expr),*) => {{ + use jsonrpsee::core::params::ObjectParams; + + let mut params = ObjectParams::new(); + $( + if let Err(error) = params.insert($key, $value) { + panic!("Parameter `{}` cannot be serialized: {:?}", stringify!($), error); + } + )* + params + }}; +} + #[derive(Debug)] pub struct Deepwell { client: HttpClient, @@ -98,6 +115,15 @@ impl Deepwell { deepwell_version, }) } + + pub async fn get_site_from_slug(&self, slug: &str) -> Result { + let response: SiteData = self.client.request("site_get", rpc_object! 
{}).await?; + Ok(response) + } + + pub async fn get_site_from_domain(&self, domain: &str) -> Result { + todo!() + } } #[derive(Debug, Clone)] @@ -108,3 +134,11 @@ pub struct Domains { pub files_domain_no_dot: String, pub deepwell_version: String, } + +#[derive(Deserialize, Debug, Clone)] +pub struct SiteData { + pub site_id: i64, + pub slug: String, + pub name: String, + pub custom_domain: Option, +} diff --git a/wws/src/state.rs b/wws/src/state.rs index 6884d95e46..b08a574b3f 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -20,7 +20,7 @@ use crate::cache::Cache; use crate::config::Secrets; -use crate::deepwell::{Deepwell, Domains}; +use crate::deepwell::{Deepwell, Domains, SiteData}; use anyhow::Result; use s3::bucket::Bucket; use std::sync::Arc; @@ -65,3 +65,37 @@ pub async fn build_server_state(secrets: Secrets) -> Result { s3_bucket, })) } + +impl ServerStateInner { + // Contains implementations for the common pattern of "check the cache, + // if not present, get it from DEEPWELL and populate it". + + pub async fn get_site_slug(&self, site_slug: &str) -> Result { + match self.cache.get_site_slug(site_slug)? { + Some(site_id) => Ok(site_id), + None => { + let SiteData { site_id, .. } = self.deepwell.get_site_from_slug(site_slug).await?; + self.cache.set_site_slug(site_slug, site_id)?; + Ok(site_id) + } + } + } + + pub async fn get_site_domain(&self, site_domain: &str) -> Result<(i64, String)> { + match self.cache.get_site_domain(site_domain)? { + Some((site_id, site_slug)) => Ok((site_id, site_slug)), + None => { + let SiteData { + site_id, + slug: site_slug, + .. + } = self.deepwell.get_site_from_domain(site_domain).await?; + + self.cache + .set_site_domain(site_domain, site_id, &site_slug)?; + + Ok((site_id, site_slug)) + } + } + } +} From c479bbeb69cc41ff6fcafe31f56575f6db89af51 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Wed, 15 Jan 2025 01:49:14 -0500 Subject: [PATCH 057/306] Specify return type to address warnings. --- wws/src/cache.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 49dc737f4d..f875f3d400 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -45,7 +45,7 @@ impl Cache { pub fn set_site_slug(&self, site_slug: &str, site_id: i64) -> Result<()> { let mut conn = self.client.get_connection()?; let key = format!("site_slug:{site_slug}"); - conn.hset(key, "id", site_id)?; + conn.hset::<_, _, _, ()>(key, "id", site_id)?; Ok(()) } @@ -61,8 +61,8 @@ impl Cache { pub fn set_site_domain(&self, domain: &str, site_id: i64, site_slug: &str) -> Result<()> { let mut conn = self.client.get_connection()?; let key = format!("site_domain:{domain}"); - conn.hset(&key, "id", site_id)?; - conn.hset(&key, "slug", site_slug)?; + conn.hset::<_, _, _, ()>(&key, "id", site_id)?; + conn.hset::<_, _, _, ()>(&key, "slug", site_slug)?; Ok(()) } } From ecd5187fed8f2ce81ecc714af645b383a66a4e96 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Wed, 15 Jan 2025 18:14:10 -0500 Subject: [PATCH 058/306] Change return type to optional. --- wws/src/deepwell.rs | 21 ++++++++++++++++----- wws/src/state.rs | 37 ++++++++++++++++++++----------------- 2 files changed, 36 insertions(+), 22 deletions(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 9886df3a5e..0ef389afad 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -116,13 +116,24 @@ impl Deepwell { }) } - pub async fn get_site_from_slug(&self, slug: &str) -> Result { - let response: SiteData = self.client.request("site_get", rpc_object! 
{}).await?; - Ok(response) + pub async fn get_site_from_slug(&self, slug: &str) -> Result> { + let response: SiteData = self + .client + .request("site_get", rpc_object! { "site" => slug }) + .await?; + + // TODO handle missing site + + Ok(Some(response)) } - pub async fn get_site_from_domain(&self, domain: &str) -> Result { - todo!() + pub async fn get_site_from_domain(&self, domain: &str) -> Result> { + let response: Option = self + .client + .request("site_from_domain", rpc_params![domain]) + .await?; + + Ok(response) } } diff --git a/wws/src/state.rs b/wws/src/state.rs index b08a574b3f..89fa50fb91 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -70,32 +70,35 @@ impl ServerStateInner { // Contains implementations for the common pattern of "check the cache, // if not present, get it from DEEPWELL and populate it". - pub async fn get_site_slug(&self, site_slug: &str) -> Result { + pub async fn get_site_slug(&self, site_slug: &str) -> Result> { match self.cache.get_site_slug(site_slug)? { - Some(site_id) => Ok(site_id), - None => { - let SiteData { site_id, .. } = self.deepwell.get_site_from_slug(site_slug).await?; - self.cache.set_site_slug(site_slug, site_id)?; - Ok(site_id) - } + Some(site_id) => Ok(Some(site_id)), + None => match self.deepwell.get_site_from_slug(site_slug).await? { + None => Ok(None), + Some(SiteData { site_id, .. }) => { + self.cache.set_site_slug(site_slug, site_id)?; + Ok(Some(site_id)) + } + }, } } - pub async fn get_site_domain(&self, site_domain: &str) -> Result<(i64, String)> { + pub async fn get_site_domain(&self, site_domain: &str) -> Result> { match self.cache.get_site_domain(site_domain)? { - Some((site_id, site_slug)) => Ok((site_id, site_slug)), - None => { - let SiteData { + Some((site_id, site_slug)) => Ok(Some((site_id, site_slug))), + None => match self.deepwell.get_site_from_domain(site_domain).await? { + None => Ok(None), + Some(SiteData { site_id, slug: site_slug, .. - } = self.deepwell.get_site_from_domain(site_domain).await?; + }) => { + self.cache + .set_site_domain(site_domain, site_id, &site_slug)?; - self.cache - .set_site_domain(site_domain, site_id, &site_slug)?; - - Ok((site_id, site_slug)) - } + Ok(Some((site_id, site_slug))) + } + }, } } } From fd75f8ab1f95099f40f72c11863d5d69176a1a74 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Wed, 15 Jan 2025 21:21:48 -0500 Subject: [PATCH 059/306] Create error wrapper type. 
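
Each variant carries a `#[from]` conversion, so fallible call sites keep
using `?` while returning the crate-level Result. A minimal illustration
with a hypothetical helper (not code from this patch):

    use crate::error::Result;
    use std::{fs, path::Path};

    fn read_text_file(path: &Path) -> Result<String> {
        // The `?` converts std::io::Error into Error::Io via the #[from] impl.
        Ok(fs::read_to_string(path)?)
    }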
--- wws/Cargo.lock | 37 +++++++++++++++++++++++++++++-------- wws/Cargo.toml | 1 + wws/src/cache.rs | 2 +- wws/src/deepwell.rs | 2 +- wws/src/error.rs | 41 +++++++++++++++++++++++++++++++++++++++++ wws/src/main.rs | 2 ++ 6 files changed, 75 insertions(+), 10 deletions(-) create mode 100644 wws/src/error.rs diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 8db3836afa..aa29519bc8 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -168,7 +168,7 @@ dependencies = [ "quick-xml", "rust-ini", "serde", - "thiserror", + "thiserror 1.0.69", "time", "url", ] @@ -179,7 +179,7 @@ version = "0.25.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9aed3f9c7eac9be28662fdb3b0f4d1951e812f7c64fed4f0327ba702f459b3b" dependencies = [ - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -1102,7 +1102,7 @@ dependencies = [ "combine", "jni-sys", "log", - "thiserror", + "thiserror 1.0.69", "walkdir", ] @@ -1149,7 +1149,7 @@ dependencies = [ "rustc-hash", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", "tracing", @@ -1173,7 +1173,7 @@ dependencies = [ "rustls-platform-verifier", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tower 0.4.13", "tracing", @@ -1189,7 +1189,7 @@ dependencies = [ "http 1.2.0", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -1636,7 +1636,7 @@ dependencies = [ "serde_derive", "serde_json", "sha2", - "thiserror", + "thiserror 1.0.69", "time", "tokio", "tokio-rustls 0.24.1", @@ -2020,7 +2020,16 @@ version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" +dependencies = [ + "thiserror-impl 2.0.11", ] [[package]] @@ -2034,6 +2043,17 @@ dependencies = [ "syn", ] +[[package]] +name = "thiserror-impl" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "thread_local" version = "1.1.8" @@ -2566,6 +2586,7 @@ dependencies = [ "rust-s3", "serde", "str-macro", + "thiserror 2.0.11", "tokio", "tower 0.5.2", "tower-http", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 96b651143e..c16a6151ec 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -26,6 +26,7 @@ ref-map = "0.1" rust-s3 = { version = "0.35", features = ["with-tokio", "tokio-rustls-tls"], default-features = false } serde = { version = "1", features = ["derive"] } str-macro = "1" +thiserror = "2" tokio = { version = "1", features = ["macros", "rt-multi-thread"] } tower = "0.5" tower-http = { version = "0.6.1", features = ["add-extension", "compression-br", "compression-deflate", "compression-gzip", "compression-zstd", "normalize-path", "set-header", "trace"] } diff --git a/wws/src/cache.rs b/wws/src/cache.rs index f875f3d400..5f2cedcd4c 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -18,7 +18,7 @@ * along with this program. If not, see . 
*/ -use anyhow::Result; +use crate::error::Result; use redis::Commands; #[derive(Debug)] diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 0ef389afad..ef8f0c63fa 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -18,7 +18,7 @@ * along with this program. If not, see . */ -use anyhow::Result; +use crate::error::Result; use jsonrpsee::{core::client::ClientT, http_client::HttpClient, rpc_params}; use serde::Deserialize; use std::time::Duration; diff --git a/wws/src/error.rs b/wws/src/error.rs new file mode 100644 index 0000000000..e84e83cead --- /dev/null +++ b/wws/src/error.rs @@ -0,0 +1,41 @@ +/* + * error.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use jsonrpsee::core::ClientError; +use std::io; +use thiserror::Error as ThisError; + +pub use std::error::Error as StdError; + +pub type StdResult = std::result::Result; +pub type Result = StdResult; + +/// Wrapper error for possible upstream errors. +#[derive(ThisError, Debug)] +pub enum Error { + #[error("DEEPWELL API error: {0}")] + Deepwell(#[from] ClientError), + + #[error("Redis error: {0}")] + Redis(#[from] redis::RedisError), + + #[error("I/O error: {0}")] + Io(#[from] io::Error), +} diff --git a/wws/src/main.rs b/wws/src/main.rs index c7f46b9ca4..aad03198eb 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -35,6 +35,7 @@ mod macros; mod cache; mod config; mod deepwell; +mod error; mod handler; mod info; mod path; @@ -78,6 +79,7 @@ async fn main() -> Result<()> { address = str!(config.address), "Listening to connections...", ); + axum::serve(listener, app).await?; Ok(()) } From 47a3c5c9e1daa86249dadbfd972cba3ba17688b9 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Wed, 15 Jan 2025 21:37:19 -0500 Subject: [PATCH 060/306] Add state to router. --- wws/src/route.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/wws/src/route.rs b/wws/src/route.rs index f14bc7f0f2..2fd0dbce69 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -154,4 +154,5 @@ pub fn build_router(state: ServerState) -> Router { HeaderName::from_static("x-wikijump-deepwell-ver"), Some(header_value!(&header_state.domains.deepwell_version)), )) + .with_state(state) } From af3813b3c66eed649d2a6d53817a28cd15c05a40 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 00:28:26 -0500 Subject: [PATCH 061/306] Move out host-processing logic to separate module. 
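
lookup_host() classifies the incoming hostname and returns a SiteAndHost
value; the route closure still carries a TODO where it should dispatch on
that result. One possible shape for the dispatch, as a sketch (router names
follow the surrounding code; the missing-site, default-redirect, and lone
files domain arms are intentionally left open):

    match lookup_host(&state, &hostname).await {
        Ok(SiteAndHost::Main { .. } | SiteAndHost::MainCustom { .. }) => {
            main_router.oneshot(request).await
        }
        Ok(SiteAndHost::File { .. }) => file_router.oneshot(request).await,
        Ok(_) => todo!("missing sites, default redirect, bare files domain"),
        Err(error) => todo!("map {error} into an HTTP error response"),
    }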
--- wws/src/error.rs | 4 ++ wws/src/host.rs | 169 +++++++++++++++++++++++++++++++++++++++++++++++ wws/src/main.rs | 1 + wws/src/route.rs | 77 +++++++++------------ wws/src/state.rs | 10 +-- 5 files changed, 210 insertions(+), 51 deletions(-) create mode 100644 wws/src/host.rs diff --git a/wws/src/error.rs b/wws/src/error.rs index e84e83cead..9c05dc1397 100644 --- a/wws/src/error.rs +++ b/wws/src/error.rs @@ -19,6 +19,7 @@ */ use jsonrpsee::core::ClientError; +use s3::error::S3Error; use std::io; use thiserror::Error as ThisError; @@ -36,6 +37,9 @@ pub enum Error { #[error("Redis error: {0}")] Redis(#[from] redis::RedisError), + #[error("S3 service returned error: {0}")] + S3(#[from] S3Error), + #[error("I/O error: {0}")] Io(#[from] io::Error), } diff --git a/wws/src/host.rs b/wws/src/host.rs new file mode 100644 index 0000000000..4761fd936d --- /dev/null +++ b/wws/src/host.rs @@ -0,0 +1,169 @@ +/* + * host.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use crate::{deepwell::Domains, error::Result, state::ServerState}; + +/// The slug for the default site. +/// +/// This refers to the site displayed when you visit `wikijump.com` +/// with no subdomain component. +const DEFAULT_SITE_SLUG: &str = "www"; + +#[derive(Debug)] +pub enum SiteAndHost<'a> { + Main { site_id: i64, site_slug: &'a str }, + MainMissing { site_slug: &'a str }, + MainCustom { site_id: i64, site_slug: String }, + MainCustomMissing, + DefaultRedirect, + File { site_id: i64, site_slug: &'a str }, + FileMissing { site_slug: &'a str }, + FileRoot, +} + +pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result> { + let Domains { + ref main_domain, + ref main_domain_no_dot, + ref files_domain, + ref files_domain_no_dot, + .. + } = state.domains; + + if &hostname == main_domain_no_dot { + // First, check if it's the default domain by itself. + main_site_slug(state, hostname, None).await + } else if let Some(site_slug) = hostname.strip_suffix(main_domain) { + // Determine if it's the main domain. + + let site_id = state.get_site_slug(site_slug).await?; + + if site_slug == DEFAULT_SITE_SLUG { + // We should be redirecting to the non-www version of the link + return Ok(SiteAndHost::DefaultRedirect); + } + + main_site_slug(state, hostname, Some(site_slug)).await + } else if let Some(site_slug) = hostname.strip_suffix(files_domain) { + // Determine if it's a files domain. 
+ let site_id = state.get_site_slug(site_slug).await?; + match site_id { + Some(site_id) => { + // Site exists + info!( + domain = hostname, + site_slug = site_slug, + site_id = site_id, + "Routing files site request", + ); + + Ok(SiteAndHost::File { site_id, site_slug }) + } + None => { + // No such site + warn!( + domain = hostname, + site_slug = site_slug, + "No such site with slug (files)", + ); + + Ok(SiteAndHost::FileMissing { site_slug }) + } + } + } else if &hostname == files_domain_no_dot { + // Finally, check if it's the files domain by itself. + // + // This is weird, wjfiles should always a site slug subdomain, + // so in this case we just temporary redirect to the main domain, + // stripping the path. + // + // Since this is expected to be uncommon, we're putting it after + // the site files check. + + info!(domain = hostname, "Handling lone files site request",); + + Ok(SiteAndHost::FileRoot) + } else { + // If it's anything else, it must be a custom domain. + // Do a lookup, then set the site data as appropriate. + + match state.get_site_domain(&hostname).await? { + Some((site_id, site_slug)) => { + // Site exists + info!( + domain = hostname, + site_id = site_id, + "Routing main site request (custom)", + ); + + Ok(SiteAndHost::MainCustom { site_id, site_slug }) + } + None => { + // No such site + warn!(domain = hostname, "No such site with slug (custom)"); + Ok(SiteAndHost::MainCustomMissing) + } + } + } +} + +/// Process a request from `[site-slug].wikijump.com`. +/// +/// Because `wikijump.com` (default) and specifying a slug +/// have essentially the same code paths, we avoid code +/// duplication by using this helper function. +async fn main_site_slug<'a>( + state: &ServerState, + hostname: &str, + site_slug: Option<&'a str>, +) -> Result> { + // This is our way of passing in "is default site" or not. + // If it's None, it's 'wikijump.com', if it's Some(_), it's 'xxx.wikijump.com'. + let (site_slug, is_default) = match site_slug { + Some(site_slug) => (site_slug, false), + None => (DEFAULT_SITE_SLUG, true), + }; + + // Return site present or missing response based on site ID. + let site_id = state.get_site_slug(site_slug).await?; + match site_id { + Some(site_id) => { + // Site exists + info!( + domain = hostname, + site_id = site_id, + "Routing main site request ({})", + if is_default { "default" } else { "slug" }, + ); + + Ok(SiteAndHost::Main { site_id, site_slug }) + } + None => { + // No such site + warn!( + domain = hostname, + site_slug = site_slug, + "No such site with slug (main)", + ); + + Ok(SiteAndHost::MainMissing { site_slug }) + } + } +} diff --git a/wws/src/main.rs b/wws/src/main.rs index aad03198eb..ca7651ff97 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -37,6 +37,7 @@ mod config; mod deepwell; mod error; mod handler; +mod host; mod info; mod path; mod route; diff --git a/wws/src/route.rs b/wws/src/route.rs index 2fd0dbce69..ad596a79bb 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -18,13 +18,15 @@ * along with this program. If not, see . 
*/ -use crate::deepwell::Domains; -use crate::handler::*; -use crate::info; -use crate::state::ServerState; +use crate::{ + handler::*, + host::{lookup_host, SiteAndHost}, + info, + state::ServerState, +}; use axum::{ body::Body, - extract::Request, + extract::{Request, State}, http::header::{HeaderName, HeaderValue}, response::Redirect, routing::{any, get}, @@ -86,49 +88,30 @@ pub fn build_router(state: ServerState) -> Router { // Domain delegation logic .route( "/", - any(|Host(hostname): Host, request: Request| async move { - let Domains { - ref main_domain, - ref main_domain_no_dot, - ref files_domain, - ref files_domain_no_dot, - .. - } = host_state.domains; + any( + |State(state): State, + Host(hostname): Host, + mut request: Request| async { + { + let mut headers = request.headers_mut(); + + // Strip internal headers, just to be safe. + headers.remove("x-wikijump-site-slug"); + headers.remove("x-wikijump-site-id"); + headers.remove("x-wikijump-domain"); + + /* + // Also add the domain header since that is the same before lookup_host() + headers.insert("x-wikijump-domain", &hostname); + */ + } - if &hostname == main_domain_no_dot { - // First, check if it's the main domain by itself. - println!("DEBUG main default"); - // TODO - main_router.oneshot(request).await - } else if let Some(site_slug) = hostname.strip_suffix(main_domain) { - // Determine if it's the main domain. - println!("DEBUG main ({site_slug})"); - // TODO - main_router.oneshot(request).await - } else if let Some(site_slug) = hostname.strip_suffix(files_domain) { - // Determine if it's a files domain. - println!("DEBUG files (site {site_slug})"); - // TODO - file_router.oneshot(request).await - } else if &hostname == files_domain_no_dot { - // Finally, check if it's the files domain by itself. - // - // This is weird, wjfiles should always a site slug subdomain, - // so in this case we just temporary redirect to the main domain, - // stripping the path. - // - // Since this is expected to be uncommon, we're putting it after - // the site files check. - println!("DEBUG files default"); - // TODO - file_router.oneshot(request).await - } else { - // If it's anything else, it must be a custom domain. - // Do a lookup, then set the site data as appropriate. - println!("DEBUG main {hostname}"); - main_router.oneshot(request).await - } - }), + match lookup_host(&state, &hostname).await { + // TODO + _ => todo!(), + } + }, + ), ) // Easter egg .route("/-/teapot", any(handle_teapot)) diff --git a/wws/src/state.rs b/wws/src/state.rs index 89fa50fb91..c32eca56dd 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -18,10 +18,12 @@ * along with this program. If not, see . */ -use crate::cache::Cache; -use crate::config::Secrets; -use crate::deepwell::{Deepwell, Domains, SiteData}; -use anyhow::Result; +use crate::{ + cache::Cache, + config::Secrets, + deepwell::{Deepwell, Domains, SiteData}, + error::Result, +}; use s3::bucket::Bucket; use std::sync::Arc; use std::time::Duration; From fc9cc19a13a72ae159ee817a7fd0adb5e2bc38db Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 00:47:54 -0500 Subject: [PATCH 062/306] Fix match arm formatting. 
--- wws/src/host.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/wws/src/host.rs b/wws/src/host.rs index 4761fd936d..dabb3ac5e2 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -96,9 +96,7 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result Date: Thu, 16 Jan 2025 01:07:01 -0500 Subject: [PATCH 063/306] Remove unused variable. --- wws/src/host.rs | 4 ---- 1 file changed, 4 deletions(-) diff --git a/wws/src/host.rs b/wws/src/host.rs index dabb3ac5e2..e55272e9f7 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -51,10 +51,6 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result Date: Thu, 16 Jan 2025 01:32:35 -0500 Subject: [PATCH 064/306] Add logic to handle errors and missing sites. --- wws/src/deepwell.rs | 23 +++++++++----- wws/src/route.rs | 74 ++++++++++++++++++++++++++++++++++++++++----- 2 files changed, 83 insertions(+), 14 deletions(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index ef8f0c63fa..6c82f427dd 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -18,7 +18,7 @@ * along with this program. If not, see . */ -use crate::error::Result; +use crate::error::{Error, Result}; use jsonrpsee::{core::client::ClientT, http_client::HttpClient, rpc_params}; use serde::Deserialize; use std::time::Duration; @@ -117,23 +117,32 @@ impl Deepwell { } pub async fn get_site_from_slug(&self, slug: &str) -> Result> { - let response: SiteData = self + use jsonrpsee::core::ClientError; + + let result = self .client .request("site_get", rpc_object! { "site" => slug }) - .await?; + .await; - // TODO handle missing site + match result { + // Site data found + Ok(site_data) => Ok(Some(site_data)), - Ok(Some(response)) + // SiteNotFound error case + Err(ClientError::Call(error)) if error.code() == 2004 => Ok(None), + + // For any other error, forward + Err(error) => Err(Error::Deepwell(error)), + } } pub async fn get_site_from_domain(&self, domain: &str) -> Result> { - let response: Option = self + let site_data: Option = self .client .request("site_from_domain", rpc_params![domain]) .await?; - Ok(response) + Ok(site_data) } } diff --git a/wws/src/route.rs b/wws/src/route.rs index ad596a79bb..6f4557e0fc 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -22,18 +22,19 @@ use crate::{ handler::*, host::{lookup_host, SiteAndHost}, info, + path::get_path, state::ServerState, }; use axum::{ body::Body, extract::{Request, State}, http::header::{HeaderName, HeaderValue}, - response::Redirect, + response::{IntoResponse, Redirect}, routing::{any, get}, Router, }; use axum_extra::extract::Host; -use std::sync::Arc; +use std::{convert::Infallible, sync::Arc}; use tower::util::ServiceExt; use tower_http::{ compression::CompressionLayer, normalize_path::NormalizePathLayer, @@ -67,7 +68,7 @@ pub fn build_router(state: ServerState) -> Router { // Router that serves wjfiles // TODO - let file_router = Router::new() + let files_router = Router::new() .route( "/local--files/{page_slug}/{filename}", get(handle_hello_world), @@ -91,7 +92,7 @@ pub fn build_router(state: ServerState) -> Router { any( |State(state): State, Host(hostname): Host, - mut request: Request| async { + mut request: Request| async move { { let mut headers = request.headers_mut(); @@ -106,9 +107,68 @@ pub fn build_router(state: ServerState) -> Router { */ } - match lookup_host(&state, &hostname).await { - // TODO - _ => todo!(), + macro_rules! 
forward_request { + ($router:expr) => { + match $router.oneshot(request).await { + Ok(response) => response, + Err(Infallible) => match Infallible {}, + } + }; + } + + let host_data = match lookup_host(&state, &hostname).await { + Ok(host_data) => host_data, + Err(error) => { + // TODO error page response in case of an internal issue + todo!() + } + }; + + match host_data { + // Main site route handling + SiteAndHost::Main { site_id, site_slug } => { + // TODO + forward_request!(main_router) + } + SiteAndHost::MainCustom { site_id, site_slug } => { + // TODO + forward_request!(main_router) + } + // Main site missing + SiteAndHost::MainMissing { site_slug } => { + // TODO + forward_request!(main_router) + } + SiteAndHost::MainCustomMissing => { + todo!() + } + // Default site redirect + // e.g. "www.wikijump.com/foo" -> "wikijump.com/foo" + SiteAndHost::DefaultRedirect => { + let destination = format!( + "https://{}{}", + state.domains.main_domain_no_dot, + get_path(request.uri()), + ); + Redirect::permanent(&destination).into_response() + } + // Files site route handling + SiteAndHost::File { site_id, site_slug } => { + // TODO + forward_request!(files_router) + } + SiteAndHost::FileMissing { site_slug } => { + // TODO + forward_request!(files_router) + } + // Files site by itself + // See the case in host.rs for an explanation + SiteAndHost::FileRoot => { + let destination = + format!("https://{}", state.domains.main_domain_no_dot); + + Redirect::temporary(&destination).into_response() + } } }, ), From 79587a051ebbd7d105c7a99a16481b843a1eef9f Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 01:41:30 -0500 Subject: [PATCH 065/306] Add special headers for site metadata. --- wws/src/route.rs | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/wws/src/route.rs b/wws/src/route.rs index 6f4557e0fc..c7fc625e98 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -49,7 +49,7 @@ pub fn build_router(state: ServerState) -> Router { macro_rules! header_value { ($value:expr) => { - HeaderValue::from_str($value).expect("Version is not a valid header value") + HeaderValue::from_str(&$value).expect("Version is not a valid header value") }; } @@ -97,14 +97,12 @@ pub fn build_router(state: ServerState) -> Router { let mut headers = request.headers_mut(); // Strip internal headers, just to be safe. - headers.remove("x-wikijump-site-slug"); headers.remove("x-wikijump-site-id"); + headers.remove("x-wikijump-site-slug"); headers.remove("x-wikijump-domain"); - /* // Also add the domain header since that is the same before lookup_host() - headers.insert("x-wikijump-domain", &hostname); - */ + headers.insert("x-wikijump-domain", header_value!(hostname)); } macro_rules! forward_request { @@ -116,6 +114,19 @@ pub fn build_router(state: ServerState) -> Router { }; } + macro_rules! 
add_headers { + ($site_id:expr, $site_slug:expr) => {{ + // Validate types + let _: i64 = $site_id; + let _: &str = &$site_slug; + + // Add headers + let mut headers = request.headers_mut(); + headers.insert("x-wikijump-site-id", header_value!(str!($site_id))); + headers.insert("x-wikijump-site-slug", header_value!($site_slug)); + }}; + } + let host_data = match lookup_host(&state, &hostname).await { Ok(host_data) => host_data, Err(error) => { @@ -127,11 +138,11 @@ pub fn build_router(state: ServerState) -> Router { match host_data { // Main site route handling SiteAndHost::Main { site_id, site_slug } => { - // TODO + add_headers!(site_id, site_slug); forward_request!(main_router) } SiteAndHost::MainCustom { site_id, site_slug } => { - // TODO + add_headers!(site_id, site_slug); forward_request!(main_router) } // Main site missing @@ -154,7 +165,7 @@ pub fn build_router(state: ServerState) -> Router { } // Files site route handling SiteAndHost::File { site_id, site_slug } => { - // TODO + add_headers!(site_id, site_slug); forward_request!(files_router) } SiteAndHost::FileMissing { site_slug } => { From ddd9754d99565509e4433d64b789e7a074e243de Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 01:43:59 -0500 Subject: [PATCH 066/306] Address lint. It's not a type, but a value. --- wws/src/route.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/route.rs b/wws/src/route.rs index c7fc625e98..a4641f63c2 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -109,7 +109,7 @@ pub fn build_router(state: ServerState) -> Router { ($router:expr) => { match $router.oneshot(request).await { Ok(response) => response, - Err(Infallible) => match Infallible {}, + Err(infallible) => match infallible {}, } }; } From 1b735b33ce3495e13a608011a7f61aee591e2918 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 01:46:06 -0500 Subject: [PATCH 067/306] Resolve some warnings. --- wws/src/handler/mod.rs | 5 +---- wws/src/route.rs | 5 ++--- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 0a942b4bd5..79c4e8e417 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -26,10 +26,7 @@ pub use self::framerail::*; pub use self::misc::*; pub use self::redirect::*; -use axum::{ - http::status::StatusCode, - response::{Html, Response}, -}; +use axum::response::Html; pub async fn handle_hello_world() -> Html<&'static str> { Html("

Hello, World!

") diff --git a/wws/src/route.rs b/wws/src/route.rs index a4641f63c2..af31070dd4 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -44,7 +44,6 @@ use tower_http::{ pub fn build_router(state: ServerState) -> Router { let main_state = Arc::clone(&state); let file_state = Arc::clone(&state); - let host_state = Arc::clone(&state); let header_state = Arc::clone(&state); macro_rules! header_value { @@ -94,7 +93,7 @@ pub fn build_router(state: ServerState) -> Router { Host(hostname): Host, mut request: Request| async move { { - let mut headers = request.headers_mut(); + let headers = request.headers_mut(); // Strip internal headers, just to be safe. headers.remove("x-wikijump-site-id"); @@ -121,7 +120,7 @@ pub fn build_router(state: ServerState) -> Router { let _: &str = &$site_slug; // Add headers - let mut headers = request.headers_mut(); + let headers = request.headers_mut(); headers.insert("x-wikijump-site-id", header_value!(str!($site_id))); headers.insert("x-wikijump-site-slug", header_value!($site_slug)); }}; From f94400249da8843daa8069f1e2c4a04dd30c6375 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 01:46:23 -0500 Subject: [PATCH 068/306] Mark temporary handler as deprecated. Just meant to be a placeholder until it can be replaced with the actual handler implementations. --- wws/src/handler/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 79c4e8e417..d825f79506 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -28,6 +28,7 @@ pub use self::redirect::*; use axum::response::Html; +#[deprecated] pub async fn handle_hello_world() -> Html<&'static str> { Html("

Hello, World!

") } From c6374ef810b6c175952ba619590fd0050f726d81 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 02:04:24 -0500 Subject: [PATCH 069/306] Add more explanatory comments. --- wws/src/route.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/wws/src/route.rs b/wws/src/route.rs index af31070dd4..11873a0f6c 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -126,6 +126,7 @@ pub fn build_router(state: ServerState) -> Router { }}; } + // Determine what host and site (e.g. main vs files, what site slug and ID) let host_data = match lookup_host(&state, &hostname).await { Ok(host_data) => host_data, Err(error) => { @@ -134,6 +135,8 @@ pub fn build_router(state: ServerState) -> Router { } }; + // Now that we have the general category of request type, we can + // give it to the right place to be processed. match host_data { // Main site route handling SiteAndHost::Main { site_id, site_slug } => { @@ -141,6 +144,7 @@ pub fn build_router(state: ServerState) -> Router { forward_request!(main_router) } SiteAndHost::MainCustom { site_id, site_slug } => { + // NOTE: The difference here is site_slug here is String not &str add_headers!(site_id, site_slug); forward_request!(main_router) } From 4d8b7df81a740ab9b740d026af15b953beec2fb6 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 02:07:51 -0500 Subject: [PATCH 070/306] Fix assertion message. --- wws/src/route.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/route.rs b/wws/src/route.rs index 11873a0f6c..5ca2795724 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -48,7 +48,7 @@ pub fn build_router(state: ServerState) -> Router { macro_rules! header_value { ($value:expr) => { - HeaderValue::from_str(&$value).expect("Version is not a valid header value") + HeaderValue::from_str(&$value).expect("String is not a valid header value") }; } From 73d71b070d8b46e49c48d1d2492b8c8512e8a5bc Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 02:18:55 -0500 Subject: [PATCH 071/306] Add some handler stubs for files router. --- wws/src/handler/code.rs | 33 ++++++++++++++++++++++++++++++++ wws/src/handler/file.rs | 42 +++++++++++++++++++++++++++++++++++++++++ wws/src/handler/html.rs | 33 ++++++++++++++++++++++++++++++++ wws/src/handler/mod.rs | 6 ++++++ wws/src/route.rs | 8 ++++---- 5 files changed, 118 insertions(+), 4 deletions(-) create mode 100644 wws/src/handler/code.rs create mode 100644 wws/src/handler/file.rs create mode 100644 wws/src/handler/html.rs diff --git a/wws/src/handler/code.rs b/wws/src/handler/code.rs new file mode 100644 index 0000000000..49dbe35f24 --- /dev/null +++ b/wws/src/handler/code.rs @@ -0,0 +1,33 @@ +/* + * handler/code.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ */ + +use crate::state::ServerState; +use axum::{ + extract::{Path, State}, + response::Html, +}; + +pub async fn handle_code_block( + State(state): State, + Path((page_slug, hash)): Path<(String, String)>, +) -> Html<&'static str> { + // TODO + todo!() +} diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs new file mode 100644 index 0000000000..673d517b95 --- /dev/null +++ b/wws/src/handler/file.rs @@ -0,0 +1,42 @@ +/* + * handler/file.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use crate::state::ServerState; +use axum::{ + extract::{Path, State}, + response::Html, +}; +use axum_extra::response::Attachment; + +pub async fn handle_file_fetch( + State(state): State, + Path((page_slug, filename)): Path<(String, String)>, +) -> Html<&'static str> { + // TODO + todo!() +} + +pub async fn handle_file_download( + State(state): State, + Path((page_slug, filename)): Path<(String, String)>, +) -> Html<&'static str> { + // TODO Attachment + todo!() +} diff --git a/wws/src/handler/html.rs b/wws/src/handler/html.rs new file mode 100644 index 0000000000..ecf76a12d6 --- /dev/null +++ b/wws/src/handler/html.rs @@ -0,0 +1,33 @@ +/* + * handler/html.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +use crate::state::ServerState; +use axum::{ + extract::{Path, State}, + response::Html, +}; + +pub async fn handle_html_block( + State(state): State, + Path((page_slug, index)): Path<(String, String)>, +) -> Html<&'static str> { + // TODO + todo!() +} diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index d825f79506..f10dc9a0b4 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -18,11 +18,17 @@ * along with this program. If not, see . 
*/ +mod code; +mod file; mod framerail; +mod html; mod misc; mod redirect; +pub use self::code::*; +pub use self::file::*; pub use self::framerail::*; +pub use self::html::*; pub use self::misc::*; pub use self::redirect::*; diff --git a/wws/src/route.rs b/wws/src/route.rs index 5ca2795724..1308bcbd58 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -74,13 +74,13 @@ pub fn build_router(state: ServerState) -> Router { ) .route("/local--code/{page_slug}/{index}", get(handle_hello_world)) .route("/local--html/{page_slug}/{id}", get(handle_hello_world)) - .route("/-/file/{page_slug}/{filename}", get(handle_hello_world)) + .route("/-/file/{page_slug}/{filename}", get(handle_file_fetch)) .route( "/-/download/{page_slug}/{filename}", - get(handle_hello_world), + get(handle_file_download), ) - .route("/-/code/{page_slug}/{index}", get(handle_hello_world)) - .route("/-/html/{page_slug}/{hash}", get(handle_hello_world)) + .route("/-/code/{page_slug}/{index}", get(handle_code_block)) + .route("/-/html/{page_slug}/{hash}", get(handle_html_block)) .route("/", get(handle_hello_world)) .with_state(file_state); From 3329e356a4cb8647827e45dde3070582c50316a3 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 02:22:09 -0500 Subject: [PATCH 072/306] Use fallback for routing. Finally, this is how you do the catch-all. --- wws/src/route.rs | 185 +++++++++++++++++++++++------------------------ 1 file changed, 91 insertions(+), 94 deletions(-) diff --git a/wws/src/route.rs b/wws/src/route.rs index 1308bcbd58..a50447eafd 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -62,7 +62,7 @@ pub fn build_router(state: ServerState) -> Router { .route("/-/download/{*rest}", any(redirect_to_files)) .route("/-/code/{*rest}", any(redirect_to_files)) .route("/-/html/{*rest}", any(redirect_to_files)) - .route("/", any(proxy_framerail)) + .fallback(proxy_framerail) .with_state(main_state); // Router that serves wjfiles @@ -81,111 +81,108 @@ pub fn build_router(state: ServerState) -> Router { ) .route("/-/code/{page_slug}/{index}", get(handle_code_block)) .route("/-/html/{page_slug}/{hash}", get(handle_html_block)) - .route("/", get(handle_hello_world)) + .fallback(handle_hello_world) .with_state(file_state); Router::new() // Domain delegation logic - .route( - "/", - any( - |State(state): State, - Host(hostname): Host, - mut request: Request| async move { - { - let headers = request.headers_mut(); + .fallback( + |State(state): State, + Host(hostname): Host, + mut request: Request| async move { + { + let headers = request.headers_mut(); - // Strip internal headers, just to be safe. - headers.remove("x-wikijump-site-id"); - headers.remove("x-wikijump-site-slug"); - headers.remove("x-wikijump-domain"); + // Strip internal headers, just to be safe. + headers.remove("x-wikijump-site-id"); + headers.remove("x-wikijump-site-slug"); + headers.remove("x-wikijump-domain"); - // Also add the domain header since that is the same before lookup_host() - headers.insert("x-wikijump-domain", header_value!(hostname)); - } + // Also add the domain header since that is the same before lookup_host() + headers.insert("x-wikijump-domain", header_value!(hostname)); + } - macro_rules! forward_request { - ($router:expr) => { - match $router.oneshot(request).await { - Ok(response) => response, - Err(infallible) => match infallible {}, - } - }; - } + macro_rules! 
forward_request { + ($router:expr) => { + match $router.oneshot(request).await { + Ok(response) => response, + Err(infallible) => match infallible {}, + } + }; + } - macro_rules! add_headers { - ($site_id:expr, $site_slug:expr) => {{ - // Validate types - let _: i64 = $site_id; - let _: &str = &$site_slug; + macro_rules! add_headers { + ($site_id:expr, $site_slug:expr) => {{ + // Validate types + let _: i64 = $site_id; + let _: &str = &$site_slug; - // Add headers - let headers = request.headers_mut(); - headers.insert("x-wikijump-site-id", header_value!(str!($site_id))); - headers.insert("x-wikijump-site-slug", header_value!($site_slug)); - }}; - } + // Add headers + let headers = request.headers_mut(); + headers.insert("x-wikijump-site-id", header_value!(str!($site_id))); + headers.insert("x-wikijump-site-slug", header_value!($site_slug)); + }}; + } - // Determine what host and site (e.g. main vs files, what site slug and ID) - let host_data = match lookup_host(&state, &hostname).await { - Ok(host_data) => host_data, - Err(error) => { - // TODO error page response in case of an internal issue - todo!() - } - }; + // Determine what host and site (e.g. main vs files, what site slug and ID) + let host_data = match lookup_host(&state, &hostname).await { + Ok(host_data) => host_data, + Err(error) => { + // TODO error page response in case of an internal issue + todo!() + } + }; - // Now that we have the general category of request type, we can - // give it to the right place to be processed. - match host_data { - // Main site route handling - SiteAndHost::Main { site_id, site_slug } => { - add_headers!(site_id, site_slug); - forward_request!(main_router) - } - SiteAndHost::MainCustom { site_id, site_slug } => { - // NOTE: The difference here is site_slug here is String not &str - add_headers!(site_id, site_slug); - forward_request!(main_router) - } - // Main site missing - SiteAndHost::MainMissing { site_slug } => { - // TODO - forward_request!(main_router) - } - SiteAndHost::MainCustomMissing => { - todo!() - } - // Default site redirect - // e.g. "www.wikijump.com/foo" -> "wikijump.com/foo" - SiteAndHost::DefaultRedirect => { - let destination = format!( - "https://{}{}", - state.domains.main_domain_no_dot, - get_path(request.uri()), - ); - Redirect::permanent(&destination).into_response() - } - // Files site route handling - SiteAndHost::File { site_id, site_slug } => { - add_headers!(site_id, site_slug); - forward_request!(files_router) - } - SiteAndHost::FileMissing { site_slug } => { - // TODO - forward_request!(files_router) - } - // Files site by itself - // See the case in host.rs for an explanation - SiteAndHost::FileRoot => { - let destination = - format!("https://{}", state.domains.main_domain_no_dot); + // Now that we have the general category of request type, we can + // give it to the right place to be processed. + match host_data { + // Main site route handling + SiteAndHost::Main { site_id, site_slug } => { + add_headers!(site_id, site_slug); + forward_request!(main_router) + } + SiteAndHost::MainCustom { site_id, site_slug } => { + // NOTE: The difference here is site_slug here is String not &str + add_headers!(site_id, site_slug); + forward_request!(main_router) + } + // Main site missing + SiteAndHost::MainMissing { site_slug } => { + // TODO + forward_request!(main_router) + } + SiteAndHost::MainCustomMissing => { + todo!() + } + // Default site redirect + // e.g. 
"www.wikijump.com/foo" -> "wikijump.com/foo" + SiteAndHost::DefaultRedirect => { + let destination = format!( + "https://{}{}", + state.domains.main_domain_no_dot, + get_path(request.uri()), + ); + Redirect::permanent(&destination).into_response() + } + // Files site route handling + SiteAndHost::File { site_id, site_slug } => { + add_headers!(site_id, site_slug); + forward_request!(files_router) + } + SiteAndHost::FileMissing { site_slug } => { + // TODO + forward_request!(files_router) + } + // Files site by itself + // See the case in host.rs for an explanation + SiteAndHost::FileRoot => { + let destination = + format!("https://{}", state.domains.main_domain_no_dot); - Redirect::temporary(&destination).into_response() - } - } - }, - ), + Redirect::temporary(&destination).into_response() + } + } + } ) // Easter egg .route("/-/teapot", any(handle_teapot)) From 49c8c6cc19705136ba4722e69fab2d945e634a19 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 02:36:52 -0500 Subject: [PATCH 073/306] Move main domain route logic to its own file. --- wws/src/handler/mod.rs | 119 ++++++++++++++++++++++++++++++++++++++++- wws/src/macros.rs | 11 ++++ wws/src/route.rs | 119 +++-------------------------------------- 3 files changed, 136 insertions(+), 113 deletions(-) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index f10dc9a0b4..1c25c337c4 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -32,9 +32,126 @@ pub use self::html::*; pub use self::misc::*; pub use self::redirect::*; -use axum::response::Html; +use crate::{ + host::{lookup_host, SiteAndHost}, + path::get_path, + state::ServerState, +}; +use axum::{ + body::Body, + extract::{Request, State}, + response::{Html, IntoResponse, Redirect, Response}, + Router, +}; +use axum_extra::extract::Host; +use tower::util::ServiceExt; #[deprecated] pub async fn handle_hello_world() -> Html<&'static str> { Html("

Hello, World!

") } + +/// Entry route handler to first process host information. +/// +/// Before we can give this request to the right place, +/// we first must determine if it's a main or files request, +/// and then what site it corresponds to. Then we can pass +/// it to the appropriate location. +pub async fn handle_host_delegation( + state: ServerState, + hostname: String, + mut request: Request, + main_router: Router, + files_router: Router, +) -> Response { + { + let headers = request.headers_mut(); + + // Strip internal headers, just to be safe. + headers.remove("x-wikijump-site-id"); + headers.remove("x-wikijump-site-slug"); + headers.remove("x-wikijump-domain"); + + // Also add the domain header since that is the same before lookup_host() + headers.insert("x-wikijump-domain", header_value!(hostname)); + } + + macro_rules! forward_request { + ($router:expr) => { + match $router.oneshot(request).await { + Ok(response) => response, + Err(infallible) => match infallible {}, + } + }; + } + + macro_rules! add_headers { + ($site_id:expr, $site_slug:expr) => {{ + // Validate types + let _: i64 = $site_id; + let _: &str = &$site_slug; + + // Add headers + let headers = request.headers_mut(); + headers.insert("x-wikijump-site-id", header_value!(str!($site_id))); + headers.insert("x-wikijump-site-slug", header_value!($site_slug)); + }}; + } + + // Determine what host and site (e.g. main vs files, what site slug and ID) + let host_data = match lookup_host(&state, &hostname).await { + Ok(host_data) => host_data, + Err(error) => { + // TODO error page response in case of an internal issue + todo!() + } + }; + + // Now that we have the general category of request type, we can + // give it to the right place to be processed. + match host_data { + // Main site route handling + SiteAndHost::Main { site_id, site_slug } => { + add_headers!(site_id, site_slug); + forward_request!(main_router) + } + SiteAndHost::MainCustom { site_id, site_slug } => { + // NOTE: The difference here is site_slug here is String not &str + add_headers!(site_id, site_slug); + forward_request!(main_router) + } + // Main site missing + SiteAndHost::MainMissing { site_slug } => { + // TODO + forward_request!(main_router) + } + SiteAndHost::MainCustomMissing => { + todo!() + } + // Default site redirect + // e.g. "www.wikijump.com/foo" -> "wikijump.com/foo" + SiteAndHost::DefaultRedirect => { + let destination = format!( + "https://{}{}", + state.domains.main_domain_no_dot, + get_path(request.uri()), + ); + Redirect::permanent(&destination).into_response() + } + // Files site route handling + SiteAndHost::File { site_id, site_slug } => { + add_headers!(site_id, site_slug); + forward_request!(files_router) + } + SiteAndHost::FileMissing { site_slug } => { + // TODO + forward_request!(files_router) + } + // Files site by itself + // See the case in host.rs for an explanation + SiteAndHost::FileRoot => { + let destination = format!("https://{}", state.domains.main_domain_no_dot); + Redirect::temporary(&destination).into_response() + } + } +} diff --git a/wws/src/macros.rs b/wws/src/macros.rs index 34a5ec29a2..6a6f99f650 100644 --- a/wws/src/macros.rs +++ b/wws/src/macros.rs @@ -31,3 +31,14 @@ macro_rules! str_write { write!($dest, $($arg)*).expect("Writing to string failed"); }}; } + +/// Convert a string to a `HeaderValue`. +/// +/// This code assumes the string in question is valid and can be +/// converted to a header value. +macro_rules! 
header_value { + ($value:expr) => {{ + use axum::http::header::HeaderValue; + HeaderValue::from_str(&$value).expect("String is not a valid header value") + }}; +} diff --git a/wws/src/route.rs b/wws/src/route.rs index a50447eafd..932902368f 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -18,24 +18,16 @@ * along with this program. If not, see . */ -use crate::{ - handler::*, - host::{lookup_host, SiteAndHost}, - info, - path::get_path, - state::ServerState, -}; +use crate::{handler::*, info, state::ServerState}; use axum::{ body::Body, extract::{Request, State}, http::header::{HeaderName, HeaderValue}, - response::{IntoResponse, Redirect}, routing::{any, get}, Router, }; use axum_extra::extract::Host; -use std::{convert::Infallible, sync::Arc}; -use tower::util::ServiceExt; +use std::sync::Arc; use tower_http::{ compression::CompressionLayer, normalize_path::NormalizePathLayer, set_header::SetResponseHeaderLayer, trace::TraceLayer, @@ -46,12 +38,6 @@ pub fn build_router(state: ServerState) -> Router { let file_state = Arc::clone(&state); let header_state = Arc::clone(&state); - macro_rules! header_value { - ($value:expr) => { - HeaderValue::from_str(&$value).expect("String is not a valid header value") - }; - } - // Router that serves framerail // TODO let main_router = Router::new() @@ -85,104 +71,13 @@ pub fn build_router(state: ServerState) -> Router { .with_state(file_state); Router::new() - // Domain delegation logic + // Forward requests to the appropriate sub-router depending on the hostname .fallback( |State(state): State, - Host(hostname): Host, - mut request: Request| async move { - { - let headers = request.headers_mut(); - - // Strip internal headers, just to be safe. - headers.remove("x-wikijump-site-id"); - headers.remove("x-wikijump-site-slug"); - headers.remove("x-wikijump-domain"); - - // Also add the domain header since that is the same before lookup_host() - headers.insert("x-wikijump-domain", header_value!(hostname)); - } - - macro_rules! forward_request { - ($router:expr) => { - match $router.oneshot(request).await { - Ok(response) => response, - Err(infallible) => match infallible {}, - } - }; - } - - macro_rules! add_headers { - ($site_id:expr, $site_slug:expr) => {{ - // Validate types - let _: i64 = $site_id; - let _: &str = &$site_slug; - - // Add headers - let headers = request.headers_mut(); - headers.insert("x-wikijump-site-id", header_value!(str!($site_id))); - headers.insert("x-wikijump-site-slug", header_value!($site_slug)); - }}; - } - - // Determine what host and site (e.g. main vs files, what site slug and ID) - let host_data = match lookup_host(&state, &hostname).await { - Ok(host_data) => host_data, - Err(error) => { - // TODO error page response in case of an internal issue - todo!() - } - }; - - // Now that we have the general category of request type, we can - // give it to the right place to be processed. - match host_data { - // Main site route handling - SiteAndHost::Main { site_id, site_slug } => { - add_headers!(site_id, site_slug); - forward_request!(main_router) - } - SiteAndHost::MainCustom { site_id, site_slug } => { - // NOTE: The difference here is site_slug here is String not &str - add_headers!(site_id, site_slug); - forward_request!(main_router) - } - // Main site missing - SiteAndHost::MainMissing { site_slug } => { - // TODO - forward_request!(main_router) - } - SiteAndHost::MainCustomMissing => { - todo!() - } - // Default site redirect - // e.g. 
"www.wikijump.com/foo" -> "wikijump.com/foo" - SiteAndHost::DefaultRedirect => { - let destination = format!( - "https://{}{}", - state.domains.main_domain_no_dot, - get_path(request.uri()), - ); - Redirect::permanent(&destination).into_response() - } - // Files site route handling - SiteAndHost::File { site_id, site_slug } => { - add_headers!(site_id, site_slug); - forward_request!(files_router) - } - SiteAndHost::FileMissing { site_slug } => { - // TODO - forward_request!(files_router) - } - // Files site by itself - // See the case in host.rs for an explanation - SiteAndHost::FileRoot => { - let destination = - format!("https://{}", state.domains.main_domain_no_dot); - - Redirect::temporary(&destination).into_response() - } - } - } + Host(hostname): Host, + request: Request| async move { + handle_host_delegation(state, hostname, request, main_router, files_router).await + } ) // Easter egg .route("/-/teapot", any(handle_teapot)) From 7764217ba67e9e1ad4fd74422dcfdb1be996293b Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 02:39:06 -0500 Subject: [PATCH 074/306] Address a few warnings. --- wws/src/error.rs | 2 -- wws/src/handler/mod.rs | 3 +-- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/wws/src/error.rs b/wws/src/error.rs index 9c05dc1397..c4c1b7b7b5 100644 --- a/wws/src/error.rs +++ b/wws/src/error.rs @@ -23,8 +23,6 @@ use s3::error::S3Error; use std::io; use thiserror::Error as ThisError; -pub use std::error::Error as StdError; - pub type StdResult = std::result::Result; pub type Result = StdResult; diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 1c25c337c4..5a47d18e84 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -39,11 +39,10 @@ use crate::{ }; use axum::{ body::Body, - extract::{Request, State}, + extract::Request, response::{Html, IntoResponse, Redirect, Response}, Router, }; -use axum_extra::extract::Host; use tower::util::ServiceExt; #[deprecated] From e4f55ce51448a223beb5f0aad030d0bc51d79aa8 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 02:52:26 -0500 Subject: [PATCH 075/306] Add more handlers for files routes. 
--- wws/src/handler/code.rs | 9 +++++++-- wws/src/handler/html.rs | 9 +++++++-- wws/src/handler/misc.rs | 4 ++++ wws/src/route.rs | 16 +++++++++++++--- 4 files changed, 31 insertions(+), 7 deletions(-) diff --git a/wws/src/handler/code.rs b/wws/src/handler/code.rs index 49dbe35f24..205654ea48 100644 --- a/wws/src/handler/code.rs +++ b/wws/src/handler/code.rs @@ -21,13 +21,18 @@ use crate::state::ServerState; use axum::{ extract::{Path, State}, - response::Html, + response::{Html, Redirect}, }; pub async fn handle_code_block( State(state): State, - Path((page_slug, hash)): Path<(String, String)>, + Path((page_slug, index)): Path<(String, String)>, ) -> Html<&'static str> { // TODO todo!() } + +pub async fn handle_code_redirect(Path((page_slug, index)): Path<(String, String)>) -> Redirect { + let destination = format!("/-/code/{page_slug}/{index}"); + Redirect::permanent(&destination) +} diff --git a/wws/src/handler/html.rs b/wws/src/handler/html.rs index ecf76a12d6..2136b65281 100644 --- a/wws/src/handler/html.rs +++ b/wws/src/handler/html.rs @@ -21,13 +21,18 @@ use crate::state::ServerState; use axum::{ extract::{Path, State}, - response::Html, + response::{Html, Redirect}, }; pub async fn handle_html_block( State(state): State, - Path((page_slug, index)): Path<(String, String)>, + Path((page_slug, id)): Path<(String, String)>, ) -> Html<&'static str> { // TODO todo!() } + +pub async fn handle_html_redirect(Path((page_slug, id)): Path<(String, String)>) -> Redirect { + let destination = format!("/-/html/{page_slug}/{id}"); + Redirect::permanent(&destination) +} diff --git a/wws/src/handler/misc.rs b/wws/src/handler/misc.rs index 7343b2922c..0d01932c69 100644 --- a/wws/src/handler/misc.rs +++ b/wws/src/handler/misc.rs @@ -27,3 +27,7 @@ pub async fn handle_teapot() -> Response { .body(Body::from("🫖")) .expect("Unable to convert response data") } + +pub async fn handle_invalid_method() -> StatusCode { + StatusCode::METHOD_NOT_ALLOWED +} diff --git a/wws/src/route.rs b/wws/src/route.rs index 932902368f..a990757066 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -58,15 +58,25 @@ pub fn build_router(state: ServerState) -> Router { "/local--files/{page_slug}/{filename}", get(handle_hello_world), ) - .route("/local--code/{page_slug}/{index}", get(handle_hello_world)) - .route("/local--html/{page_slug}/{id}", get(handle_hello_world)) + .route( + "/local--code/{page_slug}/{index}", + any(handle_code_redirect), + ) + .route("/local--html/{page_slug}/{id}", any(handle_html_redirect)) .route("/-/file/{page_slug}/{filename}", get(handle_file_fetch)) + .route("/-/file/{page_slug}/{filename}", any(handle_invalid_method)) .route( "/-/download/{page_slug}/{filename}", get(handle_file_download), ) + .route( + "/-/download/{page_slug}/{filename}", + any(handle_invalid_method), + ) .route("/-/code/{page_slug}/{index}", get(handle_code_block)) - .route("/-/html/{page_slug}/{hash}", get(handle_html_block)) + .route("/-/code/{page_slug}/{index}", any(handle_invalid_method)) + .route("/-/html/{page_slug}/{id}", get(handle_html_block)) + .route("/-/html/{page_slug}/{id}", any(handle_invalid_method)) .fallback(handle_hello_world) .with_state(file_state); From 644bec1064062e1bed92266d3229e2caf89a3ad7 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 02:53:00 -0500 Subject: [PATCH 076/306] Remove TODO for finished router. 
--- wws/src/route.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/wws/src/route.rs b/wws/src/route.rs index a990757066..55b24b494b 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -39,7 +39,6 @@ pub fn build_router(state: ServerState) -> Router { let header_state = Arc::clone(&state); // Router that serves framerail - // TODO let main_router = Router::new() .route("/local--files/{*rest}", any(redirect_to_files)) .route("/local--code/{*rest}", any(redirect_to_files)) From 9f21bdb81a74d94829845d71cdce492844762fb7 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 02:55:37 -0500 Subject: [PATCH 077/306] Add redirect for files router. --- wws/src/handler/file.rs | 7 ++++++- wws/src/route.rs | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 673d517b95..8aeba3170f 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -21,7 +21,7 @@ use crate::state::ServerState; use axum::{ extract::{Path, State}, - response::Html, + response::{Html, Redirect}, }; use axum_extra::response::Attachment; @@ -40,3 +40,8 @@ pub async fn handle_file_download( // TODO Attachment todo!() } + +pub async fn handle_file_redirect(Path((page_slug, filename)): Path<(String, String)>) -> Redirect { + let destination = format!("/-/{page_slug}/{filename}"); + Redirect::permanent(&destination) +} diff --git a/wws/src/route.rs b/wws/src/route.rs index 55b24b494b..72da505f32 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -55,7 +55,7 @@ pub fn build_router(state: ServerState) -> Router { let files_router = Router::new() .route( "/local--files/{page_slug}/{filename}", - get(handle_hello_world), + get(handle_file_redirect), ) .route( "/local--code/{page_slug}/{index}", From 56ab01b3c68218b4069f1244373087a06e493b35 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 03:17:18 -0500 Subject: [PATCH 078/306] Add more header processing. --- wws/src/handler/mod.rs | 33 +++++++++++++++++++++++++-------- wws/src/handler/redirect.rs | 25 ++++++++++++++++++++++--- wws/src/route.rs | 8 ++++---- 3 files changed, 51 insertions(+), 15 deletions(-) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 5a47d18e84..295a773077 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -40,11 +40,20 @@ use crate::{ use axum::{ body::Body, extract::Request, + http::header::HeaderName, response::{Html, IntoResponse, Redirect, Response}, Router, }; use tower::util::ServiceExt; +pub const HEADER_SITE_ID: HeaderName = HeaderName::from_static("x-wikijump-site-id"); +pub const HEADER_SITE_SLUG: HeaderName = HeaderName::from_static("x-wikijump-site-slug"); +pub const HEADER_DOMAIN: HeaderName = HeaderName::from_static("x-wikijump-domain"); + +pub const HEADER_IS_WIKIJUMP: HeaderName = HeaderName::from_static("x-wikijump"); +pub const HEADER_WWS_VERSION: HeaderName = HeaderName::from_static("x-wikijump-wws-ver"); +pub const HEADER_DEEPWELL_VERSION: HeaderName = HeaderName::from_static("x-wikijump-deepwell-ver"); + #[deprecated] pub async fn handle_hello_world() -> Html<&'static str> { Html("

Hello, World!

") @@ -67,12 +76,12 @@ pub async fn handle_host_delegation( let headers = request.headers_mut(); // Strip internal headers, just to be safe. - headers.remove("x-wikijump-site-id"); - headers.remove("x-wikijump-site-slug"); - headers.remove("x-wikijump-domain"); + headers.remove(HEADER_SITE_ID); + headers.remove(HEADER_SITE_SLUG); + headers.remove(HEADER_DOMAIN); // Also add the domain header since that is the same before lookup_host() - headers.insert("x-wikijump-domain", header_value!(hostname)); + headers.insert(HEADER_DOMAIN, header_value!(hostname)); } macro_rules! forward_request { @@ -85,6 +94,7 @@ pub async fn handle_host_delegation( } macro_rules! add_headers { + // Add both headers ($site_id:expr, $site_slug:expr) => {{ // Validate types let _: i64 = $site_id; @@ -92,8 +102,15 @@ pub async fn handle_host_delegation( // Add headers let headers = request.headers_mut(); - headers.insert("x-wikijump-site-id", header_value!(str!($site_id))); - headers.insert("x-wikijump-site-slug", header_value!($site_slug)); + headers.insert(HEADER_SITE_ID, header_value!(str!($site_id))); + headers.insert(HEADER_SITE_SLUG, header_value!($site_slug)); + }}; + + // Add only slug (site doesn't exist) + ($site_slug:expr) => {{ + let _: &str = &$site_slug; + let headers = request.headers_mut(); + headers.insert(HEADER_SITE_SLUG, header_value!($site_slug)); }}; } @@ -121,7 +138,7 @@ pub async fn handle_host_delegation( } // Main site missing SiteAndHost::MainMissing { site_slug } => { - // TODO + add_headers!(site_slug); forward_request!(main_router) } SiteAndHost::MainCustomMissing => { @@ -143,7 +160,7 @@ pub async fn handle_host_delegation( forward_request!(files_router) } SiteAndHost::FileMissing { site_slug } => { - // TODO + add_headers!(site_slug); forward_request!(files_router) } // Files site by itself diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs index 738e4889c2..3499fe0636 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -18,10 +18,12 @@ * along with this program. If not, see . 
*/ +use super::HEADER_SITE_SLUG; use crate::path::get_path; use crate::state::ServerState; use axum::{ - extract::{Request, State}, + extract::State, + http::{header::HeaderMap, Uri}, response::Html, }; use axum_extra::extract::Host; @@ -29,9 +31,9 @@ use axum_extra::extract::Host; pub async fn redirect_to_files( State(state): State, Host(hostname): Host, - req: Request, + uri: Uri, ) -> Html<&'static str> { - let path = get_path(req.uri()); + let path = get_path(&uri); // xyz.wikijump.com -> xyz.wjfiles.com // customdomain.com -> xyz.wjfiles.com @@ -39,3 +41,20 @@ pub async fn redirect_to_files( let uri = format!("https://{hostname}{path}"); todo!() } + +pub async fn redirect_to_main( + State(state): State, + headers: HeaderMap, + uri: Uri, +) -> Html<&'static str> { + let site_slug = headers + .get(HEADER_SITE_SLUG) + .expect("Site slug header not set by parent rounter") + .to_str() + .expect("Unable to convert site slug header to string"); + + let path = get_path(&uri); + let uri = format!("https://{}{}{}", site_slug, state.domains.main_domain, path,); + + todo!() +} diff --git a/wws/src/route.rs b/wws/src/route.rs index 72da505f32..9d16a4a841 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -76,7 +76,7 @@ pub fn build_router(state: ServerState) -> Router { .route("/-/code/{page_slug}/{index}", any(handle_invalid_method)) .route("/-/html/{page_slug}/{id}", get(handle_html_block)) .route("/-/html/{page_slug}/{id}", any(handle_invalid_method)) - .fallback(handle_hello_world) + .fallback(redirect_to_main) .with_state(file_state); Router::new() @@ -101,15 +101,15 @@ pub fn build_router(state: ServerState) -> Router { .zstd(true), ) .layer(SetResponseHeaderLayer::overriding( - HeaderName::from_static("x-wikijump"), + HEADER_IS_WIKIJUMP, Some(HeaderValue::from_static("1")), )) .layer(SetResponseHeaderLayer::overriding( - HeaderName::from_static("x-wikijump-wws-ver"), + HEADER_WWS_VERSION, Some(header_value!(&*info::VERSION_INFO)), )) .layer(SetResponseHeaderLayer::overriding( - HeaderName::from_static("x-wikijump-deepwell-ver"), + HEADER_DEEPWELL_VERSION, Some(header_value!(&header_state.domains.deepwell_version)), )) .with_state(state) From 29a6fefe07ece401db60ee65e27731009c3fa788 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 03:23:05 -0500 Subject: [PATCH 079/306] Fix URL construction for redirect handlers. 
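The targets are now built from the internal site-slug header plus the configured domains instead of the raw Host, so requests coming in on custom domains also land on the right wjfiles host. Intended mapping, as a sketch (hypothetical slug and path; assumes `files_domain` keeps its leading dot, e.g. ".wjfiles.com"):

    // xyz.wikijump.com/local--files/some-page/image.png
    // customdomain.com/local--files/some-page/image.png
    //   -> https://xyz.wjfiles.com/local--files/some-page/image.png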
--- wws/src/handler/redirect.rs | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs index 3499fe0636..12440282e8 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -26,19 +26,19 @@ use axum::{ http::{header::HeaderMap, Uri}, response::Html, }; -use axum_extra::extract::Host; pub async fn redirect_to_files( State(state): State, - Host(hostname): Host, + headers: HeaderMap, uri: Uri, ) -> Html<&'static str> { - let path = get_path(&uri); - // xyz.wikijump.com -> xyz.wjfiles.com // customdomain.com -> xyz.wjfiles.com - let uri = format!("https://{hostname}{path}"); + let site_slug = get_site_slug(&headers); + let domain = &state.domains.files_domain; + let path = get_path(&uri); + let uri = format!("https://{site_slug}{domain}{path}"); todo!() } @@ -47,14 +47,17 @@ pub async fn redirect_to_main( headers: HeaderMap, uri: Uri, ) -> Html<&'static str> { - let site_slug = headers + let site_slug = get_site_slug(&headers); + let domain = &state.domains.main_domain; + let path = get_path(&uri); + let uri = format!("https://{site_slug}{domain}{path}"); + todo!() +} + +fn get_site_slug(headers: &HeaderMap) -> &str { + headers .get(HEADER_SITE_SLUG) .expect("Site slug header not set by parent rounter") .to_str() - .expect("Unable to convert site slug header to string"); - - let path = get_path(&uri); - let uri = format!("https://{}{}{}", site_slug, state.domains.main_domain, path,); - - todo!() + .expect("Unable to convert site slug header to string") } From 08b9b8ef807c656b67b41dad6f50181bd53fd188 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 03:23:58 -0500 Subject: [PATCH 080/306] Remove todo!, return redirect for handlers. --- wws/src/handler/redirect.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs index 12440282e8..9a17f69885 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -24,34 +24,34 @@ use crate::state::ServerState; use axum::{ extract::State, http::{header::HeaderMap, Uri}, - response::Html, + response::Redirect, }; pub async fn redirect_to_files( State(state): State, headers: HeaderMap, uri: Uri, -) -> Html<&'static str> { +) -> Redirect { // xyz.wikijump.com -> xyz.wjfiles.com // customdomain.com -> xyz.wjfiles.com let site_slug = get_site_slug(&headers); let domain = &state.domains.files_domain; let path = get_path(&uri); - let uri = format!("https://{site_slug}{domain}{path}"); - todo!() + let destination = format!("https://{site_slug}{domain}{path}"); + Redirect::permanent(&destination) } pub async fn redirect_to_main( State(state): State, headers: HeaderMap, uri: Uri, -) -> Html<&'static str> { +) -> Redirect { let site_slug = get_site_slug(&headers); let domain = &state.domains.main_domain; let path = get_path(&uri); - let uri = format!("https://{site_slug}{domain}{path}"); - todo!() + let destination = format!("https://{site_slug}{domain}{path}"); + Redirect::permanent(&destination) } fn get_site_slug(headers: &HeaderMap) -> &str { From 337184f2f57eef294b3f9ce4297b1730166ff49d Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 03:25:02 -0500 Subject: [PATCH 081/306] Remove to-do/stub handler. 
--- wws/src/handler/mod.rs | 7 +------ wws/src/route.rs | 1 - 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 295a773077..16601fb6ac 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -41,7 +41,7 @@ use axum::{ body::Body, extract::Request, http::header::HeaderName, - response::{Html, IntoResponse, Redirect, Response}, + response::{IntoResponse, Redirect, Response}, Router, }; use tower::util::ServiceExt; @@ -54,11 +54,6 @@ pub const HEADER_IS_WIKIJUMP: HeaderName = HeaderName::from_static("x-wikijump") pub const HEADER_WWS_VERSION: HeaderName = HeaderName::from_static("x-wikijump-wws-ver"); pub const HEADER_DEEPWELL_VERSION: HeaderName = HeaderName::from_static("x-wikijump-deepwell-ver"); -#[deprecated] -pub async fn handle_hello_world() -> Html<&'static str> { - Html("

Hello, World!

") -} - /// Entry route handler to first process host information. /// /// Before we can give this request to the right place, diff --git a/wws/src/route.rs b/wws/src/route.rs index 9d16a4a841..06326140ba 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -51,7 +51,6 @@ pub fn build_router(state: ServerState) -> Router { .with_state(main_state); // Router that serves wjfiles - // TODO let files_router = Router::new() .route( "/local--files/{page_slug}/{filename}", From 79ae814a609ed209f64457773dce4844bba89084 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 03:30:17 -0500 Subject: [PATCH 082/306] Add default site handling for redirect URL building. --- wws/src/handler/redirect.rs | 19 +++++++++++++------ wws/src/host.rs | 2 +- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs index 9a17f69885..a482b44065 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -19,8 +19,7 @@ */ use super::HEADER_SITE_SLUG; -use crate::path::get_path; -use crate::state::ServerState; +use crate::{path::get_path, state::ServerState, host::DEFAULT_SITE_SLUG}; use axum::{ extract::State, http::{header::HeaderMap, Uri}, @@ -36,9 +35,8 @@ pub async fn redirect_to_files( // customdomain.com -> xyz.wjfiles.com let site_slug = get_site_slug(&headers); - let domain = &state.domains.files_domain; let path = get_path(&uri); - let destination = format!("https://{site_slug}{domain}{path}"); + let destination = build_url(site_slug, &state.domains.files_domain_no_dot, path); Redirect::permanent(&destination) } @@ -48,9 +46,8 @@ pub async fn redirect_to_main( uri: Uri, ) -> Redirect { let site_slug = get_site_slug(&headers); - let domain = &state.domains.main_domain; let path = get_path(&uri); - let destination = format!("https://{site_slug}{domain}{path}"); + let destination = build_url(site_slug, &state.domains.main_domain_no_dot, path); Redirect::permanent(&destination) } @@ -61,3 +58,13 @@ fn get_site_slug(headers: &HeaderMap) -> &str { .to_str() .expect("Unable to convert site slug header to string") } + +fn build_url(site_slug: &str, domain_no_dot: &str, path: &str) -> String { + if site_slug == DEFAULT_SITE_SLUG { + // We don't include the 'www' for the default site, just do the regular domain + format!("https://{domain_no_dot}{path}") + } else { + // Otherwise, add the site slug as the subdomain + format!("https://{site_slug}.{domain_no_dot}{path}") + } +} diff --git a/wws/src/host.rs b/wws/src/host.rs index e55272e9f7..66a59f494d 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -24,7 +24,7 @@ use crate::{deepwell::Domains, error::Result, state::ServerState}; /// /// This refers to the site displayed when you visit `wikijump.com` /// with no subdomain component. -const DEFAULT_SITE_SLUG: &str = "www"; +pub const DEFAULT_SITE_SLUG: &str = "www"; #[derive(Debug)] pub enum SiteAndHost<'a> { From 3f34521ff2643f9a41b0f40a7db1dae4f28de4e0 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 03:38:50 -0500 Subject: [PATCH 083/306] Change handling of redirect URL construction. 
--- wws/src/handler/redirect.rs | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs index a482b44065..cde63508df 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -36,7 +36,8 @@ pub async fn redirect_to_files( let site_slug = get_site_slug(&headers); let path = get_path(&uri); - let destination = build_url(site_slug, &state.domains.files_domain_no_dot, path); + let domain = &state.domains.files_domain; + let destination = format!("https://{site_slug}{domain}{path}"); Redirect::permanent(&destination) } @@ -47,7 +48,17 @@ pub async fn redirect_to_main( ) -> Redirect { let site_slug = get_site_slug(&headers); let path = get_path(&uri); - let destination = build_url(site_slug, &state.domains.main_domain_no_dot, path); + + // Only remove www for the main site. + // The files site should always have an explicit site slug. + let destination = if site_slug == DEFAULT_SITE_SLUG { + let domain = &state.domains.main_domain_no_dot; + format!("https://{domain}{path}") + } else { + let domain = &state.domains.main_domain; + format!("https://{site_slug}{domain}{path}") + }; + Redirect::permanent(&destination) } @@ -58,13 +69,3 @@ fn get_site_slug(headers: &HeaderMap) -> &str { .to_str() .expect("Unable to convert site slug header to string") } - -fn build_url(site_slug: &str, domain_no_dot: &str, path: &str) -> String { - if site_slug == DEFAULT_SITE_SLUG { - // We don't include the 'www' for the default site, just do the regular domain - format!("https://{domain_no_dot}{path}") - } else { - // Otherwise, add the site slug as the subdomain - format!("https://{site_slug}.{domain_no_dot}{path}") - } -} From 5683e7c7faddc9903554c248144ad6485984294b Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 03:46:22 -0500 Subject: [PATCH 084/306] Fix redirect path. --- wws/src/handler/file.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 8aeba3170f..6da69a15b8 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -42,6 +42,6 @@ pub async fn handle_file_download( } pub async fn handle_file_redirect(Path((page_slug, filename)): Path<(String, String)>) -> Redirect { - let destination = format!("/-/{page_slug}/{filename}"); + let destination = format!("/-/file/{page_slug}/{filename}"); Redirect::permanent(&destination) } From 0d544824b8be124c7cbf327e395dec832a4b62d2 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 03:50:29 -0500 Subject: [PATCH 085/306] Add logging for handlers. 
--- wws/src/handler/code.rs | 6 ++++++ wws/src/handler/file.rs | 12 ++++++++++++ wws/src/handler/framerail.rs | 2 ++ 3 files changed, 20 insertions(+) diff --git a/wws/src/handler/code.rs b/wws/src/handler/code.rs index 205654ea48..732c552f0e 100644 --- a/wws/src/handler/code.rs +++ b/wws/src/handler/code.rs @@ -28,6 +28,12 @@ pub async fn handle_code_block( State(state): State, Path((page_slug, index)): Path<(String, String)>, ) -> Html<&'static str> { + info!( + page_slug = page_slug, + index = index, + "Returning code block data", + ); + // TODO todo!() } diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 6da69a15b8..6ecefbabd5 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -29,6 +29,12 @@ pub async fn handle_file_fetch( State(state): State, Path((page_slug, filename)): Path<(String, String)>, ) -> Html<&'static str> { + info!( + page_slug = page_slug, + filename = filename, + "Returning file data", + ); + // TODO todo!() } @@ -37,6 +43,12 @@ pub async fn handle_file_download( State(state): State, Path((page_slug, filename)): Path<(String, String)>, ) -> Html<&'static str> { + info!( + page_slug = page_slug, + filename = filename, + "Returning file download", + ); + // TODO Attachment todo!() } diff --git a/wws/src/handler/framerail.rs b/wws/src/handler/framerail.rs index 2db57c9b67..4ac36cbc78 100644 --- a/wws/src/handler/framerail.rs +++ b/wws/src/handler/framerail.rs @@ -30,6 +30,8 @@ pub async fn proxy_framerail( State(state): State, mut req: Request, ) -> Html<&'static str> { + info!("Proxying request to framerail"); + // Get path and query let path = get_path(req.uri()); From 95db6025e458b5c269a4a97f2eebf383e0a5f5d5 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 03:50:39 -0500 Subject: [PATCH 086/306] Run rustfmt. --- wws/src/handler/redirect.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs index cde63508df..3610ab3cd3 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -19,7 +19,7 @@ */ use super::HEADER_SITE_SLUG; -use crate::{path::get_path, state::ServerState, host::DEFAULT_SITE_SLUG}; +use crate::{host::DEFAULT_SITE_SLUG, path::get_path, state::ServerState}; use axum::{ extract::State, http::{header::HeaderMap, Uri}, From 8b140ac760d0966e52763c022da806d968dee026 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 04:01:17 -0500 Subject: [PATCH 087/306] Fix get_site_from_slug(). --- wws/src/deepwell.rs | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 6c82f427dd..0259b26762 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -117,23 +117,12 @@ impl Deepwell { } pub async fn get_site_from_slug(&self, slug: &str) -> Result> { - use jsonrpsee::core::ClientError; - - let result = self + let site_data: Option = self .client .request("site_get", rpc_object! { "site" => slug }) - .await; - - match result { - // Site data found - Ok(site_data) => Ok(Some(site_data)), - - // SiteNotFound error case - Err(ClientError::Call(error)) if error.code() == 2004 => Ok(None), + .await?; - // For any other error, forward - Err(error) => Err(Error::Deepwell(error)), - } + Ok(site_data) } pub async fn get_site_from_domain(&self, domain: &str) -> Result> { From 1188ddd021c3cd710b892bc36aee2b974a9ff2f4 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 16 Jan 2025 04:13:52 -0500 Subject: [PATCH 088/306] Switch redis to async. 
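The cache now goes through redis's async API (AsyncCommands plus a multiplexed tokio connection) instead of the blocking Commands trait. A minimal standalone sketch of the pattern the diff adopts (function and key names are illustrative, not code from this patch):

    use redis::AsyncCommands;

    // Fetch one hash field without blocking the tokio runtime.
    async fn lookup_id(client: &redis::Client, key: &str) -> redis::RedisResult<Option<i64>> {
        let mut conn = client.get_multiplexed_async_connection().await?;
        conn.hget(key, "id").await
    }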
--- wws/Cargo.lock | 76 +++++++++++++++++++++++++++++++++++++++++++----- wws/Cargo.toml | 2 +- wws/src/cache.rs | 28 +++++++++--------- wws/src/state.rs | 9 +++--- 4 files changed, 89 insertions(+), 26 deletions(-) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index aa29519bc8..72276bc513 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -1234,6 +1234,16 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + [[package]] name = "log" version = "0.4.22" @@ -1385,6 +1395,29 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets", +] + [[package]] name = "percent-encoding" version = "2.3.1" @@ -1472,6 +1505,17 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "r2d2" +version = "0.8.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51de85fb3fb6524929c8a2eb85e6b6d363de4e8c48f9e2c2eac4944abc181c93" +dependencies = [ + "log", + "parking_lot", + "scheduled-thread-pool", +] + [[package]] name = "rand" version = "0.8.5" @@ -1517,12 +1561,12 @@ dependencies = [ "itoa", "percent-encoding", "pin-project-lite", + "r2d2", "rustls 0.22.4", "rustls-native-certs 0.7.3", "rustls-pemfile 2.2.0", "rustls-pki-types", "ryu", - "sha1_smol", "socket2", "tokio", "tokio-retry", @@ -1531,6 +1575,15 @@ dependencies = [ "url", ] +[[package]] +name = "redox_syscall" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +dependencies = [ + "bitflags", +] + [[package]] name = "ref-map" version = "0.1.3" @@ -1824,6 +1877,21 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "scheduled-thread-pool" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cbc66816425a074528352f5789333ecff06ca41b36b0b0efdfbb29edc391a19" +dependencies = [ + "parking_lot", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + [[package]] name = "sct" version = "0.7.1" @@ -1890,12 +1958,6 @@ dependencies = [ "serde", ] -[[package]] -name = "sha1_smol" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" - [[package]] name = "sha2" version = "0.10.8" diff --git a/wws/Cargo.toml b/wws/Cargo.toml index c16a6151ec..41b0cd2342 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -21,7 
+21,7 @@ color-backtrace = "0.6" dotenvy = "0.15" jsonrpsee = { version = "0.24", features = ["async-client", "jsonrpsee-http-client"] } once_cell = "1" -redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "tokio-comp", "tokio-rustls-comp"] } +redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "r2d2", "tokio-comp", "tokio-rustls-comp"], default-features = false } ref-map = "0.1" rust-s3 = { version = "0.35", features = ["with-tokio", "tokio-rustls-tls"], default-features = false } serde = { version = "1", features = ["derive"] } diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 5f2cedcd4c..d69286c6db 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -19,7 +19,7 @@ */ use crate::error::Result; -use redis::Commands; +use redis::AsyncCommands; #[derive(Debug)] pub struct Cache { @@ -34,35 +34,35 @@ impl Cache { } /// Retrieve the site ID from the slug from the cache. - pub fn get_site_slug(&self, site_slug: &str) -> Result> { - let mut conn = self.client.get_connection()?; + pub async fn get_site_slug(&self, site_slug: &str) -> Result> { + let mut conn = self.client.get_multiplexed_async_connection().await?; let key = format!("site_slug:{site_slug}"); - let value = conn.hget(key, "id")?; + let value = conn.hget(key, "id").await?; Ok(value) } /// Set the site ID for a site slug. - pub fn set_site_slug(&self, site_slug: &str, site_id: i64) -> Result<()> { - let mut conn = self.client.get_connection()?; + pub async fn set_site_slug(&self, site_slug: &str, site_id: i64) -> Result<()> { + let mut conn = self.client.get_multiplexed_async_connection().await?; let key = format!("site_slug:{site_slug}"); - conn.hset::<_, _, _, ()>(key, "id", site_id)?; + conn.hset::<_, _, _, ()>(key, "id", site_id).await?; Ok(()) } /// Retrieve the site slug and ID from a custom domain from the cache. - pub fn get_site_domain(&self, domain: &str) -> Result> { - let mut conn = self.client.get_connection()?; + pub async fn get_site_domain(&self, domain: &str) -> Result> { + let mut conn = self.client.get_multiplexed_async_connection().await?; let key = format!("site_domain:{domain}"); - let value = conn.hget(key, &["id", "slug"])?; + let value = conn.hget(key, &["id", "slug"]).await?; Ok(value) } /// Set the site slug and ID for a custom domain. - pub fn set_site_domain(&self, domain: &str, site_id: i64, site_slug: &str) -> Result<()> { - let mut conn = self.client.get_connection()?; + pub async fn set_site_domain(&self, domain: &str, site_id: i64, site_slug: &str) -> Result<()> { + let mut conn = self.client.get_multiplexed_async_connection().await?; let key = format!("site_domain:{domain}"); - conn.hset::<_, _, _, ()>(&key, "id", site_id)?; - conn.hset::<_, _, _, ()>(&key, "slug", site_slug)?; + conn.hset::<_, _, _, ()>(&key, "id", site_id).await?; + conn.hset::<_, _, _, ()>(&key, "slug", site_slug).await?; Ok(()) } } diff --git a/wws/src/state.rs b/wws/src/state.rs index c32eca56dd..eb8fc9c6c7 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -73,12 +73,12 @@ impl ServerStateInner { // if not present, get it from DEEPWELL and populate it". pub async fn get_site_slug(&self, site_slug: &str) -> Result> { - match self.cache.get_site_slug(site_slug)? { + match self.cache.get_site_slug(site_slug).await? { Some(site_id) => Ok(Some(site_id)), None => match self.deepwell.get_site_from_slug(site_slug).await? { None => Ok(None), Some(SiteData { site_id, .. 
}) => { - self.cache.set_site_slug(site_slug, site_id)?; + self.cache.set_site_slug(site_slug, site_id).await?; Ok(Some(site_id)) } }, @@ -86,7 +86,7 @@ impl ServerStateInner { } pub async fn get_site_domain(&self, site_domain: &str) -> Result> { - match self.cache.get_site_domain(site_domain)? { + match self.cache.get_site_domain(site_domain).await? { Some((site_id, site_slug)) => Ok(Some((site_id, site_slug))), None => match self.deepwell.get_site_from_domain(site_domain).await? { None => Ok(None), @@ -96,7 +96,8 @@ impl ServerStateInner { .. }) => { self.cache - .set_site_domain(site_domain, site_id, &site_slug)?; + .set_site_domain(site_domain, site_id, &site_slug) + .await?; Ok(Some((site_id, site_slug))) } From b6f593b07623bea4cb4c3b67c722cac326401f36 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Fri, 17 Jan 2025 04:13:09 -0500 Subject: [PATCH 089/306] Return s3_hash in GetFileOutput. --- deepwell/src/endpoints/file.rs | 1 + deepwell/src/endpoints/page.rs | 3 ++- deepwell/src/services/file/structs.rs | 1 + 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/deepwell/src/endpoints/file.rs b/deepwell/src/endpoints/file.rs index 82e1cf1666..ba9b166b65 100644 --- a/deepwell/src/endpoints/file.rs +++ b/deepwell/src/endpoints/file.rs @@ -165,6 +165,7 @@ async fn build_file_response( data: data.map(Bytes::from), mime: revision.mime, size: revision.size, + s3_hash: Bytes::from(revision.s3_hash), licensing: revision.licensing, revision_comments: revision.comments, hidden_fields: revision.hidden, diff --git a/deepwell/src/endpoints/page.rs b/deepwell/src/endpoints/page.rs index 7ee28d65c4..7e13458b20 100644 --- a/deepwell/src/endpoints/page.rs +++ b/deepwell/src/endpoints/page.rs @@ -29,7 +29,7 @@ use crate::services::page::{ MovePageOutput, RestorePage, RestorePageOutput, RollbackPage, SetPageLayout, }; use crate::services::{Result, TextService}; -use crate::types::{FileOrder, PageDetails, Reference}; +use crate::types::{Bytes, FileOrder, PageDetails, Reference}; use futures::future::try_join_all; pub async fn page_create( @@ -345,6 +345,7 @@ async fn build_page_file_output( data: None, mime: revision.mime, size: revision.size, + s3_hash: Bytes::from(revision.s3_hash), licensing: revision.licensing, revision_comments: revision.comments, hidden_fields: revision.hidden, diff --git a/deepwell/src/services/file/structs.rs b/deepwell/src/services/file/structs.rs index 941c90547d..6da20e5f28 100644 --- a/deepwell/src/services/file/structs.rs +++ b/deepwell/src/services/file/structs.rs @@ -89,6 +89,7 @@ pub struct GetFileOutput { pub data: Option>, pub mime: String, pub size: i64, + pub s3_hash: Bytes<'static>, pub licensing: JsonValue, pub revision_comments: String, pub hidden_fields: Vec, From 11662d1611aa9e9fe0a2fac1f3977e73fc57d779 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Fri, 17 Jan 2025 04:28:55 -0500 Subject: [PATCH 090/306] Move files redirects to top of module. 
--- wws/src/handler/code.rs | 10 +++++----- wws/src/handler/file.rs | 10 +++++----- wws/src/handler/html.rs | 10 +++++----- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/wws/src/handler/code.rs b/wws/src/handler/code.rs index 732c552f0e..847d70e982 100644 --- a/wws/src/handler/code.rs +++ b/wws/src/handler/code.rs @@ -24,6 +24,11 @@ use axum::{ response::{Html, Redirect}, }; +pub async fn handle_code_redirect(Path((page_slug, index)): Path<(String, String)>) -> Redirect { + let destination = format!("/-/code/{page_slug}/{index}"); + Redirect::permanent(&destination) +} + pub async fn handle_code_block( State(state): State, Path((page_slug, index)): Path<(String, String)>, @@ -37,8 +42,3 @@ pub async fn handle_code_block( // TODO todo!() } - -pub async fn handle_code_redirect(Path((page_slug, index)): Path<(String, String)>) -> Redirect { - let destination = format!("/-/code/{page_slug}/{index}"); - Redirect::permanent(&destination) -} diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 6ecefbabd5..94a074352b 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -25,6 +25,11 @@ use axum::{ }; use axum_extra::response::Attachment; +pub async fn handle_file_redirect(Path((page_slug, filename)): Path<(String, String)>) -> Redirect { + let destination = format!("/-/file/{page_slug}/{filename}"); + Redirect::permanent(&destination) +} + pub async fn handle_file_fetch( State(state): State, Path((page_slug, filename)): Path<(String, String)>, @@ -52,8 +57,3 @@ pub async fn handle_file_download( // TODO Attachment todo!() } - -pub async fn handle_file_redirect(Path((page_slug, filename)): Path<(String, String)>) -> Redirect { - let destination = format!("/-/file/{page_slug}/{filename}"); - Redirect::permanent(&destination) -} diff --git a/wws/src/handler/html.rs b/wws/src/handler/html.rs index 2136b65281..270aabf03b 100644 --- a/wws/src/handler/html.rs +++ b/wws/src/handler/html.rs @@ -24,6 +24,11 @@ use axum::{ response::{Html, Redirect}, }; +pub async fn handle_html_redirect(Path((page_slug, id)): Path<(String, String)>) -> Redirect { + let destination = format!("/-/html/{page_slug}/{id}"); + Redirect::permanent(&destination) +} + pub async fn handle_html_block( State(state): State, Path((page_slug, id)): Path<(String, String)>, @@ -31,8 +36,3 @@ pub async fn handle_html_block( // TODO todo!() } - -pub async fn handle_html_redirect(Path((page_slug, id)): Path<(String, String)>) -> Redirect { - let destination = format!("/-/html/{page_slug}/{id}"); - Redirect::permanent(&destination) -} From 2ed7d8e9b5304891e96f170c9e8f4ad13147f2a0 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 18 Jan 2025 19:36:52 -0500 Subject: [PATCH 091/306] Add module comment about Redis. --- wws/src/cache.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index d69286c6db..ee506ade93 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -18,6 +18,11 @@ * along with this program. If not, see . */ +//! Manages cached data in Redis. +//! +//! Whenever you make changes to this module, make sure that the code is +//! compatible with DEEPWELL's Redis code. + use crate::error::Result; use redis::AsyncCommands; From 7981bec59647c938afab0343706e887635ea26be Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 18 Jan 2025 20:42:05 -0500 Subject: [PATCH 092/306] Use header constant from http crate. 
--- wws/src/handler/misc.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/wws/src/handler/misc.rs b/wws/src/handler/misc.rs index 0d01932c69..a4c9c32ef8 100644 --- a/wws/src/handler/misc.rs +++ b/wws/src/handler/misc.rs @@ -18,12 +18,12 @@ * along with this program. If not, see . */ -use axum::{body::Body, http::status::StatusCode, response::Response}; +use axum::{body::Body, http::{status::StatusCode, header}, response::Response}; pub async fn handle_teapot() -> Response { Response::builder() .status(StatusCode::IM_A_TEAPOT) - .header("Content-Type", "text/html; charset=utf-8") + .header(header::CONTENT_TYPE, "text/html; charset=utf-8") .body(Body::from("🫖")) .expect("Unable to convert response data") } From c73d61fa3cb9606e711b7327284f896e3996505e Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 18 Jan 2025 20:43:22 -0500 Subject: [PATCH 093/306] Add initial fetch for files. --- wws/Cargo.lock | 44 +++++++++++++++++++++++++++++++++ wws/Cargo.toml | 1 + wws/src/cache.rs | 14 +++++++++++ wws/src/deepwell.rs | 54 +++++++++++++++++++++++++++++++++++++++++ wws/src/handler/file.rs | 14 ++++++++--- wws/src/state.rs | 21 +++++++++++++++- 6 files changed, 144 insertions(+), 4 deletions(-) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 72276bc513..67a689060a 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -1250,6 +1250,12 @@ version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" +[[package]] +name = "maplit" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" + [[package]] name = "matchers" version = "0.1.0" @@ -2176,6 +2182,21 @@ dependencies = [ "zerovec", ] +[[package]] +name = "tinyvec" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + [[package]] name = "tokio" version = "1.42.0" @@ -2418,6 +2439,15 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + [[package]] name = "untrusted" version = "0.9.0" @@ -2505,6 +2535,19 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "wikidot-normalize" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "835a5e9b9a5ab3872ec69ec2b2eb33e096c288d5a4627e601012ab86249281f1" +dependencies = [ + "maplit", + "once_cell", + "regex", + "trim-in-place", + "unicode-normalization", +] + [[package]] name = "winapi" version = "0.3.9" @@ -2654,6 +2697,7 @@ dependencies = [ "tower-http", "tracing", "tracing-subscriber", + "wikidot-normalize", ] [[package]] diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 41b0cd2342..92ef0e50bb 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -32,6 +32,7 @@ tower = "0.5" tower-http = { version = "0.6.1", features = 
["add-extension", "compression-br", "compression-deflate", "compression-gzip", "compression-zstd", "normalize-path", "set-header", "trace"] } tracing = "0.1" tracing-subscriber = { version = "0.3", features = ["env-filter"] } +wikidot-normalize = "0.12" [build-dependencies] built = { version = "0.7", features = ["git2"] } diff --git a/wws/src/cache.rs b/wws/src/cache.rs index ee506ade93..6d3c4f45ee 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -70,4 +70,18 @@ impl Cache { conn.hset::<_, _, _, ()>(&key, "slug", site_slug).await?; Ok(()) } + + pub async fn get_page_slug(&self, site_id: i64, page_slug: &str) -> Result> { + let mut conn = self.client.get_multiplexed_async_connection().await?; + let key = format!("page_slug:{site_id}:{page_slug}"); + let value = conn.hget(key, "id").await?; + Ok(value) + } + + pub async fn set_page_slug(&self, site_id: i64, page_slug: &str, page_id: i64) -> Result<()> { + let mut conn = self.client.get_multiplexed_async_connection().await?; + let key = format!("page_slug:{site_id}:{page_slug}"); + conn.hset::<_, _, _, ()>(&key, "id", page_id).await?; + Ok(()) + } } diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 0259b26762..add59a3ab0 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -133,6 +133,38 @@ impl Deepwell { Ok(site_data) } + + pub async fn get_page_metadata(&self, site_id: i64, page_slug: &str) -> Result> { + let params = rpc_object! { + "site_id" => site_id, + "page" => page_slug, + "wikitext" => false, + "compiled" => false, + }; + + let page_data: Option = self + .client + .request("page_get", params) + .await?; + + Ok(page_data) + } + + pub async fn get_file_metadata(&self, site_id: i64, page_id: i64, filename: &str) -> Result> { + let params = rpc_object! { + "site_id" => site_id, + "page_id" => page_id, + "file" => filename, + "data" => false, + }; + + let file_data: Option = self + .client + .request("file_get", params) + .await?; + + Ok(file_data) + } } #[derive(Debug, Clone)] @@ -151,3 +183,25 @@ pub struct SiteData { pub name: String, pub custom_domain: Option, } + +#[derive(Deserialize, Debug, Clone)] +pub struct PageData { + pub page_id: i64, + pub page_revision_count: i32, + pub page_category_id: i64, + pub page_category_slug: String, + pub revision_id: i64, + pub revision_number: i32, + pub revision_user_id: i64, + pub revision_comments: String, + pub title: String, + pub alt_title: Option, + pub slug: String, + pub hidden_fields: Vec, +} + +#[derive(Deserialize, Debug, Clone)] +pub struct FileData { + pub file_id: i64, + pub s3_hash: Vec, +} diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 94a074352b..5a462d4e14 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -18,12 +18,13 @@ * along with this program. If not, see . 
*/ -use crate::state::ServerState; +use crate::{error::Result, state::ServerState}; use axum::{ extract::{Path, State}, response::{Html, Redirect}, }; use axum_extra::response::Attachment; +use wikidot_normalize::normalize; pub async fn handle_file_redirect(Path((page_slug, filename)): Path<(String, String)>) -> Redirect { let destination = format!("/-/file/{page_slug}/{filename}"); @@ -32,7 +33,7 @@ pub async fn handle_file_redirect(Path((page_slug, filename)): Path<(String, Str pub async fn handle_file_fetch( State(state): State, - Path((page_slug, filename)): Path<(String, String)>, + Path((mut page_slug, filename)): Path<(String, String)>, ) -> Html<&'static str> { info!( page_slug = page_slug, @@ -46,7 +47,7 @@ pub async fn handle_file_fetch( pub async fn handle_file_download( State(state): State, - Path((page_slug, filename)): Path<(String, String)>, + Path((mut page_slug, filename)): Path<(String, String)>, ) -> Html<&'static str> { info!( page_slug = page_slug, @@ -57,3 +58,10 @@ pub async fn handle_file_download( // TODO Attachment todo!() } + +async fn get_file(state: &ServerState, site_id: i64, page_slug: &mut String, filename: &str) -> Result { + normalize(page_slug); + + let page_id = state.get_page_slug(site_id, &page_slug).await?; + todo!() +} diff --git a/wws/src/state.rs b/wws/src/state.rs index eb8fc9c6c7..22f0bec512 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -21,7 +21,7 @@ use crate::{ cache::Cache, config::Secrets, - deepwell::{Deepwell, Domains, SiteData}, + deepwell::{Deepwell, Domains, PageData, SiteData}, error::Result, }; use s3::bucket::Bucket; @@ -104,4 +104,23 @@ impl ServerStateInner { }, } } + + pub async fn get_page_slug(&self, site_id: i64, page_slug: &str) -> Result> { + match self.cache.get_page_slug(site_id, page_slug).await? { + Some(page_id) =>Ok(Some(page_id)), + None => match self.deepwell.get_page_metadata(site_id, page_slug).await? { + None => Ok(None), + Some(PageData { + page_id, + .. + }) => { + self.cache + .set_page_slug(site_id, page_slug, page_id) + .await?; + + Ok(Some(page_id)) + } + } + } + } } From 5941b8df9ab20fa6a4bad136e37bcc3735ab19ea Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 18 Jan 2025 20:55:07 -0500 Subject: [PATCH 094/306] Remove some fields in PageData. --- wws/src/deepwell.rs | 8 -------- 1 file changed, 8 deletions(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index add59a3ab0..39066f9fcf 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -187,16 +187,8 @@ pub struct SiteData { #[derive(Deserialize, Debug, Clone)] pub struct PageData { pub page_id: i64, - pub page_revision_count: i32, - pub page_category_id: i64, - pub page_category_slug: String, - pub revision_id: i64, - pub revision_number: i32, - pub revision_user_id: i64, - pub revision_comments: String, pub title: String, pub alt_title: Option, - pub slug: String, pub hidden_fields: Vec, } From 0ee4fcc29798429b5ff49960a8e786b4d66e7905 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 18 Jan 2025 21:39:49 -0500 Subject: [PATCH 095/306] Add page and file methods. --- wws/src/cache.rs | 33 ++++++++++++++++++++++++++++----- wws/src/deepwell.rs | 24 +++++++++++++----------- wws/src/handler/file.rs | 18 ++++++++++++++++-- wws/src/handler/misc.rs | 6 +++++- wws/src/state.rs | 36 +++++++++++++++++++++++++++++------- 5 files changed, 91 insertions(+), 26 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 6d3c4f45ee..c349c7de09 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -23,7 +23,7 @@ //! 
Whenever you make changes to this module, make sure that the code is //! compatible with DEEPWELL's Redis code. -use crate::error::Result; +use crate::{deepwell::FileData, error::Result}; use redis::AsyncCommands; #[derive(Debug)] @@ -38,7 +38,6 @@ impl Cache { Ok(Cache { client }) } - /// Retrieve the site ID from the slug from the cache. pub async fn get_site_slug(&self, site_slug: &str) -> Result> { let mut conn = self.client.get_multiplexed_async_connection().await?; let key = format!("site_slug:{site_slug}"); @@ -46,7 +45,6 @@ impl Cache { Ok(value) } - /// Set the site ID for a site slug. pub async fn set_site_slug(&self, site_slug: &str, site_id: i64) -> Result<()> { let mut conn = self.client.get_multiplexed_async_connection().await?; let key = format!("site_slug:{site_slug}"); @@ -54,7 +52,6 @@ impl Cache { Ok(()) } - /// Retrieve the site slug and ID from a custom domain from the cache. pub async fn get_site_domain(&self, domain: &str) -> Result> { let mut conn = self.client.get_multiplexed_async_connection().await?; let key = format!("site_domain:{domain}"); @@ -62,7 +59,6 @@ impl Cache { Ok(value) } - /// Set the site slug and ID for a custom domain. pub async fn set_site_domain(&self, domain: &str, site_id: i64, site_slug: &str) -> Result<()> { let mut conn = self.client.get_multiplexed_async_connection().await?; let key = format!("site_domain:{domain}"); @@ -84,4 +80,31 @@ impl Cache { conn.hset::<_, _, _, ()>(&key, "id", page_id).await?; Ok(()) } + + pub async fn get_file_name( + &self, + site_id: i64, + page_id: i64, + filename: &str, + ) -> Result> { + let mut conn = self.client.get_multiplexed_async_connection().await?; + let key = format!("file_name:{site_id}:{page_id}:{filename}"); + let (file_id, s3_hash) = conn.hget(key, &["id", "s3_hash"]).await?; + Ok(Some(FileData { file_id, s3_hash })) + } + + pub async fn set_file_name( + &self, + site_id: i64, + page_id: i64, + filename: &str, + data: &FileData, + ) -> Result<()> { + let mut conn = self.client.get_multiplexed_async_connection().await?; + let key = format!("file_name:{site_id}:{page_id}:{filename}"); + conn.hset::<_, _, _, ()>(&key, "id", data.file_id).await?; + conn.hset::<_, _, _, ()>(&key, "s3_hash", &data.s3_hash) + .await?; + Ok(()) + } } diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 39066f9fcf..f90213767f 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -134,7 +134,11 @@ impl Deepwell { Ok(site_data) } - pub async fn get_page_metadata(&self, site_id: i64, page_slug: &str) -> Result> { + pub async fn get_page_metadata( + &self, + site_id: i64, + page_slug: &str, + ) -> Result> { let params = rpc_object! { "site_id" => site_id, "page" => page_slug, @@ -142,15 +146,17 @@ impl Deepwell { "compiled" => false, }; - let page_data: Option = self - .client - .request("page_get", params) - .await?; + let page_data: Option = self.client.request("page_get", params).await?; Ok(page_data) } - pub async fn get_file_metadata(&self, site_id: i64, page_id: i64, filename: &str) -> Result> { + pub async fn get_file_metadata( + &self, + site_id: i64, + page_id: i64, + filename: &str, + ) -> Result> { let params = rpc_object! 
{ "site_id" => site_id, "page_id" => page_id, @@ -158,11 +164,7 @@ impl Deepwell { "data" => false, }; - let file_data: Option = self - .client - .request("file_get", params) - .await?; - + let file_data: Option = self.client.request("file_get", params).await?; Ok(file_data) } } diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 5a462d4e14..208eff9cb1 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -59,9 +59,23 @@ pub async fn handle_file_download( todo!() } -async fn get_file(state: &ServerState, site_id: i64, page_slug: &mut String, filename: &str) -> Result { +async fn get_file( + state: &ServerState, + site_id: i64, + page_slug: &mut String, + filename: &str, +) -> Result> { normalize(page_slug); - let page_id = state.get_page_slug(site_id, &page_slug).await?; + let page_id = match state.get_page_slug(site_id, &page_slug).await? { + Some(page_id) => page_id, + None => return Ok(None), + }; + + let file_info = match state.get_file_name(site_id, page_id, filename).await? { + Some(file_info) => file_info, + None => return Ok(None), + }; + todo!() } diff --git a/wws/src/handler/misc.rs b/wws/src/handler/misc.rs index a4c9c32ef8..ed98996b1b 100644 --- a/wws/src/handler/misc.rs +++ b/wws/src/handler/misc.rs @@ -18,7 +18,11 @@ * along with this program. If not, see . */ -use axum::{body::Body, http::{status::StatusCode, header}, response::Response}; +use axum::{ + body::Body, + http::{header, status::StatusCode}, + response::Response, +}; pub async fn handle_teapot() -> Response { Response::builder() diff --git a/wws/src/state.rs b/wws/src/state.rs index 22f0bec512..7b977e2cc3 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -21,7 +21,7 @@ use crate::{ cache::Cache, config::Secrets, - deepwell::{Deepwell, Domains, PageData, SiteData}, + deepwell::{Deepwell, Domains, FileData, PageData, SiteData}, error::Result, }; use s3::bucket::Bucket; @@ -107,20 +107,42 @@ impl ServerStateInner { pub async fn get_page_slug(&self, site_id: i64, page_slug: &str) -> Result> { match self.cache.get_page_slug(site_id, page_slug).await? { - Some(page_id) =>Ok(Some(page_id)), + Some(page_id) => Ok(Some(page_id)), None => match self.deepwell.get_page_metadata(site_id, page_slug).await? { None => Ok(None), - Some(PageData { - page_id, - .. - }) => { + Some(PageData { page_id, .. }) => { self.cache .set_page_slug(site_id, page_slug, page_id) .await?; Ok(Some(page_id)) } - } + }, + } + } + + pub async fn get_file_name( + &self, + site_id: i64, + page_id: i64, + filename: &str, + ) -> Result> { + match self.cache.get_file_name(site_id, page_id, filename).await? { + Some(data) => Ok(Some(data)), + None => match self + .deepwell + .get_file_metadata(site_id, page_id, filename) + .await? + { + None => Ok(None), + Some(data) => { + self.cache + .set_file_name(site_id, page_id, filename, &data) + .await?; + + Ok(Some(data)) + } + }, } } } From 3a06067c52859949e0495624e2822075485d9d5c Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 18 Jan 2025 21:45:35 -0500 Subject: [PATCH 096/306] Use macros to avoid Redis boilerplate. --- wws/src/cache.rs | 41 ++++++++++++++++++++++++++--------------- 1 file changed, 26 insertions(+), 15 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index c349c7de09..a8d984f6ba 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -26,6 +26,18 @@ use crate::{deepwell::FileData, error::Result}; use redis::AsyncCommands; +macro_rules! get_connection { + ($client:expr) => { + $client.get_multiplexed_async_connection().await? 
+ }; +} + +macro_rules! hset { + ($conn:expr, $key:expr, $field:expr, $value:expr $(,)?) => { + $conn.hset::<_, _, _, ()>(&$key, $field, $value).await? + }; +} + #[derive(Debug)] pub struct Cache { client: redis::Client, @@ -39,45 +51,45 @@ impl Cache { } pub async fn get_site_slug(&self, site_slug: &str) -> Result> { - let mut conn = self.client.get_multiplexed_async_connection().await?; + let mut conn = get_connection!(self.client); let key = format!("site_slug:{site_slug}"); let value = conn.hget(key, "id").await?; Ok(value) } pub async fn set_site_slug(&self, site_slug: &str, site_id: i64) -> Result<()> { - let mut conn = self.client.get_multiplexed_async_connection().await?; + let mut conn = get_connection!(self.client); let key = format!("site_slug:{site_slug}"); - conn.hset::<_, _, _, ()>(key, "id", site_id).await?; + hset!(conn, key, "id", site_id); Ok(()) } pub async fn get_site_domain(&self, domain: &str) -> Result> { - let mut conn = self.client.get_multiplexed_async_connection().await?; + let mut conn = get_connection!(self.client); let key = format!("site_domain:{domain}"); let value = conn.hget(key, &["id", "slug"]).await?; Ok(value) } pub async fn set_site_domain(&self, domain: &str, site_id: i64, site_slug: &str) -> Result<()> { - let mut conn = self.client.get_multiplexed_async_connection().await?; + let mut conn = get_connection!(self.client); let key = format!("site_domain:{domain}"); - conn.hset::<_, _, _, ()>(&key, "id", site_id).await?; - conn.hset::<_, _, _, ()>(&key, "slug", site_slug).await?; + hset!(conn, key, "id", site_id); + hset!(conn, key, "slug", site_slug); Ok(()) } pub async fn get_page_slug(&self, site_id: i64, page_slug: &str) -> Result> { - let mut conn = self.client.get_multiplexed_async_connection().await?; + let mut conn = get_connection!(self.client); let key = format!("page_slug:{site_id}:{page_slug}"); let value = conn.hget(key, "id").await?; Ok(value) } pub async fn set_page_slug(&self, site_id: i64, page_slug: &str, page_id: i64) -> Result<()> { - let mut conn = self.client.get_multiplexed_async_connection().await?; + let mut conn = get_connection!(self.client); let key = format!("page_slug:{site_id}:{page_slug}"); - conn.hset::<_, _, _, ()>(&key, "id", page_id).await?; + hset!(conn, key, "id", page_id); Ok(()) } @@ -87,7 +99,7 @@ impl Cache { page_id: i64, filename: &str, ) -> Result> { - let mut conn = self.client.get_multiplexed_async_connection().await?; + let mut conn = get_connection!(self.client); let key = format!("file_name:{site_id}:{page_id}:{filename}"); let (file_id, s3_hash) = conn.hget(key, &["id", "s3_hash"]).await?; Ok(Some(FileData { file_id, s3_hash })) @@ -100,11 +112,10 @@ impl Cache { filename: &str, data: &FileData, ) -> Result<()> { - let mut conn = self.client.get_multiplexed_async_connection().await?; + let mut conn = get_connection!(self.client); let key = format!("file_name:{site_id}:{page_id}:{filename}"); - conn.hset::<_, _, _, ()>(&key, "id", data.file_id).await?; - conn.hset::<_, _, _, ()>(&key, "s3_hash", &data.s3_hash) - .await?; + hset!(conn, key, "id", data.file_id); + hset!(conn, key, "s3_hash", &data.s3_hash); Ok(()) } } From 2f44065fe6e2a1271d9ac5d196f60647ea3e2a74 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 18 Jan 2025 21:53:15 -0500 Subject: [PATCH 097/306] Rename fetch methods for name clarity. 
--- wws/src/cache.rs | 21 +++++++++++++-------- wws/src/deepwell.rs | 8 ++------ wws/src/handler/file.rs | 4 ++-- wws/src/host.rs | 6 +++--- wws/src/state.rs | 35 ++++++++++++++--------------------- 5 files changed, 34 insertions(+), 40 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index a8d984f6ba..99081e92d2 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -50,28 +50,33 @@ impl Cache { Ok(Cache { client }) } - pub async fn get_site_slug(&self, site_slug: &str) -> Result> { + pub async fn get_site_from_slug(&self, site_slug: &str) -> Result> { let mut conn = get_connection!(self.client); let key = format!("site_slug:{site_slug}"); let value = conn.hget(key, "id").await?; Ok(value) } - pub async fn set_site_slug(&self, site_slug: &str, site_id: i64) -> Result<()> { + pub async fn set_site_from_slug(&self, site_slug: &str, site_id: i64) -> Result<()> { let mut conn = get_connection!(self.client); let key = format!("site_slug:{site_slug}"); hset!(conn, key, "id", site_id); Ok(()) } - pub async fn get_site_domain(&self, domain: &str) -> Result> { + pub async fn get_site_from_domain(&self, domain: &str) -> Result> { let mut conn = get_connection!(self.client); let key = format!("site_domain:{domain}"); let value = conn.hget(key, &["id", "slug"]).await?; Ok(value) } - pub async fn set_site_domain(&self, domain: &str, site_id: i64, site_slug: &str) -> Result<()> { + pub async fn set_site_from_domain( + &self, + domain: &str, + site_id: i64, + site_slug: &str, + ) -> Result<()> { let mut conn = get_connection!(self.client); let key = format!("site_domain:{domain}"); hset!(conn, key, "id", site_id); @@ -79,21 +84,21 @@ impl Cache { Ok(()) } - pub async fn get_page_slug(&self, site_id: i64, page_slug: &str) -> Result> { + pub async fn get_page(&self, site_id: i64, page_slug: &str) -> Result> { let mut conn = get_connection!(self.client); let key = format!("page_slug:{site_id}:{page_slug}"); let value = conn.hget(key, "id").await?; Ok(value) } - pub async fn set_page_slug(&self, site_id: i64, page_slug: &str, page_id: i64) -> Result<()> { + pub async fn set_page(&self, site_id: i64, page_slug: &str, page_id: i64) -> Result<()> { let mut conn = get_connection!(self.client); let key = format!("page_slug:{site_id}:{page_slug}"); hset!(conn, key, "id", page_id); Ok(()) } - pub async fn get_file_name( + pub async fn get_file( &self, site_id: i64, page_id: i64, @@ -105,7 +110,7 @@ impl Cache { Ok(Some(FileData { file_id, s3_hash })) } - pub async fn set_file_name( + pub async fn set_file( &self, site_id: i64, page_id: i64, diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index f90213767f..ab558efe75 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -134,11 +134,7 @@ impl Deepwell { Ok(site_data) } - pub async fn get_page_metadata( - &self, - site_id: i64, - page_slug: &str, - ) -> Result> { + pub async fn get_page(&self, site_id: i64, page_slug: &str) -> Result> { let params = rpc_object! { "site_id" => site_id, "page" => page_slug, @@ -151,7 +147,7 @@ impl Deepwell { Ok(page_data) } - pub async fn get_file_metadata( + pub async fn get_file( &self, site_id: i64, page_id: i64, diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 208eff9cb1..57914f9a94 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -67,12 +67,12 @@ async fn get_file( ) -> Result> { normalize(page_slug); - let page_id = match state.get_page_slug(site_id, &page_slug).await? { + let page_id = match state.get_page(site_id, &page_slug).await? 
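For reference, the renames are applied consistently across cache.rs, deepwell.rs, and state.rs: get_site_slug/set_site_slug become get_site_from_slug/set_site_from_slug, get_site_domain/set_site_domain become get_site_from_domain/set_site_from_domain, get_page_slug/set_page_slug/get_page_metadata collapse to get_page/set_page, and get_file_name/set_file_name/get_file_metadata collapse to get_file/set_file.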
{ Some(page_id) => page_id, None => return Ok(None), }; - let file_info = match state.get_file_name(site_id, page_id, filename).await? { + let file_info = match state.get_file(site_id, page_id, filename).await? { Some(file_info) => file_info, None => return Ok(None), }; diff --git a/wws/src/host.rs b/wws/src/host.rs index 66a59f494d..bb02a4cc7b 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -59,7 +59,7 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result { // Site exists @@ -98,7 +98,7 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result { // Site exists info!( @@ -136,7 +136,7 @@ async fn main_site_slug<'a>( }; // Return site present or missing response based on site ID. - let site_id = state.get_site_slug(site_slug).await?; + let site_id = state.get_site_from_slug(site_slug).await?; match site_id { Some(site_id) => { // Site exists diff --git a/wws/src/state.rs b/wws/src/state.rs index 7b977e2cc3..3ff74c44f6 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -72,21 +72,21 @@ impl ServerStateInner { // Contains implementations for the common pattern of "check the cache, // if not present, get it from DEEPWELL and populate it". - pub async fn get_site_slug(&self, site_slug: &str) -> Result> { - match self.cache.get_site_slug(site_slug).await? { + pub async fn get_site_from_slug(&self, site_slug: &str) -> Result> { + match self.cache.get_site_from_slug(site_slug).await? { Some(site_id) => Ok(Some(site_id)), None => match self.deepwell.get_site_from_slug(site_slug).await? { None => Ok(None), Some(SiteData { site_id, .. }) => { - self.cache.set_site_slug(site_slug, site_id).await?; + self.cache.set_site_from_slug(site_slug, site_id).await?; Ok(Some(site_id)) } }, } } - pub async fn get_site_domain(&self, site_domain: &str) -> Result> { - match self.cache.get_site_domain(site_domain).await? { + pub async fn get_site_from_domain(&self, site_domain: &str) -> Result> { + match self.cache.get_site_from_domain(site_domain).await? { Some((site_id, site_slug)) => Ok(Some((site_id, site_slug))), None => match self.deepwell.get_site_from_domain(site_domain).await? { None => Ok(None), @@ -96,7 +96,7 @@ impl ServerStateInner { .. }) => { self.cache - .set_site_domain(site_domain, site_id, &site_slug) + .set_site_from_domain(site_domain, site_id, &site_slug) .await?; Ok(Some((site_id, site_slug))) @@ -105,39 +105,32 @@ impl ServerStateInner { } } - pub async fn get_page_slug(&self, site_id: i64, page_slug: &str) -> Result> { - match self.cache.get_page_slug(site_id, page_slug).await? { + pub async fn get_page(&self, site_id: i64, page_slug: &str) -> Result> { + match self.cache.get_page(site_id, page_slug).await? { Some(page_id) => Ok(Some(page_id)), - None => match self.deepwell.get_page_metadata(site_id, page_slug).await? { + None => match self.deepwell.get_page(site_id, page_slug).await? { None => Ok(None), Some(PageData { page_id, .. }) => { - self.cache - .set_page_slug(site_id, page_slug, page_id) - .await?; - + self.cache.set_page(site_id, page_slug, page_id).await?; Ok(Some(page_id)) } }, } } - pub async fn get_file_name( + pub async fn get_file( &self, site_id: i64, page_id: i64, filename: &str, ) -> Result> { - match self.cache.get_file_name(site_id, page_id, filename).await? { + match self.cache.get_file(site_id, page_id, filename).await? { Some(data) => Ok(Some(data)), - None => match self - .deepwell - .get_file_metadata(site_id, page_id, filename) - .await? 
- { + None => match self.deepwell.get_file(site_id, page_id, filename).await? { None => Ok(None), Some(data) => { self.cache - .set_file_name(site_id, page_id, filename, &data) + .set_file(site_id, page_id, filename, &data) .await?; Ok(Some(data)) From 568cfda1cf079230c421a88674191c458ae91fd8 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 18 Jan 2025 22:45:51 -0500 Subject: [PATCH 098/306] Get hexadecimal hash for s3_hash. --- wws/src/deepwell.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index ab558efe75..7d99824f35 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -193,5 +193,5 @@ pub struct PageData { #[derive(Deserialize, Debug, Clone)] pub struct FileData { pub file_id: i64, - pub s3_hash: Vec, + pub s3_hash: String, } From ba227b6ce018428cda0027c614e195e84487f6da Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 19 Jan 2025 00:36:31 -0500 Subject: [PATCH 099/306] Modify Redis return type to optional. --- wws/src/cache.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 99081e92d2..82d2c5aaa1 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -104,10 +104,15 @@ impl Cache { page_id: i64, filename: &str, ) -> Result> { + type FileDataTuple = Option<(i64, String, i64, String)>; + let mut conn = get_connection!(self.client); let key = format!("file_name:{site_id}:{page_id}:{filename}"); - let (file_id, s3_hash) = conn.hget(key, &["id", "s3_hash"]).await?; - Ok(Some(FileData { file_id, s3_hash })) + let data = conn.hget::<_, _, FileDataTuple>(key, &["id", "mime", "size", "s3_hash"]) + .await? + .map(|(file_id, mime, size, s3_hash)| FileData { file_id, mime, size, s3_hash }); + + Ok(data) } pub async fn set_file( From f6439506ab40b26a7ce305bc3e19719aed9b5ae2 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 19 Jan 2025 00:47:15 -0500 Subject: [PATCH 100/306] Add mime and size fields to redis/deepwell. --- wws/src/cache.rs | 2 ++ wws/src/deepwell.rs | 2 ++ 2 files changed, 4 insertions(+) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 82d2c5aaa1..5602b5cbaf 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -125,6 +125,8 @@ impl Cache { let mut conn = get_connection!(self.client); let key = format!("file_name:{site_id}:{page_id}:{filename}"); hset!(conn, key, "id", data.file_id); + hset!(conn, key, "mime", &data.mime); + hset!(conn, key, "size", data.size); hset!(conn, key, "s3_hash", &data.s3_hash); Ok(()) } diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 7d99824f35..711737c8d4 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -193,5 +193,7 @@ pub struct PageData { #[derive(Deserialize, Debug, Clone)] pub struct FileData { pub file_id: i64, + pub mime: String, + pub size: i64, pub s3_hash: String, } From 579d9176aa54ed6126c9e09dc90230dc57f39ca5 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 19 Jan 2025 00:48:12 -0500 Subject: [PATCH 101/306] Fix redis retrieval. The dynamic type thing doesn't know how to wrap everything in an Option, it does it for each field. So we read that out and then delete stuff if there's inconsistencies. --- wws/src/cache.rs | 30 ++++++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 5602b5cbaf..c43467afe5 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -38,6 +38,12 @@ macro_rules! hset { }; } +macro_rules! hdel { + ($conn:expr, $key:expr, $field:expr $(,)?) 
=> { + $conn.hdel::<_, _, ()>(&$key, $field).await? + }; +} + #[derive(Debug)] pub struct Cache { client: redis::Client, @@ -104,15 +110,27 @@ impl Cache { page_id: i64, filename: &str, ) -> Result> { - type FileDataTuple = Option<(i64, String, i64, String)>; + type FileDataTuple = (Option, Option, Option, Option); let mut conn = get_connection!(self.client); let key = format!("file_name:{site_id}:{page_id}:{filename}"); - let data = conn.hget::<_, _, FileDataTuple>(key, &["id", "mime", "size", "s3_hash"]) - .await? - .map(|(file_id, mime, size, s3_hash)| FileData { file_id, mime, size, s3_hash }); - - Ok(data) + let fields = &["id", "mime", "size", "s3_hash"]; + let values = conn.hget::<_, _, FileDataTuple>(&key, fields).await?; + match values { + // Ideally, all of these should be non-null, if it's a cache hit. + (Some(file_id), Some(mime), Some(size), Some(s3_hash)) => { + Ok(Some(FileData { file_id, mime, size, s3_hash })) + } + + // Cache miss + (None, None, None, None) => Ok(None), + + // Some fields are set and others aren't. Let's clear all them out. + _ => { + hdel!(conn, key, fields); + Ok(None) + } + } } pub async fn set_file( From 82dbef1eb667ec96726b7a8866f247d37d9da828 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sun, 19 Jan 2025 00:49:15 -0500 Subject: [PATCH 102/306] Add warning if invalid cache code path is hit. --- wws/src/cache.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index c43467afe5..581004be89 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -127,6 +127,7 @@ impl Cache { // Some fields are set and others aren't. Let's clear all them out. _ => { + warn!(key = key, "Inconsistent cache data, deleting"); hdel!(conn, key, fields); Ok(None) } From f8a1f45fd850696da2a066fac4116ac5ae16ce93 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Mon, 20 Jan 2025 23:42:13 -0500 Subject: [PATCH 103/306] Add file redirect routes. And comments. 
--- wws/src/route.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/wws/src/route.rs b/wws/src/route.rs index 06326140ba..fd02495722 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -43,6 +43,7 @@ pub fn build_router(state: ServerState) -> Router { .route("/local--files/{*rest}", any(redirect_to_files)) .route("/local--code/{*rest}", any(redirect_to_files)) .route("/local--html/{*rest}", any(redirect_to_files)) + .route("/-/files/{*rest}", any(redirect_to_files)) .route("/-/file/{*rest}", any(redirect_to_files)) .route("/-/download/{*rest}", any(redirect_to_files)) .route("/-/code/{*rest}", any(redirect_to_files)) @@ -52,6 +53,7 @@ pub fn build_router(state: ServerState) -> Router { // Router that serves wjfiles let files_router = Router::new() + // Wikidot routes .route( "/local--files/{page_slug}/{filename}", get(handle_file_redirect), @@ -61,6 +63,9 @@ pub fn build_router(state: ServerState) -> Router { any(handle_code_redirect), ) .route("/local--html/{page_slug}/{id}", any(handle_html_redirect)) + // Other redirects + .route("/-/files/{page_slug}/{filename}", any(handle_file_redirect)) + // Files .route("/-/file/{page_slug}/{filename}", get(handle_file_fetch)) .route("/-/file/{page_slug}/{filename}", any(handle_invalid_method)) .route( @@ -71,6 +76,7 @@ pub fn build_router(state: ServerState) -> Router { "/-/download/{page_slug}/{filename}", any(handle_invalid_method), ) + // Code and HTML .route("/-/code/{page_slug}/{index}", get(handle_code_block)) .route("/-/code/{page_slug}/{index}", any(handle_invalid_method)) .route("/-/html/{page_slug}/{id}", get(handle_html_block)) From 1f4d81d3b84d2ce7b092a87d0579e9fc32c9636a Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Mon, 20 Jan 2025 23:52:36 -0500 Subject: [PATCH 104/306] Download files via stream. We can avoid downloading the entire files right into memory! How sweet! Yay! --- wws/src/handler/file.rs | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 57914f9a94..98110ca8d5 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -18,10 +18,14 @@ * along with this program. If not, see . */ -use crate::{error::Result, state::ServerState}; +use crate::{deepwell::FileData, error::Result, state::ServerState}; +use super::get_site_info; use axum::{ + body::Body, extract::{Path, State}, - response::{Html, Redirect}, + http::StatusCode, + http::header::{self, HeaderMap}, + response::{Redirect, Response, IntoResponse}, }; use axum_extra::response::Attachment; use wikidot_normalize::normalize; @@ -64,7 +68,7 @@ async fn get_file( site_id: i64, page_slug: &mut String, filename: &str, -) -> Result> { +) -> Result> { normalize(page_slug); let page_id = match state.get_page(site_id, &page_slug).await? { @@ -77,5 +81,8 @@ async fn get_file( None => return Ok(None), }; - todo!() + let s3_result = state.s3_bucket.get_object_stream(&file_info.s3_hash).await?; + assert_eq!(s3_result.status_code, 200, "get_object_stream() succeeded but did not reply 200"); + let body = Body::from_stream(s3_result.bytes); + Ok(Some((file_info, body))) } From 620e9eb82537a7b8e030d0ddf6898bc77a4e9cce Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Mon, 20 Jan 2025 23:53:41 -0500 Subject: [PATCH 105/306] Use struct-deconstruct syntax. 
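Both the Wikidot-style paths and the plural /-/files/ form now funnel into the same canonical /-/file/ handler. Roughly (the page slug and filename here are made-up examples):

    GET /local--files/some-page/photo.jpg  -> permanent redirect -> /-/file/some-page/photo.jpg
    GET /-/files/some-page/photo.jpg       -> permanent redirect -> /-/file/some-page/photo.jpg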
--- wws/src/handler/file.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 98110ca8d5..75b2346c50 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -28,6 +28,7 @@ use axum::{ response::{Redirect, Response, IntoResponse}, }; use axum_extra::response::Attachment; +use s3::request::request_trait::ResponseDataStream; use wikidot_normalize::normalize; pub async fn handle_file_redirect(Path((page_slug, filename)): Path<(String, String)>) -> Redirect { @@ -81,8 +82,8 @@ async fn get_file( None => return Ok(None), }; - let s3_result = state.s3_bucket.get_object_stream(&file_info.s3_hash).await?; - assert_eq!(s3_result.status_code, 200, "get_object_stream() succeeded but did not reply 200"); - let body = Body::from_stream(s3_result.bytes); + let ResponseDataStream { bytes, status_code } = state.s3_bucket.get_object_stream(&file_info.s3_hash).await?; + assert_eq!(status_code, 200, "get_object_stream() succeeded but did not reply 200"); + let body = Body::from_stream(bytes); Ok(Some((file_info, body))) } From 353c5c6abb319769187851d94507896b598f2de9 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Tue, 21 Jan 2025 00:49:16 -0500 Subject: [PATCH 106/306] Add incomplete file code for development. --- wws/src/handler/file.rs | 30 +++++++++++++++++++++++++----- wws/src/handler/mod.rs | 25 +++++++++++++++++++++++-- 2 files changed, 48 insertions(+), 7 deletions(-) diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 75b2346c50..83f2cc96e8 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -39,29 +39,49 @@ pub async fn handle_file_redirect(Path((page_slug, filename)): Path<(String, Str pub async fn handle_file_fetch( State(state): State, Path((mut page_slug, filename)): Path<(String, String)>, -) -> Html<&'static str> { + headers: HeaderMap, +) -> Response { info!( page_slug = page_slug, filename = filename, "Returning file data", ); + let (site_id, _) = get_site_info(&headers); // TODO - todo!() + let _result = get_file(&state, site_id, &mut page_slug, &filename).await; + let (metadata, data) = _result.expect("_TODO").expect("_TODO"); + + let result = Response::builder() + .header(header::CONTENT_TYPE, &metadata.mime) + .body(Body::from(data)); + + match result { + Ok(response) => response, + Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(), + } } pub async fn handle_file_download( State(state): State, Path((mut page_slug, filename)): Path<(String, String)>, -) -> Html<&'static str> { + headers: HeaderMap, +) -> Response { info!( page_slug = page_slug, filename = filename, "Returning file download", ); - // TODO Attachment - todo!() + let (site_id, _) = get_site_info(&headers); + // TODO + let _result = get_file(&state, site_id, &mut page_slug, &filename).await; + let (metadata, data) = _result.expect("_TODO").expect("_TODO"); + + Attachment::new(data) + .filename(&filename) + .content_type(&metadata.mime) + .into_response() } async fn get_file( diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 16601fb6ac..16b72b4b8c 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -40,7 +40,7 @@ use crate::{ use axum::{ body::Body, extract::Request, - http::header::HeaderName, + http::header::{HeaderMap, HeaderName}, response::{IntoResponse, Redirect, Response}, Router, }; @@ -54,6 +54,25 @@ pub const HEADER_IS_WIKIJUMP: HeaderName = HeaderName::from_static("x-wikijump") pub const HEADER_WWS_VERSION: HeaderName = 
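This just destructures the value the previous commit introduced; behaviour is unchanged. In sketch form (mirroring the diff, not new code): get_object_stream() hands back a stream plus a status code, the status is asserted, and the stream is wrapped so file bytes are forwarded to the client chunk by chunk instead of being buffered whole in memory:

    let ResponseDataStream { bytes, status_code } =
        state.s3_bucket.get_object_stream(&file_info.s3_hash).await?;
    assert_eq!(status_code, 200, "get_object_stream() succeeded but did not reply 200");
    let body = Body::from_stream(bytes); // streamed, never fully buffered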
HeaderName::from_static("x-wikijump-wws-ver"); pub const HEADER_DEEPWELL_VERSION: HeaderName = HeaderName::from_static("x-wikijump-deepwell-ver"); +/// Helper function to get the site ID and slug from headers. +fn get_site_info(headers: &HeaderMap) -> (i64, &str) { + let site_id = headers + .get(HEADER_SITE_ID) + .expect("No site ID header in request") + .to_str() + .expect("Site ID header is not UTF-8") + .parse() + .expect("Site ID is not a valid integer"); + + let site_slug = headers + .get(HEADER_SITE_SLUG) + .expect("No site slug header in request") + .to_str() + .expect("Site slug header is not UTF-8"); + + (site_id, site_slug) +} + /// Entry route handler to first process host information. /// /// Before we can give this request to the right place, @@ -114,7 +133,9 @@ pub async fn handle_host_delegation( Ok(host_data) => host_data, Err(error) => { // TODO error page response in case of an internal issue - todo!() + //todo!() +// TODO +SiteAndHost::File { site_id: 4, site_slug: "scp-wiki" } } }; From 99a528612ad711bc09f7401e61abb74d04efc9ca Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Tue, 21 Jan 2025 00:57:38 -0500 Subject: [PATCH 107/306] Move helper function to top. Going to change it into a macro. --- wws/src/handler/file.rs | 49 +++++++++++++++++++++-------------------- 1 file changed, 25 insertions(+), 24 deletions(-) diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 83f2cc96e8..a4e32559c1 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -31,6 +31,31 @@ use axum_extra::response::Attachment; use s3::request::request_trait::ResponseDataStream; use wikidot_normalize::normalize; +/// Helper function to retrieve file data for returning via HTTP. +async fn get_file( + state: &ServerState, + site_id: i64, + page_slug: &mut String, + filename: &str, +) -> Result> { + normalize(page_slug); + + let page_id = match state.get_page(site_id, &page_slug).await? { + Some(page_id) => page_id, + None => return Ok(None), + }; + + let file_info = match state.get_file(site_id, page_id, filename).await? { + Some(file_info) => file_info, + None => return Ok(None), + }; + + let ResponseDataStream { bytes, status_code } = state.s3_bucket.get_object_stream(&file_info.s3_hash).await?; + assert_eq!(status_code, 200, "get_object_stream() succeeded but did not reply 200"); + let body = Body::from_stream(bytes); + Ok(Some((file_info, body))) +} + pub async fn handle_file_redirect(Path((page_slug, filename)): Path<(String, String)>) -> Redirect { let destination = format!("/-/file/{page_slug}/{filename}"); Redirect::permanent(&destination) @@ -83,27 +108,3 @@ pub async fn handle_file_download( .content_type(&metadata.mime) .into_response() } - -async fn get_file( - state: &ServerState, - site_id: i64, - page_slug: &mut String, - filename: &str, -) -> Result> { - normalize(page_slug); - - let page_id = match state.get_page(site_id, &page_slug).await? { - Some(page_id) => page_id, - None => return Ok(None), - }; - - let file_info = match state.get_file(site_id, page_id, filename).await? 
{ - Some(file_info) => file_info, - None => return Ok(None), - }; - - let ResponseDataStream { bytes, status_code } = state.s3_bucket.get_object_stream(&file_info.s3_hash).await?; - assert_eq!(status_code, 200, "get_object_stream() succeeded but did not reply 200"); - let body = Body::from_stream(bytes); - Ok(Some((file_info, body))) -} From 925e680de1e3348f712b1a3f66966646102c7967 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Tue, 21 Jan 2025 01:27:53 -0500 Subject: [PATCH 108/306] Convert the helper function to a macro. This eliminates all the .unwrap()s I added on a temporary basis. --- wws/src/cache.rs | 9 ++- wws/src/handler/file.rs | 132 ++++++++++++++++++++++++++++------------ wws/src/handler/mod.rs | 7 ++- 3 files changed, 104 insertions(+), 44 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 581004be89..895956cbbc 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -118,9 +118,12 @@ impl Cache { let values = conn.hget::<_, _, FileDataTuple>(&key, fields).await?; match values { // Ideally, all of these should be non-null, if it's a cache hit. - (Some(file_id), Some(mime), Some(size), Some(s3_hash)) => { - Ok(Some(FileData { file_id, mime, size, s3_hash })) - } + (Some(file_id), Some(mime), Some(size), Some(s3_hash)) => Ok(Some(FileData { + file_id, + mime, + size, + s3_hash, + })), // Cache miss (None, None, None, None) => Ok(None), diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index a4e32559c1..259a97069f 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -18,42 +18,99 @@ * along with this program. If not, see . */ -use crate::{deepwell::FileData, error::Result, state::ServerState}; use super::get_site_info; +use crate::{deepwell::FileData, error::Result, state::ServerState}; use axum::{ body::Body, extract::{Path, State}, - http::StatusCode, http::header::{self, HeaderMap}, - response::{Redirect, Response, IntoResponse}, + http::StatusCode, + response::{IntoResponse, Redirect, Response}, }; use axum_extra::response::Attachment; use s3::request::request_trait::ResponseDataStream; use wikidot_normalize::normalize; -/// Helper function to retrieve file data for returning via HTTP. -async fn get_file( - state: &ServerState, - site_id: i64, - page_slug: &mut String, - filename: &str, -) -> Result> { - normalize(page_slug); - - let page_id = match state.get_page(site_id, &page_slug).await? { - Some(page_id) => page_id, - None => return Ok(None), - }; - - let file_info = match state.get_file(site_id, page_id, filename).await? { - Some(file_info) => file_info, - None => return Ok(None), - }; - - let ResponseDataStream { bytes, status_code } = state.s3_bucket.get_object_stream(&file_info.s3_hash).await?; - assert_eq!(status_code, 200, "get_object_stream() succeeded but did not reply 200"); - let body = Body::from_stream(bytes); - Ok(Some((file_info, body))) +macro_rules! fetch_file { + ($state:expr, $headers:expr, $page_slug:expr, $filename:expr $(,)?) 
=> {{ + normalize(&mut $page_slug); + + let (site_id, _) = get_site_info(&$headers); + + let state = &$state; + let page_slug = &$page_slug; + let filename = &$filename; + + let page_id = match state.get_page(site_id, page_slug).await { + Ok(Some(page_id)) => page_id, + Ok(None) => { + error!( + site_id = site_id, + page_slug = page_slug, + "Cannot get file, no such page", + ); + + return StatusCode::NOT_FOUND.into_response(); + } + Err(error) => { + error!( + site_id = site_id, + page_slug = page_slug, + "Cannot get page info: {error}", + ); + + return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + } + }; + + let file_info = match state.get_file(site_id, page_id, filename).await { + Ok(Some(file_info)) => file_info, + Ok(None) => { + error!( + site_id = site_id, + page_id = page_id, + filename = $filename, + "Cannot get file, none with filename", + ); + + return StatusCode::NOT_FOUND.into_response(); + } + Err(error) => { + error!( + site_id = site_id, + page_slug = page_slug, + "Cannot get file info: {error}", + ); + + return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + } + }; + + let body = match state.s3_bucket.get_object_stream(&file_info.s3_hash).await { + Ok(ResponseDataStream { bytes, status_code }) => { + assert_eq!( + status_code, + StatusCode::OK, + "get_object_stream() succeeded but did not reply 200", + ); + + Body::from_stream(bytes) + } + Err(error) => { + error!( + site_id = site_id, + page_slug = page_slug, + filename = filename, + s3_hash = &file_info.s3_hash, + "Cannot get blob data: {error}", + ); + + return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + } + }; + + (file_info, body) + }}; } pub async fn handle_file_redirect(Path((page_slug, filename)): Path<(String, String)>) -> Redirect { @@ -72,18 +129,18 @@ pub async fn handle_file_fetch( "Returning file data", ); - let (site_id, _) = get_site_info(&headers); - // TODO - let _result = get_file(&state, site_id, &mut page_slug, &filename).await; - let (metadata, data) = _result.expect("_TODO").expect("_TODO"); + let (file_info, body) = fetch_file!(state, headers, page_slug, filename); let result = Response::builder() - .header(header::CONTENT_TYPE, &metadata.mime) - .body(Body::from(data)); + .header(header::CONTENT_TYPE, &file_info.mime) + .body(body); match result { Ok(response) => response, - Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(), + Err(error) => { + error!("Unable to convert response: {error}"); + StatusCode::INTERNAL_SERVER_ERROR.into_response() + } } } @@ -98,13 +155,10 @@ pub async fn handle_file_download( "Returning file download", ); - let (site_id, _) = get_site_info(&headers); - // TODO - let _result = get_file(&state, site_id, &mut page_slug, &filename).await; - let (metadata, data) = _result.expect("_TODO").expect("_TODO"); + let (file_info, body) = fetch_file!(state, headers, page_slug, filename); - Attachment::new(data) + Attachment::new(body) .filename(&filename) - .content_type(&metadata.mime) + .content_type(&file_info.mime) .into_response() } diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 16b72b4b8c..83e84054ca 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -134,8 +134,11 @@ pub async fn handle_host_delegation( Err(error) => { // TODO error page response in case of an internal issue //todo!() -// TODO -SiteAndHost::File { site_id: 4, site_slug: "scp-wiki" } + // TODO + SiteAndHost::File { + site_id: 4, + site_slug: "scp-wiki", + } } }; From 1ef67efbdbc9554fbb93dc024c4178a65b1cb626 Mon Sep 17 00:00:00 2001 From: Emmie 
Maeda Date: Tue, 21 Jan 2025 01:32:22 -0500 Subject: [PATCH 109/306] Remove debug hardcoded host. --- wws/src/handler/mod.rs | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 83e84054ca..513c5dd6fa 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -133,12 +133,7 @@ pub async fn handle_host_delegation( Ok(host_data) => host_data, Err(error) => { // TODO error page response in case of an internal issue - //todo!() - // TODO - SiteAndHost::File { - site_id: 4, - site_slug: "scp-wiki", - } + todo!() } }; From 420b49ccf1a5b6ff6fdd4acebda363632e340775 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Tue, 21 Jan 2025 01:33:43 -0500 Subject: [PATCH 110/306] Use local variable in macro. --- wws/src/handler/file.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 259a97069f..925949fb5a 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -69,7 +69,7 @@ macro_rules! fetch_file { error!( site_id = site_id, page_id = page_id, - filename = $filename, + filename = filename, "Cannot get file, none with filename", ); From ca8b38794525374fc2a8871359980729d5053b5f Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Tue, 21 Jan 2025 01:45:06 -0500 Subject: [PATCH 111/306] Add note about error code for S3 fetch. --- wws/src/handler/file.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 925949fb5a..63f3bbdec8 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -97,6 +97,14 @@ macro_rules! fetch_file { Body::from_stream(bytes) } Err(error) => { + // NOTE: If the error here is 404 we still return 500. + // + // If we have a file record for a file, then the + // corresponding blob *should* exist. + // + // If it doesn't, the data invariant is not being met, + // which is an unexpected error. + error!( site_id = site_id, page_slug = page_slug, From 451cd90cb0e80b9fa827b7e90643849dc5f2f728 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Tue, 21 Jan 2025 01:47:45 -0500 Subject: [PATCH 112/306] Add note about HEAD in axum. --- wws/src/route.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/wws/src/route.rs b/wws/src/route.rs index fd02495722..7cc112dc58 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -52,6 +52,14 @@ pub fn build_router(state: ServerState) -> Router { .with_state(main_state); // Router that serves wjfiles + // + // NOTE: For all GET routes, axum automatically handles HEAD requests. + // The same logic is run, but the body is removed, which is very + // convenient for us. + // + // If we can avoid an expensive operation in a HEAD, then add + // a "method: http::Method" parameter in the request then check + // that before doing the relevant operation. 
let files_router = Router::new() // Wikidot routes .route( From c6df36df09f4d2a5cbdae02e5adefa8072c83f3e Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 23 Jan 2025 00:12:55 -0500 Subject: [PATCH 113/306] Add stubs for .well-known and robots.txt --- wws/src/handler/mod.rs | 4 ++++ wws/src/handler/robots.rs | 33 ++++++++++++++++++++++++++++++++ wws/src/handler/well_known.rs | 36 +++++++++++++++++++++++++++++++++++ wws/src/route.rs | 4 +++- 4 files changed, 76 insertions(+), 1 deletion(-) create mode 100644 wws/src/handler/robots.rs create mode 100644 wws/src/handler/well_known.rs diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 513c5dd6fa..141f43e0a0 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -24,6 +24,8 @@ mod framerail; mod html; mod misc; mod redirect; +mod robots; +mod well_known; pub use self::code::*; pub use self::file::*; @@ -31,6 +33,8 @@ pub use self::framerail::*; pub use self::html::*; pub use self::misc::*; pub use self::redirect::*; +pub use self::robots::*; +pub use self::well_known::*; use crate::{ host::{lookup_host, SiteAndHost}, diff --git a/wws/src/handler/robots.rs b/wws/src/handler/robots.rs new file mode 100644 index 0000000000..b796a28324 --- /dev/null +++ b/wws/src/handler/robots.rs @@ -0,0 +1,33 @@ +/* + * handler/robots.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +//! Handling for the robots.txt file. + +use axum::{ + body::Body, + http::{header, status::StatusCode}, + response::Response, +}; + +// TODO + +pub async fn handle_robots_txt() -> StatusCode { + StatusCode::NOT_IMPLEMENTED +} diff --git a/wws/src/handler/well_known.rs b/wws/src/handler/well_known.rs new file mode 100644 index 0000000000..a89455ca27 --- /dev/null +++ b/wws/src/handler/well_known.rs @@ -0,0 +1,36 @@ +/* + * handler/well_known.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +//! Handling for the .well-known special discovery path. +//! +//! Many different standard paths are served here, and each +//! should be implemented as a separate handler. 
+ +use axum::{ + body::Body, + http::{header, status::StatusCode}, + response::Response, +}; + +// TODO + +pub async fn handle_well_known() -> StatusCode { + StatusCode::NOT_IMPLEMENTED +} diff --git a/wws/src/route.rs b/wws/src/route.rs index 7cc112dc58..3d4dba3099 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -101,7 +101,9 @@ pub fn build_router(state: ServerState) -> Router { handle_host_delegation(state, hostname, request, main_router, files_router).await } ) - // Easter egg + // General routes + .route("/robots.txt", get(handle_robots_txt)) + .route("/.well-known", any(handle_well_known)) .route("/-/teapot", any(handle_teapot)) // Middleware .layer(TraceLayer::new_for_http()) From 0662ca396451390e3308bedc67255a2257c6af93 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 23 Jan 2025 00:16:18 -0500 Subject: [PATCH 114/306] Add TODOs for new stub routes. --- wws/src/route.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/wws/src/route.rs b/wws/src/route.rs index 3d4dba3099..e4f948c3f4 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -102,8 +102,8 @@ pub fn build_router(state: ServerState) -> Router { } ) // General routes - .route("/robots.txt", get(handle_robots_txt)) - .route("/.well-known", any(handle_well_known)) + .route("/robots.txt", get(handle_robots_txt)) // TODO + .route("/.well-known", any(handle_well_known)) // TODO .route("/-/teapot", any(handle_teapot)) // Middleware .layer(TraceLayer::new_for_http()) From 7541a7420a5d4764fc5d04e3b476a6605b21d91b Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 23 Jan 2025 00:17:36 -0500 Subject: [PATCH 115/306] Use common utility for site header info. --- wws/src/handler/redirect.rs | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs index 3610ab3cd3..985a68c96f 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -18,7 +18,7 @@ * along with this program. If not, see . */ -use super::HEADER_SITE_SLUG; +use super::get_site_info; use crate::{host::DEFAULT_SITE_SLUG, path::get_path, state::ServerState}; use axum::{ extract::State, @@ -34,7 +34,7 @@ pub async fn redirect_to_files( // xyz.wikijump.com -> xyz.wjfiles.com // customdomain.com -> xyz.wjfiles.com - let site_slug = get_site_slug(&headers); + let (_, site_slug) = get_site_info(&headers); let path = get_path(&uri); let domain = &state.domains.files_domain; let destination = format!("https://{site_slug}{domain}{path}"); @@ -46,7 +46,7 @@ pub async fn redirect_to_main( headers: HeaderMap, uri: Uri, ) -> Redirect { - let site_slug = get_site_slug(&headers); + let (_, site_slug) = get_site_info(&headers); let path = get_path(&uri); // Only remove www for the main site. @@ -61,11 +61,3 @@ pub async fn redirect_to_main( Redirect::permanent(&destination) } - -fn get_site_slug(headers: &HeaderMap) -> &str { - headers - .get(HEADER_SITE_SLUG) - .expect("Site slug header not set by parent rounter") - .to_str() - .expect("Unable to convert site slug header to string") -} From a3ac868e0728aac92d55daef59a5f9c26478ed34 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 23 Jan 2025 00:30:13 -0500 Subject: [PATCH 116/306] Add redirect handlers for main site page convenience routes. 
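These are convenience routes on the main (framerail) domain that hand the request over to the equivalent wjfiles route. As an illustration only (hypothetical slug and filename, and assuming the canonical wikijump.com / .wjfiles.com domains mentioned in the redirect comments), a request such as

    https://scp-wiki.wikijump.com/some-page/file/image.png

now receives a permanent redirect to

    https://scp-wiki.wjfiles.com/-/file/some-page/image.png

and likewise for the /code/ and /html/ variants.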
--- wws/src/handler/redirect.rs | 35 ++++++++++++++++++++++++++++++++++- wws/src/route.rs | 6 ++++++ 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs index 985a68c96f..d4d4c9b187 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -21,7 +21,7 @@ use super::get_site_info; use crate::{host::DEFAULT_SITE_SLUG, path::get_path, state::ServerState}; use axum::{ - extract::State, + extract::{Path, State}, http::{header::HeaderMap, Uri}, response::Redirect, }; @@ -61,3 +61,36 @@ pub async fn redirect_to_main( Redirect::permanent(&destination) } + +pub async fn redirect_to_file_route( + State(state): State, + Path((page_slug, filename)): Path<(String, String)>, + headers: HeaderMap, +) -> Redirect { + let (_, site_slug) = get_site_info(&headers); + let domain = &state.domains.files_domain; + let destination = format!("https://{site_slug}{domain}/-/file/{page_slug}/{filename}"); + Redirect::permanent(&destination) +} + +pub async fn redirect_to_code_route( + State(state): State, + Path((page_slug, index)): Path<(String, String)>, + headers: HeaderMap, +) -> Redirect { + let (_, site_slug) = get_site_info(&headers); + let domain = &state.domains.files_domain; + let destination = format!("https://{site_slug}{domain}/-/code/{page_slug}/{index}"); + Redirect::permanent(&destination) +} + +pub async fn redirect_to_html_route( + State(state): State, + Path((page_slug, id)): Path<(String, String)>, + headers: HeaderMap, +) -> Redirect { + let (_, site_slug) = get_site_info(&headers); + let domain = &state.domains.files_domain; + let destination = format!("https://{site_slug}{domain}/-/html/{page_slug}/{id}"); + Redirect::permanent(&destination) +} diff --git a/wws/src/route.rs b/wws/src/route.rs index e4f948c3f4..3698bf85a9 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -40,6 +40,11 @@ pub fn build_router(state: ServerState) -> Router { // Router that serves framerail let main_router = Router::new() + // Convenience redirect routes + .route("/{page_slug}/file/{filename}", any(redirect_to_file_route)) + .route("/{page_slug}/code/{index}", any(redirect_to_code_route)) + .route("/{page_slug}/html/{id}", any(redirect_to_html_route)) + // Routes that are really on wjfiles .route("/local--files/{*rest}", any(redirect_to_files)) .route("/local--code/{*rest}", any(redirect_to_files)) .route("/local--html/{*rest}", any(redirect_to_files)) @@ -48,6 +53,7 @@ pub fn build_router(state: ServerState) -> Router { .route("/-/download/{*rest}", any(redirect_to_files)) .route("/-/code/{*rest}", any(redirect_to_files)) .route("/-/html/{*rest}", any(redirect_to_files)) + // Main handler .fallback(proxy_framerail) .with_state(main_state); From 72dcb3466a36b0e02abe91acca7834af9824f5d0 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 23 Jan 2025 00:35:47 -0500 Subject: [PATCH 117/306] Use codegen macro to produce the three new handlers. 
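For reference, `make_redirect_to_route!(file)` expands to roughly the handler below; `paste!` glues `redirect_to_`, the macro argument, and `_route` into one identifier, so the generated functions keep the names that route.rs already registers. This is only a sketch of the expansion (the trailing path parameter is uniformly called `extra` in the macro):

    pub async fn redirect_to_file_route(
        State(state): State<ServerState>,
        Path((page_slug, extra)): Path<(String, String)>,
        headers: HeaderMap,
    ) -> Redirect {
        let (_, site_slug) = get_site_info(&headers);
        let domain = &state.domains.files_domain;
        let route = stringify!(file); // expands to "file"
        let destination =
            format!("https://{site_slug}{domain}/-/{route}/{page_slug}/{extra}");
        Redirect::permanent(&destination)
    }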
--- wws/Cargo.lock | 7 +++++ wws/Cargo.toml | 1 + wws/src/handler/redirect.rs | 56 +++++++++++++++++-------------------- 3 files changed, 34 insertions(+), 30 deletions(-) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 67a689060a..5264fcffd8 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -1424,6 +1424,12 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + [[package]] name = "percent-encoding" version = "2.3.1" @@ -2686,6 +2692,7 @@ dependencies = [ "dotenvy", "jsonrpsee", "once_cell", + "paste", "redis", "ref-map", "rust-s3", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 92ef0e50bb..e4b9477147 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -21,6 +21,7 @@ color-backtrace = "0.6" dotenvy = "0.15" jsonrpsee = { version = "0.24", features = ["async-client", "jsonrpsee-http-client"] } once_cell = "1" +paste = "1" redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "r2d2", "tokio-comp", "tokio-rustls-comp"], default-features = false } ref-map = "0.1" rust-s3 = { version = "0.35", features = ["with-tokio", "tokio-rustls-tls"], default-features = false } diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs index d4d4c9b187..82ef6bfdad 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -25,6 +25,7 @@ use axum::{ http::{header::HeaderMap, Uri}, response::Redirect, }; +use paste::paste; pub async fn redirect_to_files( State(state): State, @@ -62,35 +63,30 @@ pub async fn redirect_to_main( Redirect::permanent(&destination) } -pub async fn redirect_to_file_route( - State(state): State, - Path((page_slug, filename)): Path<(String, String)>, - headers: HeaderMap, -) -> Redirect { - let (_, site_slug) = get_site_info(&headers); - let domain = &state.domains.files_domain; - let destination = format!("https://{site_slug}{domain}/-/file/{page_slug}/{filename}"); - Redirect::permanent(&destination) -} - -pub async fn redirect_to_code_route( - State(state): State, - Path((page_slug, index)): Path<(String, String)>, - headers: HeaderMap, -) -> Redirect { - let (_, site_slug) = get_site_info(&headers); - let domain = &state.domains.files_domain; - let destination = format!("https://{site_slug}{domain}/-/code/{page_slug}/{index}"); - Redirect::permanent(&destination) +/// Code generation macro to create the "page convenience redirect routes". +/// +/// These are routes on the main server like `/my-page/code/1` which really +/// go to `/-/code/my-page/1` on the files server. Since they are identical +/// aside from what special route they go to, we can have a macro generate +/// them for us. +macro_rules! make_redirect_to_route { + ($name:ident) => { + paste! 
{ + pub async fn []( + State(state): State, + Path((page_slug, extra)): Path<(String, String)>, + headers: HeaderMap, + ) -> Redirect { + let (_, site_slug) = get_site_info(&headers); + let domain = &state.domains.files_domain; + let route = stringify!($name); + let destination = format!("https://{site_slug}{domain}/-/{route}/{page_slug}/{extra}"); + Redirect::permanent(&destination) + } + } + }; } -pub async fn redirect_to_html_route( - State(state): State, - Path((page_slug, id)): Path<(String, String)>, - headers: HeaderMap, -) -> Redirect { - let (_, site_slug) = get_site_info(&headers); - let domain = &state.domains.files_domain; - let destination = format!("https://{site_slug}{domain}/-/html/{page_slug}/{id}"); - Redirect::permanent(&destination) -} +make_redirect_to_route!(file); +make_redirect_to_route!(code); +make_redirect_to_route!(html); From c67d7dda7550179b6955d348bdd3d2ffc627494a Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 23 Jan 2025 00:37:16 -0500 Subject: [PATCH 118/306] Add download convenience redirect too. --- wws/src/handler/redirect.rs | 3 ++- wws/src/route.rs | 6 +++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/wws/src/handler/redirect.rs b/wws/src/handler/redirect.rs index 82ef6bfdad..2df08d51ad 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -87,6 +87,7 @@ macro_rules! make_redirect_to_route { }; } -make_redirect_to_route!(file); make_redirect_to_route!(code); make_redirect_to_route!(html); +make_redirect_to_route!(file); +make_redirect_to_route!(download); diff --git a/wws/src/route.rs b/wws/src/route.rs index 3698bf85a9..d234db5426 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -41,9 +41,13 @@ pub fn build_router(state: ServerState) -> Router { // Router that serves framerail let main_router = Router::new() // Convenience redirect routes - .route("/{page_slug}/file/{filename}", any(redirect_to_file_route)) .route("/{page_slug}/code/{index}", any(redirect_to_code_route)) .route("/{page_slug}/html/{id}", any(redirect_to_html_route)) + .route("/{page_slug}/file/{filename}", any(redirect_to_file_route)) + .route( + "/{page_slug}/download/{filename}", + any(redirect_to_download_route), + ) // Routes that are really on wjfiles .route("/local--files/{*rest}", any(redirect_to_files)) .route("/local--code/{*rest}", any(redirect_to_files)) From 414c6e43f9e96f94c3a7a784876ecd3841598ed5 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 6 Feb 2025 01:49:26 -0500 Subject: [PATCH 119/306] Add deepwell error cases for wws use. --- deepwell/src/services/error.rs | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/deepwell/src/services/error.rs b/deepwell/src/services/error.rs index 20ef2dda2f..ee8147ef8b 100644 --- a/deepwell/src/services/error.rs +++ b/deepwell/src/services/error.rs @@ -301,6 +301,21 @@ pub enum Error { #[error("The rate limit for an external API has been reached")] RateLimited, + + // Errors for wws + // See the 8000 section in the error codes table + + #[error("The web server failed to process the request")] + WebServerFailure, + + #[error("The web server cannot fetch page information")] + PageFetch, + + #[error("The web server cannot fetch file information")] + FileFetch, + + #[error("The web server cannot fetch blob data")] + BlobFetch, } impl Error { @@ -430,6 +445,17 @@ impl Error { Error::InvalidSessionToken => 5001, Error::SessionUserId { .. } => 5002, // TODO: permission errors (e.g. 
locked page, cannot apply bans) + + // 8000 - Web Server / Routing errors + // + // This block is reserved for errors exclusively returned by WWS. + // These errors are not be used by DEEPWELL. + // + // WebServerFailure is pretty general, avoid using it if possible. + Error::WebServerFailure => 6000, + Error::PageFetch => 6001, + Error::FileFetch => 6002, + Error::BlobFetch => 6003, } } From 96b30d0254d69f2d76d710ebca0efc3da74c77be Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 6 Feb 2025 01:50:22 -0500 Subject: [PATCH 120/306] Set up plain HTML (with codes) for wjfiles errors. --- wws/Cargo.lock | 7 ++ wws/Cargo.toml | 1 + wws/src/error/html.rs | 155 ++++++++++++++++++++++++++++ wws/src/error/mod.rs | 31 ++++++ wws/src/{error.rs => error/rust.rs} | 4 +- wws/src/handler/file.rs | 41 +++++--- 6 files changed, 224 insertions(+), 15 deletions(-) create mode 100644 wws/src/error/html.rs create mode 100644 wws/src/error/mod.rs rename wws/src/{error.rs => error/rust.rs} (95%) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 5264fcffd8..3436cbb63d 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -2489,6 +2489,12 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "v_htmlescape" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e8257fbc510f0a46eb602c10215901938b5c2a7d5e70fc11483b1d3c9b5b18c" + [[package]] name = "valuable" version = "0.1.0" @@ -2704,6 +2710,7 @@ dependencies = [ "tower-http", "tracing", "tracing-subscriber", + "v_htmlescape", "wikidot-normalize", ] diff --git a/wws/Cargo.toml b/wws/Cargo.toml index e4b9477147..2394bf0c6b 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -33,6 +33,7 @@ tower = "0.5" tower-http = { version = "0.6.1", features = ["add-extension", "compression-br", "compression-deflate", "compression-gzip", "compression-zstd", "normalize-path", "set-header", "trace"] } tracing = "0.1" tracing-subscriber = { version = "0.3", features = ["env-filter"] } +v_htmlescape = "0.15" wikidot-normalize = "0.12" [build-dependencies] diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs new file mode 100644 index 0000000000..06d8c51363 --- /dev/null +++ b/wws/src/error/html.rs @@ -0,0 +1,155 @@ +/* + * error/html.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +//! Helpers for converting error states into axum responses. +//! +//! This is for cases where getting a full or proper error message +//! (complete with localization) is not feasible due to how high +//! up this error is, and so we return an error message annotated +//! with an error code instead. +//! +//! This is very basic HTML generation. If we need to do anything +//! 
more fancy in the future, then feel free to replace this with +//! something better. + +use axum::{body::Body, http::{header::{self, HeaderValue}, StatusCode}, response::{Response}}; +use v_htmlescape::escape as html_escape; + +const HTML_BEGIN: &str = r""; +const HTML_MIDDLE: &str = ""; +const HTML_END: &str = ""; + +/// Error codes represented in wws. +/// These must match the corresponding errors in deepwell (`src/service/error.rs`) +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum ServerErrorCode<'a> { + PageNotFound { site_id: i64, page_slug: &'a str }, + FileNotFound { site_id: i64, page_id: i64, filename: &'a str }, + PageFetch { site_id: i64, page_slug: &'a str }, + FileFetch { site_id: i64, page_id: i64, filename: &'a str }, + BlobFetch { site_id: i64, page_slug: &'a str, filename: &'a str }, +} + +impl ServerErrorCode<'_> { + /// Returns the error code corresponding to this error. + /// + /// See `src/service/error.rs` for a listing. + /// + /// Note that, despite the acceptable error range only being positive, + /// the same type (`i32`) is used here as in DEEPWELL. + pub fn error_code(self) -> i32 { + match self { + ServerErrorCode::PageNotFound { .. } => 2005, + ServerErrorCode::FileNotFound { .. } => 2009, + ServerErrorCode::PageFetch { .. } => 6001, + ServerErrorCode::FileFetch { .. } => 6002, + ServerErrorCode::BlobFetch { .. } => 6003, + } + } + + /// Returns the HTTP status code for this error. + pub fn status_code(self) -> StatusCode { + match self { + ServerErrorCode::PageNotFound { .. } | ServerErrorCode::FileNotFound { .. } => StatusCode::NOT_FOUND, + ServerErrorCode::PageFetch { .. } | ServerErrorCode::FileFetch { .. } | ServerErrorCode::BlobFetch { .. } => StatusCode::INTERNAL_SERVER_ERROR, + } + } + + /// Returns the HTML title for this error. + fn title(self) -> &'static str { + match self { + ServerErrorCode::PageNotFound { .. } => "Page not found", + ServerErrorCode::FileNotFound { .. } => "File not found", + ServerErrorCode::PageFetch { .. } => "Cannot load page", + ServerErrorCode::FileFetch { .. } => "Cannot load file", + ServerErrorCode::BlobFetch { .. 
} => "Cannot load file data", + } + } + + pub fn into_response(self) -> Response { + // Build error HTML + let mut body = String::with_capacity(HTML_BEGIN.len() + HTML_END.len() + 70); + body.push_str(HTML_BEGIN); + body.push_str(self.title()); + body.push_str(HTML_MIDDLE); + + let error_code = self.error_code(); + str_write!(&mut body, "[Error {error_code}]"); + + // Write error body + match self { + ServerErrorCode::PageNotFound { site_id, page_slug } => { + str_write!( + body, + "Cannot find page '{}' in site ID {}", + html_escape(page_slug), + site_id, + ); + } + ServerErrorCode::FileNotFound { site_id, page_id, filename } => { + str_write!( + body, + "Cannot find file '{}' in page ID {} in site ID {}", + html_escape(filename), + page_id, + site_id, + ); + } + ServerErrorCode::PageFetch { site_id, page_slug } => { + str_write!( + body, + "Cannot load page '{}' in site ID {}", + html_escape(page_slug), + site_id, + ); + } + ServerErrorCode::FileFetch { site_id, page_id, filename } => { + str_write!( + body, + "Cannot load file '{}', in page ID {} in site ID {}", + html_escape(filename), + page_id, + site_id, + ); + } + ServerErrorCode::BlobFetch { site_id, page_slug, filename } => { + str_write!( + body, + "Cannot load file data for '{}', in page '{}' in site ID {}", + html_escape(filename), + html_escape(page_slug), + site_id, + ); + } + }; + + body.push_str(HTML_END); + + // Build and return response + Response::builder() + .status(self.status_code()) + .header( + header::CONTENT_TYPE, + HeaderValue::from_static("text/html; charset=utf-8"), + ) + .body(Body::from(body)) + .expect("Unable to build response") + } +} diff --git a/wws/src/error/mod.rs b/wws/src/error/mod.rs new file mode 100644 index 0000000000..4aa3001235 --- /dev/null +++ b/wws/src/error/mod.rs @@ -0,0 +1,31 @@ +/* + * error/mod.rs + * + * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +//! Error handling and error responses. +//! +//! This module has two semi-related functions, first, the +//! structures for error handling within Rust, and second, +//! utilities to convert final error states into axum responses. + +mod html; +mod rust; + +pub use self::html::*; +pub use self::rust::*; diff --git a/wws/src/error.rs b/wws/src/error/rust.rs similarity index 95% rename from wws/src/error.rs rename to wws/src/error/rust.rs index c4c1b7b7b5..03d18c9677 100644 --- a/wws/src/error.rs +++ b/wws/src/error/rust.rs @@ -1,5 +1,5 @@ /* - * error.rs + * error/rust.rs * * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) * Copyright (C) 2019-2025 Wikijump Team @@ -18,6 +18,8 @@ * along with this program. If not, see . */ +//! Structures for error handling within Rust. 
+ use jsonrpsee::core::ClientError; use s3::error::S3Error; use std::io; diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 63f3bbdec8..49cb9f691d 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -19,7 +19,11 @@ */ use super::get_site_info; -use crate::{deepwell::FileData, error::Result, state::ServerState}; +use crate::{ + deepwell::FileData, + error::{Result, ServerErrorCode}, + state::ServerState, +}; use axum::{ body::Body, extract::{Path, State}, @@ -49,8 +53,7 @@ macro_rules! fetch_file { page_slug = page_slug, "Cannot get file, no such page", ); - - return StatusCode::NOT_FOUND.into_response(); + return ServerErrorCode::PageNotFound { site_id, page_slug }.into_response(); } Err(error) => { error!( @@ -58,8 +61,7 @@ macro_rules! fetch_file { page_slug = page_slug, "Cannot get page info: {error}", ); - - return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + return ServerErrorCode::PageFetch { site_id, page_slug }.into_response(); } }; @@ -72,17 +74,26 @@ macro_rules! fetch_file { filename = filename, "Cannot get file, none with filename", ); - - return StatusCode::NOT_FOUND.into_response(); + return ServerErrorCode::FileNotFound { + site_id, + page_id, + filename, + } + .into_response(); } Err(error) => { error!( site_id = site_id, - page_slug = page_slug, + page_id = page_id, + filename = filename, "Cannot get file info: {error}", ); - - return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + return ServerErrorCode::FileFetch { + site_id, + page_id, + filename, + } + .into_response(); } }; @@ -93,7 +104,6 @@ macro_rules! fetch_file { StatusCode::OK, "get_object_stream() succeeded but did not reply 200", ); - Body::from_stream(bytes) } Err(error) => { @@ -104,7 +114,6 @@ macro_rules! fetch_file { // // If it doesn't, the data invariant is not being met, // which is an unexpected error. - error!( site_id = site_id, page_slug = page_slug, @@ -112,8 +121,12 @@ macro_rules! fetch_file { s3_hash = &file_info.s3_hash, "Cannot get blob data: {error}", ); - - return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + return ServerErrorCode::BlobFetch { + site_id, + page_slug, + filename, + } + .into_response(); } }; From 1b0ece79bae4905f50012ca58396db160e23fb3c Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 6 Feb 2025 02:09:03 -0500 Subject: [PATCH 121/306] Fix html header. --- wws/src/error/html.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index 06d8c51363..b51d218a26 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -32,7 +32,7 @@ use axum::{body::Body, http::{header::{self, HeaderValue}, StatusCode}, response::{Response}}; use v_htmlescape::escape as html_escape; -const HTML_BEGIN: &str = r""; +const HTML_BEGIN: &str = r""; const HTML_MIDDLE: &str = ""; const HTML_END: &str = ""; From 91b2c9806d5eee67971aab4e13b648cbae93b921 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 6 Feb 2025 02:09:28 -0500 Subject: [PATCH 122/306] Fix error code formatting. 
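Without the trailing space the code prefix runs straight into the message text. Taking error 2005 (page not found) as an example, the rendered fragment goes from

    <strong>[Error 2005]</strong>Cannot find page ...

to

    <strong>[Error 2005]</strong> Cannot find page ...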
--- wws/src/error/html.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index b51d218a26..86b98237a7 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -91,7 +91,7 @@ impl ServerErrorCode<'_> { body.push_str(HTML_MIDDLE); let error_code = self.error_code(); - str_write!(&mut body, "[Error {error_code}]"); + str_write!(&mut body, "[Error {error_code}] "); // Write error body match self { From 4ea5ee1d3e007b82dfb8e18d24da97780ec830d2 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 6 Feb 2025 02:10:55 -0500 Subject: [PATCH 123/306] Print slugs in monospace. --- wws/src/error/html.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index 86b98237a7..7103277083 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -98,7 +98,7 @@ impl ServerErrorCode<'_> { ServerErrorCode::PageNotFound { site_id, page_slug } => { str_write!( body, - "Cannot find page '{}' in site ID {}", + "Cannot find page \"{}\" in site ID {}", html_escape(page_slug), site_id, ); @@ -106,7 +106,7 @@ impl ServerErrorCode<'_> { ServerErrorCode::FileNotFound { site_id, page_id, filename } => { str_write!( body, - "Cannot find file '{}' in page ID {} in site ID {}", + "Cannot find file \"{}\" in page ID {} in site ID {}", html_escape(filename), page_id, site_id, @@ -115,7 +115,7 @@ impl ServerErrorCode<'_> { ServerErrorCode::PageFetch { site_id, page_slug } => { str_write!( body, - "Cannot load page '{}' in site ID {}", + "Cannot load page \"{}\" in site ID {}", html_escape(page_slug), site_id, ); @@ -123,7 +123,7 @@ impl ServerErrorCode<'_> { ServerErrorCode::FileFetch { site_id, page_id, filename } => { str_write!( body, - "Cannot load file '{}', in page ID {} in site ID {}", + "Cannot load file \"{}\", in page ID {} in site ID {}", html_escape(filename), page_id, site_id, @@ -132,7 +132,7 @@ impl ServerErrorCode<'_> { ServerErrorCode::BlobFetch { site_id, page_slug, filename } => { str_write!( body, - "Cannot load file data for '{}', in page '{}' in site ID {}", + "Cannot load file data for \"{}\", in page \"{}\" in site ID {}", html_escape(filename), html_escape(page_slug), site_id, From 4b7c5e3b39b481d63b200113759e4d602ccd0f21 Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Thu, 6 Feb 2025 02:14:56 -0500 Subject: [PATCH 124/306] Run rustfmt. --- wws/src/error/html.rs | 63 +++++++++++++++++++++++++++++++++++-------- 1 file changed, 52 insertions(+), 11 deletions(-) diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index 7103277083..f97d705813 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -29,7 +29,14 @@ //! more fancy in the future, then feel free to replace this with //! something better. 
-use axum::{body::Body, http::{header::{self, HeaderValue}, StatusCode}, response::{Response}}; +use axum::{ + body::Body, + http::{ + header::{self, HeaderValue}, + StatusCode, + }, + response::Response, +}; use v_htmlescape::escape as html_escape; const HTML_BEGIN: &str = r""; @@ -40,11 +47,29 @@ const HTML_END: &str = "</body></html>"; /// These must match the corresponding errors in deepwell (`src/service/error.rs`) #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum ServerErrorCode<'a> { - PageNotFound { site_id: i64, page_slug: &'a str }, - FileNotFound { site_id: i64, page_id: i64, filename: &'a str }, - PageFetch { site_id: i64, page_slug: &'a str }, - FileFetch { site_id: i64, page_id: i64, filename: &'a str }, - BlobFetch { site_id: i64, page_slug: &'a str, filename: &'a str }, + PageNotFound { + site_id: i64, + page_slug: &'a str, + }, + FileNotFound { + site_id: i64, + page_id: i64, + filename: &'a str, + }, + PageFetch { + site_id: i64, + page_slug: &'a str, + }, + FileFetch { + site_id: i64, + page_id: i64, + filename: &'a str, + }, + BlobFetch { + site_id: i64, + page_slug: &'a str, + filename: &'a str, + }, } impl ServerErrorCode<'_> { @@ -67,8 +92,12 @@ impl ServerErrorCode<'_> { /// Returns the HTTP status code for this error. pub fn status_code(self) -> StatusCode { match self { - ServerErrorCode::PageNotFound { .. } | ServerErrorCode::FileNotFound { .. } => StatusCode::NOT_FOUND, - ServerErrorCode::PageFetch { .. } | ServerErrorCode::FileFetch { .. } | ServerErrorCode::BlobFetch { .. } => StatusCode::INTERNAL_SERVER_ERROR, + ServerErrorCode::PageNotFound { .. } | ServerErrorCode::FileNotFound { .. } => { + StatusCode::NOT_FOUND + } + ServerErrorCode::PageFetch { .. } + | ServerErrorCode::FileFetch { .. } + | ServerErrorCode::BlobFetch { .. } => StatusCode::INTERNAL_SERVER_ERROR, } } @@ -103,7 +132,11 @@ impl ServerErrorCode<'_> { site_id, ); } - ServerErrorCode::FileNotFound { site_id, page_id, filename } => { + ServerErrorCode::FileNotFound { + site_id, + page_id, + filename, + } => { str_write!( body, "Cannot find file \"<code>{}</code>\" in page ID {} in site ID {}", @@ -120,7 +153,11 @@ impl ServerErrorCode<'_> { site_id, ); } - ServerErrorCode::FileFetch { site_id, page_id, filename } => { + ServerErrorCode::FileFetch { + site_id, + page_id, + filename, + } => { str_write!( body, "Cannot load file \"<code>{}</code>\", in page ID {} in site ID {}", @@ -129,7 +166,11 @@ impl ServerErrorCode<'_> { site_id, ); } - ServerErrorCode::BlobFetch { site_id, page_slug, filename } => { + ServerErrorCode::BlobFetch { + site_id, + page_slug, + filename, + } => { str_write!( body, "Cannot load file data for \"<code>{}</code>\", in page \"<code>{}</code>\" in site ID {}", From 45cde4900b7ae6b0dd896e137ad28732c4e3167f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Thu, 6 Feb 2025 02:17:40 -0500 Subject: [PATCH 125/306] Remove unused import. --- wws/src/deepwell.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 711737c8d4..d23cc6a4bd 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -18,7 +18,7 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. 
*/ -use crate::error::{Error, Result}; +use crate::error::Result; use jsonrpsee::{core::client::ClientT, http_client::HttpClient, rpc_params}; use serde::Deserialize; use std::time::Duration; From 9345af32ca389ac77f86f270c2aeda54dd133beb Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Thu, 6 Feb 2025 23:31:10 -0500 Subject: [PATCH 126/306] Remove extra newlines. --- wws/src/host.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/wws/src/host.rs b/wws/src/host.rs index bb02a4cc7b..19ee57212b 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -69,7 +69,6 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S site_id = site_id, "Routing files site request", ); - Ok(SiteAndHost::File { site_id, site_slug }) } None => { @@ -79,7 +78,6 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S site_slug = site_slug, "No such site with slug (files)", ); - Ok(SiteAndHost::FileMissing { site_slug }) } } @@ -106,7 +104,6 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S site_id = site_id, "Routing main site request (custom)", ); - Ok(SiteAndHost::MainCustom { site_id, site_slug }) } None => { @@ -146,7 +143,6 @@ async fn main_site_slug<'a>( "Routing main site request ({})", if is_default { "default" } else { "slug" }, ); - Ok(SiteAndHost::Main { site_id, site_slug }) } None => { @@ -156,7 +152,6 @@ async fn main_site_slug<'a>( site_slug = site_slug, "No such site with slug (main)", ); - Ok(SiteAndHost::MainMissing { site_slug }) } } From 9ad23cbc70ddaf2b50b535c528775a4b15fe169e Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 00:03:00 -0500 Subject: [PATCH 127/306] Add rustdoc for SiteAndHost enum. Also, reorder DefaultRedirect to be lower. --- wws/src/host.rs | 23 ++++++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/wws/src/host.rs b/wws/src/host.rs index 19ee57212b..121760025d 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -26,15 +26,36 @@ use crate::{deepwell::Domains, error::Result, state::ServerState}; /// with no subdomain component. pub const DEFAULT_SITE_SLUG: &str = "www"; +/// Describes which Wikijump site and router this request is pointed towards. +/// +/// * "Main" refers to the framerail handler, i.e. `[site-slug].wikijump.com`. +/// * "Files" refers to the wjfiles handlers, i.e. `[site-slug].wjfiles.com`. #[derive(Debug)] pub enum SiteAndHost<'a> { + /// Main router existent site, canonical domain. Main { site_id: i64, site_slug: &'a str }, + + /// Main router, non-existent site, canonical domain. MainMissing { site_slug: &'a str }, + + /// Main router, existent site, custom domain. MainCustom { site_id: i64, site_slug: String }, + + /// Main router, non-existent site, custom domain. MainCustomMissing, - DefaultRedirect, + + /// Files router, existent site. File { site_id: i64, site_slug: &'a str }, + + /// Files router, non-existent site. FileMissing { site_slug: &'a str }, + + /// Main router, request to canonical `www`, should be redirected to the root domain. + /// Special case. + DefaultRedirect, + + /// Request is the root domain on the files router, which has no meaning. + /// Special case. FileRoot, } From 95f389f4fdb0a155dfea6f231c8bdea87cb49223 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 00:15:17 -0500 Subject: [PATCH 128/306] Forward MainCustomMissing host request. 
--- wws/src/handler/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 141f43e0a0..1005312677 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -160,7 +160,7 @@ pub async fn handle_host_delegation( forward_request!(main_router) } SiteAndHost::MainCustomMissing => { - todo!() + forward_request!(main_router) } // Default site redirect // e.g. "www.wikijump.com/foo" -> "wikijump.com/foo" From 03ab4abd9f73741dd8b741d9f0423551ea18c1e2 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 02:07:21 -0500 Subject: [PATCH 129/306] Add web errors for missing site. Set that as the response for missing domains. This fixes the issue where those routes, attempting to return data normally, try to get site information which isn't there. Since the error message is the same regardless of what route it's on, we can just do this. --- wws/src/error/html.rs | 32 +++++++++++++++++++++++++++++--- wws/src/handler/mod.rs | 9 ++++----- 2 files changed, 33 insertions(+), 8 deletions(-) diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index f97d705813..dff76696c1 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -47,6 +47,12 @@ const HTML_END: &str = "</body></html>"; /// These must match the corresponding errors in deepwell (`src/service/error.rs`) #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum ServerErrorCode<'a> { + SiteNotFound { + site_slug: &'a str, + }, + CustomDomainNotFound { + domain: &'a str, + }, PageNotFound { site_id: i64, page_slug: &'a str, @@ -81,6 +87,8 @@ impl ServerErrorCode<'_> { /// the same type (`i32`) is used here as in DEEPWELL. pub fn error_code(self) -> i32 { match self { + ServerErrorCode::SiteNotFound { .. } => 2004, + ServerErrorCode::CustomDomainNotFound { .. } => 2013, ServerErrorCode::PageNotFound { .. } => 2005, ServerErrorCode::FileNotFound { .. } => 2009, ServerErrorCode::PageFetch { .. } => 6001, @@ -92,9 +100,10 @@ impl ServerErrorCode<'_> { /// Returns the HTTP status code for this error. pub fn status_code(self) -> StatusCode { match self { - ServerErrorCode::PageNotFound { .. } | ServerErrorCode::FileNotFound { .. } => { - StatusCode::NOT_FOUND - } + ServerErrorCode::SiteNotFound { .. } + | ServerErrorCode::CustomDomainNotFound { .. } + | ServerErrorCode::PageNotFound { .. } + | ServerErrorCode::FileNotFound { .. } => StatusCode::NOT_FOUND, ServerErrorCode::PageFetch { .. } | ServerErrorCode::FileFetch { .. } | ServerErrorCode::BlobFetch { .. } => StatusCode::INTERNAL_SERVER_ERROR, @@ -104,6 +113,9 @@ impl ServerErrorCode<'_> { /// Returns the HTML title for this error. fn title(self) -> &'static str { match self { + ServerErrorCode::SiteNotFound { .. } | ServerErrorCode::CustomDomainNotFound { .. } => { + "Site not found" + } ServerErrorCode::PageNotFound { .. } => "Page not found", ServerErrorCode::FileNotFound { .. } => "File not found", ServerErrorCode::PageFetch { .. 
} => "Cannot load page", @@ -124,6 +136,20 @@ impl ServerErrorCode<'_> { // Write error body match self { + ServerErrorCode::SiteNotFound { site_slug } => { + str_write!( + body, + "No site exists at \"<code>{}</code>\"", + html_escape(site_slug), + ) + } + ServerErrorCode::CustomDomainNotFound { domain } => { + str_write!( + body, + "No site exists with the custom domain \"<code>{}</code>\"", + html_escape(domain), + ) + } ServerErrorCode::PageNotFound { site_id, page_slug } => { str_write!( body, diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 1005312677..67e8520686 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -37,6 +37,7 @@ pub use self::robots::*; pub use self::well_known::*; use crate::{ + error::ServerErrorCode, host::{lookup_host, SiteAndHost}, path::get_path, state::ServerState, @@ -156,11 +157,10 @@ pub async fn handle_host_delegation( } // Main site missing SiteAndHost::MainMissing { site_slug } => { - add_headers!(site_slug); - forward_request!(main_router) + ServerErrorCode::SiteNotFound { site_slug }.into_response() } SiteAndHost::MainCustomMissing => { - forward_request!(main_router) + ServerErrorCode::CustomDomainNotFound { domain: &hostname }.into_response() } // Default site redirect // e.g. "www.wikijump.com/foo" -> "wikijump.com/foo" @@ -178,8 +178,7 @@ pub async fn handle_host_delegation( forward_request!(files_router) } SiteAndHost::FileMissing { site_slug } => { - add_headers!(site_slug); - forward_request!(files_router) + ServerErrorCode::SiteNotFound { site_slug }.into_response() } // Files site by itself // See the case in host.rs for an explanation From fe90c13643c121b1616f46cf58265ec3995e976b Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 02:10:16 -0500 Subject: [PATCH 130/306] Remove unused macro case. --- wws/src/handler/mod.rs | 8 -------- 1 file changed, 8 deletions(-) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 67e8520686..dd0831ec05 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -113,7 +113,6 @@ pub async fn handle_host_delegation( } macro_rules! add_headers { - // Add both headers ($site_id:expr, $site_slug:expr) => {{ // Validate types let _: i64 = $site_id; @@ -124,13 +123,6 @@ pub async fn handle_host_delegation( headers.insert(HEADER_SITE_ID, header_value!(str!($site_id))); headers.insert(HEADER_SITE_SLUG, header_value!($site_slug)); }}; - - // Add only slug (site doesn't exist) - ($site_slug:expr) => {{ - let _: &str = &$site_slug; - let headers = request.headers_mut(); - headers.insert(HEADER_SITE_SLUG, header_value!($site_slug)); - }}; } // Determine what host and site (e.g. main vs files, what site slug and ID) From a45f6ea45dd63dbefb8182f03c2e89d7997bb628 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 02:12:29 -0500 Subject: [PATCH 131/306] Change formatting for error HTML. 
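For example, a page-not-found body now renders as (hypothetical page slug and site ID):

    <strong>[Error 2005]</strong> Cannot find page "<code>some-page</code>" in site ID <code>1</code>.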
--- wws/src/error/html.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index dff76696c1..2e67653385 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -139,21 +139,21 @@ impl ServerErrorCode<'_> { ServerErrorCode::SiteNotFound { site_slug } => { str_write!( body, - "No site exists at \"<code>{}</code>\"", + "No site exists at \"<code>{}</code>\".", html_escape(site_slug), ) } ServerErrorCode::CustomDomainNotFound { domain } => { str_write!( body, - "No site exists with the custom domain \"<code>{}</code>\"", + "No site exists with the custom domain \"<code>{}</code>\".", html_escape(domain), ) } ServerErrorCode::PageNotFound { site_id, page_slug } => { str_write!( body, - "Cannot find page \"<code>{}</code>\" in site ID {}", + "Cannot find page \"<code>{}</code>\" in site ID <code>{}</code>.", html_escape(page_slug), site_id, ); @@ -165,7 +165,7 @@ impl ServerErrorCode<'_> { } => { str_write!( body, - "Cannot find file \"<code>{}</code>\" in page ID {} in site ID {}", + "Cannot find file \"<code>{}</code>\" in page ID <code>{}</code> in site ID <code>{}</code>", html_escape(filename), page_id, site_id, @@ -174,7 +174,7 @@ impl ServerErrorCode<'_> { ServerErrorCode::PageFetch { site_id, page_slug } => { str_write!( body, - "Cannot load page \"<code>{}</code>\" in site ID {}", + "Cannot load page \"<code>{}</code>\" in site ID <code>{}</code>.", html_escape(page_slug), site_id, ); @@ -186,7 +186,7 @@ impl ServerErrorCode<'_> { } => { str_write!( body, - "Cannot load file \"<code>{}</code>\", in page ID {} in site ID {}", + "Cannot load file \"<code>{}</code>\", in page ID <code>{}</code> in site ID <code>{}</code>.", html_escape(filename), page_id, site_id, @@ -199,7 +199,7 @@ impl ServerErrorCode<'_> { } => { str_write!( body, - "Cannot load file data for \"<code>{}</code>\", in page \"<code>{}</code>\" in site ID {}", + "Cannot load file data for \"<code>{}</code>\", in page \"<code>{}</code>\" in site ID <code>{}</code>.", html_escape(filename), html_escape(page_slug), site_id, From aea06f75d9a01dd8ca14b3e1a9ab18a81fb3bf54 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 02:14:53 -0500 Subject: [PATCH 132/306] Add pound sign for error code. --- wws/src/error/html.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index 2e67653385..a16fa799bd 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -132,7 +132,7 @@ impl ServerErrorCode<'_> { body.push_str(HTML_MIDDLE); let error_code = self.error_code(); - str_write!(&mut body, "<strong>[Error {error_code}]</strong> "); + str_write!(&mut body, "<strong>[Error #{error_code}]</strong> "); // Write error body match self { From 0ed7a904995cf74bfdb1636f83cbe01807852a29 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 02:39:00 -0500 Subject: [PATCH 133/306] Fix redis null typing issue. 
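When the whole site_domain hash is missing, every requested field comes back empty, which is why get_file_info() already decodes into per-field Options; the old return typing here wrapped the whole row in a single Option instead, which is what tripped over the nulls. Roughly, the contrast in types (sketch only):

    // before: one Option around the whole row; a reply of all-empty
    // fields does not decode into this cleanly (the null typing issue)
    let value: Option<(i64, String)> = conn.hget(&key, &["id", "slug"]).await?;

    // after: each field is its own Option, so a full cache miss decodes
    // as (None, None) and partially-set rows can be told apart
    let values: (Option<i64>, Option<String>) = conn.hget(&key, &["id", "slug"]).await?;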
--- wws/src/cache.rs | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 895956cbbc..c42fe443b9 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -71,10 +71,26 @@ impl Cache { } pub async fn get_site_from_domain(&self, domain: &str) -> Result<Option<(i64, String)>> { + type SiteDataTuple = (Option<i64>, Option<String>); + let mut conn = get_connection!(self.client); let key = format!("site_domain:{domain}"); - let value = conn.hget(key, &["id", "slug"]).await?; - Ok(value) + let fields = &["id", "slug"]; + let values = conn.hget::<_, _, SiteDataTuple>(&key, fields).await?; + match values { + // Ideally, all of these should be non-null, if it's a cache hit. + (Some(site_id), Some(site_slug)) => Ok(Some((site_id, site_slug))), + + // Cache miss + (None, None) => Ok(None), + + // Some fields are set and others aren't. Let's clear all them out. + _ => { + warn!(key = key, "Inconsistent cache data, deleting"); + hdel!(conn, key, fields); + Ok(None) + } + } } pub async fn set_site_from_domain( @@ -117,7 +133,7 @@ impl Cache { let fields = &["id", "mime", "size", "s3_hash"]; let values = conn.hget::<_, _, FileDataTuple>(&key, fields).await?; match values { - // Ideally, all of these should be non-null, if it's a cache hit. + // Cache hit (Some(file_id), Some(mime), Some(size), Some(s3_hash)) => Ok(Some(FileData { file_id, mime, @@ -128,7 +144,7 @@ impl Cache { // Cache miss (None, None, None, None) => Ok(None), - // Some fields are set and others aren't. Let's clear all them out. + // Like above, we clear out inconsistent fields _ => { warn!(key = key, "Inconsistent cache data, deleting"); hdel!(conn, key, fields); From 5ea5e23f1f553d8cde40f47f9a37e4ca93bbff17 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 02:42:27 -0500 Subject: [PATCH 134/306] Create helper function for clearing inconsistent nulls. --- wws/src/cache.rs | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index c42fe443b9..73100ae420 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -24,7 +24,7 @@ //! compatible with DEEPWELL's Redis code. use crate::{deepwell::FileData, error::Result}; -use redis::AsyncCommands; +use redis::{aio::MultiplexedConnection, AsyncCommands}; macro_rules! get_connection { ($client:expr) => { @@ -86,8 +86,7 @@ impl Cache { // Some fields are set and others aren't. Let's clear all them out. _ => { - warn!(key = key, "Inconsistent cache data, deleting"); - hdel!(conn, key, fields); + clear_inconsistent_fields(&mut conn, &key, fields).await?; Ok(None) } } @@ -146,8 +145,7 @@ impl Cache { // Like above, we clear out inconsistent fields _ => { - warn!(key = key, "Inconsistent cache data, deleting"); - hdel!(conn, key, fields); + clear_inconsistent_fields(&mut conn, &key, fields).await?; Ok(None) } } @@ -169,3 +167,13 @@ impl Cache { Ok(()) } } + +async fn clear_inconsistent_fields( + conn: &mut MultiplexedConnection, + key: &str, + fields: &[&str], +) -> Result<()> { + warn!(key = key, "Inconsistent cache data, deleting"); + hdel!(conn, key, fields); + Ok(()) +} From 60f7b22ee328afb0b242e828581be244feb467b8 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 02:44:39 -0500 Subject: [PATCH 135/306] Fix error message. 
--- wws/src/host.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/host.rs b/wws/src/host.rs index 121760025d..bc2ab5d6af 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -129,7 +129,7 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S } None => { // No such site - warn!(domain = hostname, "No such site with slug (custom)"); + warn!(domain = hostname, "No such site with domain (custom)"); Ok(SiteAndHost::MainCustomMissing) } } From ce12f5cd58d19e113d06c822bc1e95d2def1a4c5 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 02:50:52 -0500 Subject: [PATCH 136/306] Update deepwell dependencies. --- deepwell/Cargo.lock | 745 ++++++++++++++++++++++---------------------- 1 file changed, 368 insertions(+), 377 deletions(-) diff --git a/deepwell/Cargo.lock b/deepwell/Cargo.lock index eff725499a..0111758b7e 100644 --- a/deepwell/Cargo.lock +++ b/deepwell/Cargo.lock @@ -28,18 +28,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "ahash" -version = "0.8.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", -] - [[package]] name = "aho-corasick" version = "1.1.3" @@ -57,9 +45,9 @@ checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd" [[package]] name = "allocator-api2" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45862d1c77f2228b9e10bc609d5bc203d86ebc9b87ad8d5d5167a6c9abf739d9" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "android-tzdata" @@ -117,11 +105,12 @@ dependencies = [ [[package]] name = "anstyle-wincon" -version = "3.0.6" +version = "3.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" +checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" dependencies = [ "anstyle", + "once_cell", "windows-sys 0.59.0", ] @@ -189,18 +178,18 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] name = "async-trait" -version = "0.1.85" +version = "0.1.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +checksum = "644dd749086bf3771a2fbc5f256fdb982d53f011c7d5d560304eafeecebce79d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -220,13 +209,13 @@ checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "attohttpc" -version = "0.28.0" +version = "0.28.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a13149d0cf3f7f9b9261fad4ec63b2efbf9a80665f52def86282d26255e6331" +checksum = "412b79ce053cef36eda52c25664b45ec92a21769488e20d5a8bf0b3c9e1a28cb" dependencies = [ - "http 1.1.0", + "http 1.2.0", "log", - "rustls 0.22.4", + "rustls 0.23.22", "serde", "serde_json", "url", @@ -321,9 +310,9 @@ dependencies = [ [[package]] name = "bigdecimal" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f850665a0385e070b64c38d2354e6c104c8479c59868d1e48a0c13ee2c7a1c1" +checksum = 
"7f31f3af01c5c65a07985c804d3366560e6fa7883d640a122819b14ec327482c" dependencies = [ "autocfg", "libm", @@ -390,9 +379,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.16.0" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" [[package]] name = "bytecheck" @@ -424,15 +413,15 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "f61dac84819c6588b558454b194026eb1f09c293b9036ae9b159e74e73ab6cf9" [[package]] name = "cc" -version = "1.2.1" +version = "1.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd9de9f2205d5ef3fd67e685b0df337994ddd4495e2a28d185500d0e1edfea47" +checksum = "755717a7de9ec452bf7f3f1a3099085deabd7f2962b861dae91ecd7a365903d2" dependencies = [ "jobserver", "libc", @@ -453,9 +442,9 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", @@ -466,9 +455,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.27" +version = "4.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "769b0145982b4b48713e01ec42d61614425f27b7058bda7180a3a41f30104796" +checksum = "3e77c3243bd94243c03672cb5154667347c457ca271254724f9f393aee1c05ff" dependencies = [ "clap_builder", ] @@ -600,9 +589,9 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] @@ -624,9 +613,9 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crossbeam-deque" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" dependencies = [ "crossbeam-epoch", "crossbeam-utils", @@ -643,24 +632,24 @@ dependencies = [ [[package]] name = "crossbeam-queue" -version = "0.3.11" +version = "0.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" dependencies = [ "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = 
"crunchy" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" +checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" [[package]] name = "crypto-common" @@ -696,7 +685,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -707,15 +696,16 @@ checksum = "1d59a706635108a7e8eaae7ec8e6154504fafa4a415ef38690d94fccea051757" [[package]] name = "cuid2" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50e281dc36864ea88fae2ec4e21eb280e8239487acb1ddc59b528b0afa7997bd" +checksum = "7ffc4ec2422180444acb04e5dda013369c9860fe66e8f558aa8c3f265ad195d3" dependencies = [ "cuid-util", "getrandom 0.2.15", "num", "rand 0.8.5", "sha3", + "web-time", ] [[package]] @@ -738,7 +728,7 @@ dependencies = [ "ident_case", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -749,7 +739,7 @@ checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -864,15 +854,15 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.18" +version = "0.99.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +checksum = "3da29a38df43d6f156149c9b43ded5e018ddff2a855cf2cfd62e8cd7d079c69f" dependencies = [ "convert_case", "proc-macro2", "quote", "rustc_version", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -895,7 +885,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -960,7 +950,7 @@ checksum = "f282cfdfe92516eb26c2af8589c274c7c17681f5ecc03c18255fe741c6aa64eb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -995,12 +985,12 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1016,9 +1006,9 @@ dependencies = [ [[package]] name = "event-listener" -version = "5.3.1" +version = "5.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae" dependencies = [ "concurrent-queue", "parking", @@ -1027,9 +1017,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "486f806e73c5707928240ddc295403b1b93c96a02038563881c4a2fd84b81ac4" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "femme" @@ -1132,6 +1122,12 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = 
"0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0d2fde1f7b3d48b8395d5f2de76c18a528bd6a9cdde438df747bfcba3e05d6f" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -1172,7 +1168,7 @@ dependencies = [ "rand 0.8.5", "ref-map", "regex", - "self_cell 1.0.4", + "self_cell 1.1.0", "serde", "serde-wasm-bindgen", "serde_json", @@ -1261,7 +1257,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -1337,6 +1333,18 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "getrandom" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a49c392881ce6d5c3b8cb70f98717b7c07aabbdff06687b9030dbfbe2725f8" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.13.3+wasi-0.2.2", + "windows-targets 0.52.6", +] + [[package]] name = "gimli" version = "0.31.1" @@ -1367,7 +1375,7 @@ dependencies = [ "fnv", "futures-core", "futures-sink", - "http 1.1.0", + "http 1.2.0", "indexmap", "slab", "tokio", @@ -1381,7 +1389,7 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.8", + "ahash", ] [[package]] @@ -1389,24 +1397,25 @@ name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" -dependencies = [ - "ahash 0.8.11", - "allocator-api2", -] [[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] [[package]] name = "hashlink" -version = "0.9.1" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" dependencies = [ - "hashbrown 0.14.5", + "hashbrown 0.15.2", ] [[package]] @@ -1421,12 +1430,6 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" -[[package]] -name = "hermit-abi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" - [[package]] name = "hex" version = "0.4.3" @@ -1456,11 +1459,11 @@ dependencies = [ [[package]] name = "home" -version = "0.5.9" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +checksum = "589533453244b0995c858700322199b2becb13b627df2851f64a2775d024abcf" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -1487,9 +1490,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -1514,7 +1517,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -1525,16 +1528,16 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.9.5" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" +checksum = "f2d708df4e7140240a16cd6ab0ab65c972d7433ab77819ea693fde9c43811e2a" [[package]] name = "httpdate" @@ -1544,9 +1547,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.31" +version = "0.14.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" +checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" dependencies = [ "bytes", "futures-channel", @@ -1567,15 +1570,15 @@ dependencies = [ [[package]] name = "hyper" -version = "1.5.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" dependencies = [ "bytes", "futures-channel", "futures-util", "h2", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "httparse", "httpdate", @@ -1594,7 +1597,7 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper 0.14.31", + "hyper 0.14.32", "rustls 0.21.12", "tokio", "tokio-rustls 0.24.1", @@ -1602,18 +1605,18 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.27.3" +version = "0.27.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" +checksum = "2d191583f3da1305256f22463b9bb0471acad48a4e534a5218b9963e9c1f59b2" dependencies = [ "futures-util", - "http 1.1.0", - "hyper 1.5.1", + "http 1.2.0", + "hyper 1.6.0", "hyper-util", - "rustls 0.23.18", + "rustls 0.23.22", "rustls-pki-types", "tokio", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", "tower-service", "webpki-roots", ] @@ -1627,9 +1630,9 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", - "hyper 1.5.1", + "hyper 1.6.0", "pin-project-lite", "socket2", "tokio", @@ -1775,7 +1778,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -1807,9 +1810,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.6.0" +version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" dependencies = [ "equivalent", "hashbrown 0.15.2", @@ -1823,7 +1826,7 @@ checksum = "0122b7114117e64a63ac49f752a5ca4624d534c7b1c7de796ac196381cd2d947" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -1867,9 +1870,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.10.1" +version = "2.11.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] name = "is_terminal_polyfill" @@ -1886,20 +1889,11 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] - [[package]] name = "itoa" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "540654e97a3f4470a492cd30ff187bc95d89557a903a2bbf112e2fae98104ef2" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jobserver" @@ -1912,18 +1906,19 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.72" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a88f1bda2bd75b0452a14784937d796722fdebfe50df998aeb3f0b7603019a9" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ + "once_cell", "wasm-bindgen", ] [[package]] name = "jsonrpsee" -version = "0.24.7" +version = "0.24.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5c71d8c1a731cc4227c2f698d377e7848ca12c8a48866fc5e6951c43a4db843" +checksum = "834af00800e962dee8f7bfc0f60601de215e73e78e5497d733a2919da837d3c8" dependencies = [ "jsonrpsee-core", "jsonrpsee-proc-macros", @@ -1935,20 +1930,20 @@ dependencies = [ [[package]] name = "jsonrpsee-core" -version = "0.24.7" +version = "0.24.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2882f6f8acb9fdaec7cefc4fd607119a9bd709831df7d7672a1d3b644628280" +checksum = "76637f6294b04e747d68e69336ef839a3493ca62b35bf488ead525f7da75c5bb" dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", "jsonrpsee-types", "parking_lot", "rand 0.8.5", - "rustc-hash 2.0.0", + "rustc-hash 2.1.1", "serde", "serde_json", "thiserror 1.0.69", @@ -1958,28 +1953,28 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.24.7" +version = "0.24.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06c01ae0007548e73412c08e2285ffe5d723195bf268bce67b1b77c3bb2a14d" +checksum = "6fcae0c6c159e11541080f1f829873d8f374f81eda0abc67695a13fc8dc1a580" dependencies = [ "heck 0.5.0", "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] name = "jsonrpsee-server" -version = "0.24.7" +version = "0.24.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82ad8ddc14be1d4290cd68046e7d1d37acd408efed6d3ca08aefcc3ad6da069c" +checksum = "66b7a3df90a1a60c3ed68e7ca63916b53e9afa928e33531e87f61a9c8e9ae87b" dependencies = [ "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", - "hyper 1.5.1", + "hyper 1.6.0", "hyper-util", "jsonrpsee-core", "jsonrpsee-types", @@ -1992,17 +1987,17 @@ dependencies = [ "tokio", "tokio-stream", "tokio-util", - "tower", + "tower 0.4.13", "tracing", ] [[package]] name = "jsonrpsee-types" -version = "0.24.7" +version = "0.24.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a178c60086f24cc35bb82f57c651d0d25d99c4742b4d335de04e97fa1f08a8a1" +checksum = 
"ddb81adb1a5ae9182df379e374a79e24e992334e7346af4d065ae5b2acb8d4c6" dependencies = [ - "http 1.1.0", + "http 1.2.0", "serde", "serde_json", "thiserror 1.0.69", @@ -2093,16 +2088,15 @@ version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" dependencies = [ - "cc", "pkg-config", "vcpkg", ] [[package]] name = "libz-sys" -version = "1.1.20" +version = "1.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472" +checksum = "df9b68e50e6e0b26f672573834882eb57759f6db9b3be2ea3c35c91188bb4eaa" dependencies = [ "cc", "libc", @@ -2112,9 +2106,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "litemap" @@ -2162,7 +2156,7 @@ checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -2193,44 +2187,27 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - [[package]] name = "miniz_oxide" -version = "0.8.0" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" dependencies = [ "adler2", ] [[package]] name = "mio" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" dependencies = [ - "hermit-abi", "libc", "log", "wasi 0.11.0+wasi-snapshot-preview1", "windows-sys 0.52.0", ] -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - [[package]] name = "notify" version = "8.0.0" @@ -2355,24 +2332,24 @@ dependencies = [ [[package]] name = "object" -version = "0.36.5" +version = "0.36.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.20.2" +version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "945462a4b81e43c4e3ba96bd7b49d834c6f61198356aa858733bc4acf3cbe62e" [[package]] name = "openssl-probe" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" 
+checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "ordered-float" @@ -2395,9 +2372,9 @@ dependencies = [ [[package]] name = "ouroboros" -version = "0.18.4" +version = "0.18.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "944fa20996a25aded6b4795c6d63f10014a7a83f8be9828a11860b08c5fc4a67" +checksum = "1e0f050db9c44b97a94723127e6be766ac5c340c48f2c4bb3ffa11713744be59" dependencies = [ "aliasable", "ouroboros_macro", @@ -2406,16 +2383,15 @@ dependencies = [ [[package]] name = "ouroboros_macro" -version = "0.18.4" +version = "0.18.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39b0deead1528fd0e5947a8546a9642a9777c25f6e1e26f34c97b204bbb465bd" +checksum = "3c7028bdd3d43083f6d8d4d5187680d0d3560d54df4cc9d752005268b41e64d0" dependencies = [ "heck 0.4.1", - "itertools 0.12.1", "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -2435,7 +2411,7 @@ dependencies = [ "cssparser", "dashmap", "data-encoding", - "itertools 0.10.5", + "itertools", "lazy_static", "parcel_selectors", "parcel_sourcemap", @@ -2538,20 +2514,20 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879952a81a83930934cbf1786752d6dedc3b1f29e8f8fb2ad1d0a36f377cf442" +checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror 1.0.69", + "thiserror 2.0.11", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d214365f632b123a47fd913301e14c946c61d1c183ee245fa76eb752e59a02dd" +checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e" dependencies = [ "pest", "pest_generator", @@ -2559,22 +2535,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb55586734301717aea2ac313f50b2eb8f60d2fc3dc01d190eefa2e625f60c4e" +checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] name = "pest_meta" -version = "2.7.14" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b75da2a70cf4d9cb76833c990ac9cd3923c9a8905a8929789ce347c84564d03d" +checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea" dependencies = [ "once_cell", "pest", @@ -2665,29 +2641,29 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.7" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +checksum = "dfe2e71e1471fe07709406bf725f710b02927c9c54b2b5b2ec0e8087d97c327d" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.7" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +checksum = "f6e859e6e5bd50440ab63c47e3ebabc90f26251f7c73c3d3e837b74a1cc3fa67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] name = "pin-project-lite" -version = 
"0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -2795,7 +2771,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -2806,9 +2782,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.92" +version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] @@ -2821,7 +2797,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", "version_check", "yansi", ] @@ -2866,8 +2842,8 @@ dependencies = [ "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash 2.0.0", - "rustls 0.23.18", + "rustc-hash 2.1.1", + "rustls 0.23.22", "socket2", "thiserror 2.0.11", "tokio", @@ -2884,8 +2860,8 @@ dependencies = [ "getrandom 0.2.15", "rand 0.8.5", "ring", - "rustc-hash 2.0.0", - "rustls 0.23.18", + "rustc-hash 2.1.1", + "rustls 0.23.22", "rustls-pki-types", "slab", "thiserror 2.0.11", @@ -2896,9 +2872,9 @@ dependencies = [ [[package]] name = "quinn-udp" -version = "0.5.7" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d5a626c6807713b15cac82a6acaccd6043c9a5408c24baae07611fec3f243da" +checksum = "1c40286217b4ba3a71d644d752e6a0b71f13f1b6a2c5311acfcbe0c2418ed904" dependencies = [ "cfg_aliases", "libc", @@ -2910,9 +2886,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.37" +version = "1.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" dependencies = [ "proc-macro2", ] @@ -3061,9 +3037,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" +checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" dependencies = [ "bitflags 2.8.0", ] @@ -3114,19 +3090,19 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.9" +version = "0.12.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" +checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da" dependencies = [ "base64 0.22.1", "bytes", "futures-core", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "http-body-util", - "hyper 1.5.1", - "hyper-rustls 0.27.3", + "hyper 1.6.0", + "hyper-rustls 0.27.5", "hyper-util", "ipnet", "js-sys", @@ -3136,7 +3112,7 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls 0.23.18", + "rustls 0.23.22", "rustls-pemfile 2.2.0", "rustls-pki-types", "serde", @@ -3144,7 +3120,8 @@ dependencies = [ "serde_urlencoded", "sync_wrapper", "tokio", - "tokio-rustls 0.26.0", + "tokio-rustls 0.26.1", + "tower 0.5.2", "tower-service", "url", "wasm-bindgen", @@ 
-3206,9 +3183,9 @@ checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" [[package]] name = "rsa" -version = "0.9.6" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" +checksum = "47c75d7c5c6b673e58bf54d8544a9f432e3a925b0e80f7cd3602ab5c50c55519" dependencies = [ "const-oid", "digest", @@ -3279,7 +3256,7 @@ dependencies = [ "hex", "hmac", "http 0.2.12", - "hyper 0.14.31", + "hyper 0.14.32", "hyper-rustls 0.24.2", "log", "maybe-async", @@ -3325,9 +3302,9 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" -version = "2.0.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" [[package]] name = "rustc_version" @@ -3340,15 +3317,15 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.41" +version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7f649912bc1495e167a6edee79151c84b1bad49748cb4f1f1167f459f6224f6" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ "bitflags 2.8.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3379,9 +3356,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.18" +version = "0.23.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c9cc1d47e243d655ace55ed38201c19ae02c148ae56412ab8750e8f0166ab7f" +checksum = "9fb9263ab4eb695e42321db096e3b8fbd715a59b154d5c88d82db2175b681ba7" dependencies = [ "once_cell", "ring", @@ -3436,9 +3413,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" +checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" dependencies = [ "web-time", ] @@ -3466,15 +3443,15 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "6ea1a2d0a644769cc99faa24c3ad26b379b786fe7c36fd3c546254801650e6dd" [[package]] name = "same-file" @@ -3520,14 +3497,14 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] name = "sea-orm" -version = "1.1.1" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5680a8b686985116607ef5f5af2b1f9e1cc2c228330e93101816a0baa279afa" +checksum = "1a93194430b419da0801f404baf3b986399d6a2a4f43bc79bc96dea83f92ca43" dependencies = [ "async-stream", "async-trait", @@ -3553,15 +3530,15 @@ dependencies = [ [[package]] name = "sea-orm-macros" -version = "1.1.1" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3a239e3bb1b566ad4ec2654d0d193d6ceddfd733487edc9c21a64d214c773910" +checksum = "d19e8f22fb474a8a622eb516c46885a080535d8d559386188f525977eaad32b3" dependencies = [ "heck 0.4.1", "proc-macro2", "quote", "sea-bae", - "syn 2.0.89", + "syn 2.0.98", "unicode-ident", ] @@ -3600,7 +3577,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", "thiserror 1.0.69", ] @@ -3625,9 +3602,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.12.1" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" dependencies = [ "core-foundation-sys", "libc", @@ -3639,20 +3616,20 @@ version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e14e4d63b804dc0c7ec4a1e52bcb63f02c7ac94476755aa579edac21e01f915d" dependencies = [ - "self_cell 1.0.4", + "self_cell 1.1.0", ] [[package]] name = "self_cell" -version = "1.0.4" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d369a96f978623eb3dc28807c4852d6cc617fed53da5d3c400feff1ef34a714a" +checksum = "c2fdfc24bc566f839a2da4c4295b82db7d25a24253867d5c64355abb5799bdbe" [[package]] name = "semver" -version = "1.0.23" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" +checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03" [[package]] name = "serde" @@ -3682,7 +3659,7 @@ checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -3696,9 +3673,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.137" +version = "1.0.138" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "930cfb6e6abf99298aaad7d29abbef7a9999a9a8806a40088f55f0dcec03146b" +checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" dependencies = [ "itoa", "memchr", @@ -3714,7 +3691,7 @@ checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -3842,9 +3819,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.7" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", "windows-sys 0.52.0", @@ -3852,14 +3829,14 @@ dependencies = [ [[package]] name = "soketto" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37468c595637c10857701c990f93a40ce0e357cedb0953d1c26c8d8027f9bb53" +checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721" dependencies = [ "base64 0.22.1", "bytes", "futures", - "http 1.1.0", + "http 1.2.0", "httparse", "log", "rand 0.8.5", @@ -3885,21 +3862,11 @@ dependencies = [ "der", ] -[[package]] -name = "sqlformat" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bba3a93db0cc4f7bdece8bb09e77e2e785c20bfebf79eb8340ed80708048790" -dependencies = [ - "nom", - "unicode_categories", -] - [[package]] name = "sqlx" -version 
= "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93334716a037193fac19df402f8571269c84a00852f6a7066b5d2616dcd64d3e" +checksum = "4410e73b3c0d8442c5f99b425d7a435b5ee0ae4167b3196771dd3f7a01be745f" dependencies = [ "sqlx-core", "sqlx-macros", @@ -3910,39 +3877,33 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d8060b456358185f7d50c55d9b5066ad956956fddec42ee2e8567134a8936e" +checksum = "6a007b6936676aa9ab40207cde35daab0a04b823be8ae004368c0793b96a61e0" dependencies = [ - "atoi", - "byteorder", "bytes", "crc", "crossbeam-queue", "either", "event-listener", - "futures-channel", "futures-core", "futures-intrusive", "futures-io", "futures-util", - "hashbrown 0.14.5", + "hashbrown 0.15.2", "hashlink", - "hex", "indexmap", "log", "memchr", "once_cell", - "paste", "percent-encoding", - "rustls 0.23.18", + "rustls 0.23.22", "rustls-pemfile 2.2.0", "serde", "serde_json", "sha2", "smallvec", - "sqlformat", - "thiserror 1.0.69", + "thiserror 2.0.11", "time", "tokio", "tokio-stream", @@ -3953,22 +3914,22 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cac0692bcc9de3b073e8d747391827297e075c7710ff6276d9f7a1f3d58c6657" +checksum = "3112e2ad78643fef903618d78cf0aec1cb3134b019730edb039b69eaf531f310" dependencies = [ "proc-macro2", "quote", "sqlx-core", "sqlx-macros-core", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] name = "sqlx-macros-core" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1804e8a7c7865599c9c79be146dc8a9fd8cc86935fa641d3ea58e5f0688abaa5" +checksum = "4e9f90acc5ab146a99bf5061a7eb4976b573f560bc898ef3bf8435448dd5e7ad" dependencies = [ "dotenvy", "either", @@ -3984,7 +3945,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.89", + "syn 2.0.98", "tempfile", "tokio", "url", @@ -3992,9 +3953,9 @@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64bb4714269afa44aef2755150a0fc19d756fb580a67db8885608cf02f47d06a" +checksum = "4560278f0e00ce64938540546f59f590d60beee33fffbd3b9cd47851e5fff233" dependencies = [ "atoi", "base64 0.22.1", @@ -4027,7 +3988,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror 1.0.69", + "thiserror 2.0.11", "time", "tracing", "whoami", @@ -4035,9 +3996,9 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fa91a732d854c5d7726349bb4bb879bb9478993ceb764247660aee25f67c2f8" +checksum = "c5b98a57f363ed6764d5b3a12bfedf62f07aa16e1856a7ddc2a0bb190a959613" dependencies = [ "atoi", "base64 0.22.1", @@ -4048,7 +4009,6 @@ dependencies = [ "etcetera", "futures-channel", "futures-core", - "futures-io", "futures-util", "hex", "hkdf", @@ -4066,7 +4026,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror 1.0.69", + "thiserror 2.0.11", "time", "tracing", "whoami", @@ -4074,9 +4034,9 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5b2cf34a45953bfd3daaf3db0f7a7878ab9b7a6b91b422d24a7a9e4c857b680" +checksum = 
"f85ca71d3a5b24e64e1d08dd8fe36c6c95c339a896cc33068148906784620540" dependencies = [ "atoi", "flume", @@ -4147,7 +4107,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -4158,15 +4118,15 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "sval" -version = "2.13.2" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6dc0f9830c49db20e73273ffae9b5240f63c42e515af1da1fceefb69fceafd8" +checksum = "d4c2f18f53c889ec3dfe1c08b20fd51406d09b14bf18b366416718763ccff05a" [[package]] name = "sval_buffer" -version = "2.13.2" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "429922f7ad43c0ef8fd7309e14d750e38899e32eb7e8da656ea169dd28ee212f" +checksum = "4b8cb1bb48d0bed828b908e6b99e7ab8c7244994dc27948a2e31d42e8c4d77c1" dependencies = [ "sval", "sval_ref", @@ -4174,18 +4134,18 @@ dependencies = [ [[package]] name = "sval_dynamic" -version = "2.13.2" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f16ff5d839396c11a30019b659b0976348f3803db0626f736764c473b50ff4" +checksum = "ba574872d4ad653071a9db76c49656082db83a37cd5f559874273d36b4a02b9d" dependencies = [ "sval", ] [[package]] name = "sval_fmt" -version = "2.13.2" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c01c27a80b6151b0557f9ccbe89c11db571dc5f68113690c1e028d7e974bae94" +checksum = "944450b2dbbf8aae98537776b399b23d72b19243ee42522cfd110305f3c9ba5a" dependencies = [ "itoa", "ryu", @@ -4194,9 +4154,9 @@ dependencies = [ [[package]] name = "sval_json" -version = "2.13.2" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0deef63c70da622b2a8069d8600cf4b05396459e665862e7bdb290fd6cf3f155" +checksum = "411bbd543c413796ccfbaa44f6676e20032b6c69e4996cb6c3e6ef30c79b96d1" dependencies = [ "itoa", "ryu", @@ -4205,9 +4165,9 @@ dependencies = [ [[package]] name = "sval_nested" -version = "2.13.2" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a39ce5976ae1feb814c35d290cf7cf8cd4f045782fe1548d6bc32e21f6156e9f" +checksum = "f30582d2a90869b380f8260559138c1b68ac3e0765520959f22a1a1fdca31769" dependencies = [ "sval", "sval_buffer", @@ -4216,18 +4176,18 @@ dependencies = [ [[package]] name = "sval_ref" -version = "2.13.2" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7c6ee3751795a728bc9316a092023529ffea1783499afbc5c66f5fabebb1fa" +checksum = "762d3fbf3c0869064b7c93808c67ad2ed0292dde9b060ac282817941d4707dff" dependencies = [ "sval", ] [[package]] name = "sval_serde" -version = "2.13.2" +version = "2.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a5572d0321b68109a343634e3a5d576bf131b82180c6c442dee06349dfc652a" +checksum = "752d307438c6a6a3d095a2fecf6950cfb946d301a5bd6b57f047db4f6f8d97b9" dependencies = [ "serde", "sval", @@ -4247,9 +4207,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.89" +version = "2.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e" +checksum = "36147f1a48ae0ec2b5b3bc5b537d267457555a10dc06f3dbc8cb11ba3006d3b1" dependencies = [ "proc-macro2", "quote", @@ -4285,7 +4245,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -4296,12 +4256,13 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.14.0" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" +checksum = "38c246215d7d24f48ae091a2902398798e05d978b24315d6efbc00ede9a8bb91" dependencies = [ "cfg-if", "fastrand", + "getrandom 0.3.1", "once_cell", "rustix", "windows-sys 0.59.0", @@ -4342,7 +4303,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -4353,7 +4314,7 @@ checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -4408,9 +4369,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" dependencies = [ "tinyvec_macros", ] @@ -4447,7 +4408,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -4484,20 +4445,19 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.26.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" dependencies = [ - "rustls 0.23.18", - "rustls-pki-types", + "rustls 0.23.22", "tokio", ] [[package]] name = "tokio-stream" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -4507,9 +4467,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", @@ -4521,9 +4481,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.19" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +checksum = "cd87a5cdd6ffab733b2f74bc4fd7ee5fff6634124999ac278c35fc78c6120148" dependencies = [ "serde", "serde_spanned", @@ -4542,9 +4502,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.22" +version = "0.22.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" dependencies = [ "indexmap", "serde", @@ -4568,6 +4528,21 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower" +version = "0.5.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -4582,9 +4557,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -4594,20 +4569,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", ] @@ -4677,15 +4652,15 @@ checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-bidi" -version = "0.3.17" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" [[package]] name = "unicode-ident" -version = "1.0.14" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" +checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" [[package]] name = "unicode-normalization" @@ -4708,12 +4683,6 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" -[[package]] -name = "unicode_categories" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" - [[package]] name = "untrusted" version = "0.9.0" @@ -4751,9 +4720,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.11.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" +checksum = "ced87ca4be083373936a67f8de945faa23b6b42384bd5b64434850802c6dccd0" dependencies = [ "serde", ] @@ -4843,6 +4812,15 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasi" +version = "0.13.3+wasi-0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26816d2e1a4a36a2940b96c5296ce403917633dff8f3440e9b236ed6f6bacad2" +dependencies = [ + 
"wit-bindgen-rt", +] + [[package]] name = "wasite" version = "0.1.0" @@ -4851,12 +4829,13 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.95" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "128d1e363af62632b8eb57219c8fd7877144af57558fb2ef0368d0087bddeb2e" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", "once_cell", + "rustversion", "serde", "serde_json", "wasm-bindgen-macro", @@ -4864,36 +4843,36 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.95" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb6dd4d3ca0ddffd1dd1c9c04f94b868c37ff5fac97c30b97cff2d74fce3a358" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.45" +version = "0.4.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7ec4f8827a71586374db3e87abdb5a2bb3a15afed140221307c3ec06b1f63b" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" dependencies = [ "cfg-if", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.95" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e79384be7f8f5a9dd5d7167216f022090cf1f9ec128e6e6a482a2cb5c5422c56" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4901,28 +4880,31 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.95" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.95" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65fc09f10666a9f147042251e0dda9c18f166ff7de300607007e96bdebc1068d" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] [[package]] name = "web-sys" -version = "0.3.72" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6488b90108c040df0fe62fa815cbdee25124641df01814dd7282749234c6112" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" dependencies = [ "js-sys", "wasm-bindgen", @@ -4940,9 +4922,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.7" +version = "0.26.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" +checksum = "2210b291f7ea53617fbafcc4939f10914214ec15aace5ba62293a668f322c5c9" dependencies = [ "rustls-pki-types", ] @@ -5188,13 +5170,22 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.20" +version = "0.7.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" +checksum = "86e376c75f4f43f44db463cf729e0d3acbf954d13e22c51e26e4c264b4ab545f" dependencies = [ "memchr", ] +[[package]] +name = "wit-bindgen-rt" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3268f3d866458b787f390cf61f4bbb563b922d091359f9608842999eaee3943c" +dependencies = [ + "bitflags 2.8.0", +] + [[package]] name = "write16" version = "1.0.0" @@ -5242,7 +5233,7 @@ checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", "synstructure 0.13.1", ] @@ -5264,7 +5255,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] [[package]] @@ -5284,7 +5275,7 @@ checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", "synstructure 0.13.1", ] @@ -5313,5 +5304,5 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.89", + "syn 2.0.98", ] From 1a64e57a0c909b1ee4c851c63fd50723f9b660ca Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 15:42:00 -0500 Subject: [PATCH 137/306] Change framerail proxy setup logic. --- wws/src/handler/framerail.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/wws/src/handler/framerail.rs b/wws/src/handler/framerail.rs index 4ac36cbc78..483e305fd9 100644 --- a/wws/src/handler/framerail.rs +++ b/wws/src/handler/framerail.rs @@ -26,19 +26,17 @@ use axum::{ response::Html, }; +const FRAMERAIL_HOST: &str = "framerail:3000"; + pub async fn proxy_framerail( State(state): State<ServerState>, mut req: Request, ) -> Html<&'static str> { info!("Proxying request to framerail"); - // Get path and query + // Create framerail URL we're proxying to let path = get_path(req.uri()); - - // Create and set framerail URL - let framerail_host = "framerail"; // TODO - let framerail_port = 3000; // TODO - let uri = format!("http://{framerail_host}:{framerail_port}{path}"); + let uri = format!("http://{FRAMERAIL_HOST}{path}"); *req.uri_mut() = Uri::try_from(uri).expect("Internal framerail URI is invalid"); // TODO From 9c3d4bb9d66e9170385717af109e208403b5b150 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 17:53:33 -0500 Subject: [PATCH 138/306] Log proxied path. 
--- wws/src/handler/framerail.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/wws/src/handler/framerail.rs b/wws/src/handler/framerail.rs index 483e305fd9..f413e726d6 100644 --- a/wws/src/handler/framerail.rs +++ b/wws/src/handler/framerail.rs @@ -23,7 +23,7 @@ use crate::state::ServerState; use axum::{ extract::{Request, State}, http::{status::StatusCode, Uri}, - response::Html, + response::Response, }; const FRAMERAIL_HOST: &str = "framerail:3000"; @@ -31,11 +31,9 @@ const FRAMERAIL_HOST: &str = "framerail:3000"; pub async fn proxy_framerail( State(state): State<ServerState>, mut req: Request, -) -> Html<&'static str> { - info!("Proxying request to framerail"); - - // Create framerail URL we're proxying to +) -> Response { let path = get_path(req.uri()); + info!(path = path, "Proxying request to framerail"); let uri = format!("http://{FRAMERAIL_HOST}{path}"); *req.uri_mut() = Uri::try_from(uri).expect("Internal framerail URI is invalid"); From 1541f802b2516577eab4454296947ff67d95abd1 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 18:28:54 -0500 Subject: [PATCH 139/306] Add http client to ServerState. --- wws/Cargo.lock | 1 + wws/Cargo.toml | 1 + wws/src/state.rs | 6 ++++++ 3 files changed, 8 insertions(+) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 3436cbb63d..f9c5a88b86 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -2696,6 +2696,7 @@ dependencies = [ "clap", "color-backtrace", "dotenvy", + "hyper-util", "jsonrpsee", "once_cell", "paste", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 2394bf0c6b..72a7245677 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -19,6 +19,7 @@ axum-extra = { version = "0.10", features = ["attachment"] } clap = "4" color-backtrace = "0.6" dotenvy = "0.15" +hyper-util = { version = "0.1", features = ["client", "client-legacy", "http1", "http2", "tokio"] } jsonrpsee = { version = "0.24", features = ["async-client", "jsonrpsee-http-client"] } once_cell = "1" paste = "1" diff --git a/wws/src/state.rs b/wws/src/state.rs index 3ff74c44f6..d3e0b2801d 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -24,17 +24,21 @@ use crate::{ deepwell::{Deepwell, Domains, FileData, PageData, SiteData}, error::Result, }; +use axum::body::Body; use s3::bucket::Bucket; use std::sync::Arc; use std::time::Duration; +use hyper_util::{client::legacy::{Client as HyperClient, connect::HttpConnector}, rt::TokioExecutor}; const BUCKET_REQUEST_TIMEOUT: Duration = Duration::from_millis(200); pub type ServerState = Arc<ServerStateInner>; +pub type Client = HyperClient<HttpConnector, Body>; #[derive(Debug)] pub struct ServerStateInner { pub domains: Domains, + pub client: Client, pub deepwell: Deepwell, pub cache: Cache, pub s3_bucket: Box<Bucket>, @@ -45,6 +49,7 @@ pub async fn build_server_state(secrets: Secrets) -> Result<ServerState> { deepwell.check().await; let domains = deepwell.domains().await?; let cache = Cache::connect(&secrets.redis_url)?; + let client = HyperClient::builder(TokioExecutor::new()).build(HttpConnector::new()); let s3_bucket = { let mut bucket = Bucket::new( &secrets.s3_bucket, @@ -62,6 +67,7 @@ pub async fn build_server_state(secrets: Secrets) -> Result<ServerState> { Ok(Arc::new(ServerStateInner { domains, + client, deepwell, cache, s3_bucket, From 017c59d3a98b8dcc77895e694a83597fbef0a20c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 18:57:01 -0500 Subject: [PATCH 140/306] Implement proxy step within framerail handler. 
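The handler now forwards the rewritten request through the shared hyper-util legacy client added to ServerState in the previous commit, mapping any client error to 502 Bad Gateway. This relies on axum's blanket impls: Result<T, E> is IntoResponse when both arms are, and the http::Response<hyper::body::Incoming> yielded by the legacy client satisfies axum's response-body bounds.

Consolidated sketch of the pieces (illustrative only; the real code reuses the client stored in ServerState rather than building one per request, and assumes axum 0.8 with hyper-util 0.1):

    use axum::{
        body::Body,
        extract::Request,
        http::StatusCode,
        response::{IntoResponse, Response},
    };
    use hyper_util::{
        client::legacy::{connect::HttpConnector, Client},
        rt::TokioExecutor,
    };

    async fn forward(req: Request) -> Response {
        // Plain HTTP client suitable for proxying to an internal service.
        let client: Client<HttpConnector, Body> =
            Client::builder(TokioExecutor::new()).build(HttpConnector::new());

        client
            .request(req)
            .await
            .map_err(|_| StatusCode::BAD_GATEWAY)
            .into_response()
    }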
--- wws/src/handler/framerail.rs | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/wws/src/handler/framerail.rs b/wws/src/handler/framerail.rs index f413e726d6..0827c6a972 100644 --- a/wws/src/handler/framerail.rs +++ b/wws/src/handler/framerail.rs @@ -23,7 +23,7 @@ use crate::state::ServerState; use axum::{ extract::{Request, State}, http::{status::StatusCode, Uri}, - response::Response, + response::{IntoResponse, Response}, }; const FRAMERAIL_HOST: &str = "framerail:3000"; @@ -37,6 +37,13 @@ pub async fn proxy_framerail( let uri = format!("http://{FRAMERAIL_HOST}{path}"); *req.uri_mut() = Uri::try_from(uri).expect("Internal framerail URI is invalid"); - // TODO - todo!() + state + .client + .request(req) + .await + .map_err(|error| { + error!("Reverse proxy to framerail failed: {error}"); + StatusCode::BAD_GATEWAY + }) + .into_response() } From 795c59ba7fee2750d951c8f8e3e25eccedf7d546 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 19:01:58 -0500 Subject: [PATCH 141/306] Add FRAMERAIL_HOST to env. --- wws/.env.example | 4 ++++ wws/src/config/mod.rs | 3 ++- wws/src/config/secrets.rs | 8 +++++++- 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/wws/.env.example b/wws/.env.example index e5cce909aa..683aeb80b5 100644 --- a/wws/.env.example +++ b/wws/.env.example @@ -10,6 +10,10 @@ DEEPWELL_URL=http://localhost:2747 # Includes password (if needed) to connect. REDIS_URL=redis://localhost +# framerail host +# Includes the port number. +FRAMERAIL_HOST=localhost:3000 + # S3 configuration settings S3_BUCKET=deepwell-files diff --git a/wws/src/config/mod.rs b/wws/src/config/mod.rs index 2cb96bd773..76e3429f7d 100644 --- a/wws/src/config/mod.rs +++ b/wws/src/config/mod.rs @@ -65,8 +65,8 @@ pub fn load_config() -> (Config, Secrets) { // Process secrets let deepwell_url = get_env!("DEEPWELL_URL"); - let redis_url = get_env!("REDIS_URL"); + let framerail_host = get_env!("FRAMERAIL_HOST"); let s3_bucket = get_env!("S3_BUCKET"); let s3_region = match env::var("S3_AWS_REGION") { @@ -134,6 +134,7 @@ pub fn load_config() -> (Config, Secrets) { let secrets = Secrets { deepwell_url, redis_url, + framerail_host, s3_bucket, s3_region, s3_path_style, diff --git a/wws/src/config/secrets.rs b/wws/src/config/secrets.rs index fbc8aa77b2..fcf9e8ace1 100644 --- a/wws/src/config/secrets.rs +++ b/wws/src/config/secrets.rs @@ -22,7 +22,7 @@ use s3::{creds::Credentials, region::Region}; #[derive(Debug, Clone)] pub struct Secrets { - /// The URL to the DEEPWELL server to connect to. + /// The URL of the DEEPWELL server to connect to. /// /// Set using environment variable `DEEPWELL_URL`. pub deepwell_url: String, @@ -32,6 +32,12 @@ pub struct Secrets { /// Set using environment variable `REDIS_URL`. pub redis_url: String, + /// The host of the framerail server to reverse proxy from. + /// This includes the port number, if it's not `80`. + /// + /// Set using environment variable `FRAMERAIL_HOST`. + pub framerail_host: String, + /// The name of the S3 bucket that file blobs are kept in. /// The bucket must already exist prior to program invocation. /// From e993c1b585058dd2c3e11b047fffad4b61645950 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 22:40:38 -0500 Subject: [PATCH 142/306] Abstract framerail reverse proxy URI construction. 
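The hard-coded FRAMERAIL_HOST constant moves into a small Framerail helper that is built from the FRAMERAIL_HOST secret and stored on ServerState, keeping URI assembly for the reverse proxy in one place.

Usage sketch of the new helper (illustrative; in practice the host comes from configuration and the path is taken from the incoming request via get_path):

    let framerail = Framerail::new(String::from("framerail:3000"));
    let uri = framerail.proxy_uri("/some-page?rev=3");
    // e.g. http://framerail:3000/some-page?rev=3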
--- wws/src/framerail.rs | 38 ++++++++++++++++++++++++++++++++++++ wws/src/handler/framerail.rs | 12 +++--------- wws/src/main.rs | 1 + wws/src/state.rs | 9 ++++++++- 4 files changed, 50 insertions(+), 10 deletions(-) create mode 100644 wws/src/framerail.rs diff --git a/wws/src/framerail.rs b/wws/src/framerail.rs new file mode 100644 index 0000000000..662e0e262f --- /dev/null +++ b/wws/src/framerail.rs @@ -0,0 +1,38 @@ +/* + * framerail.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +use axum::http::Uri; + +#[derive(Debug)] +pub struct Framerail { + host: String, +} + +impl Framerail { + #[inline] + pub fn new(host: String) -> Self { + Framerail { host } + } + + pub fn proxy_uri(&self, path: &str) -> Uri { + let uri = format!("http://{}{}", self.host, path); + Uri::try_from(uri).expect("Internal framerail URI is invalid") + } +} diff --git a/wws/src/handler/framerail.rs b/wws/src/handler/framerail.rs index 0827c6a972..ce152b2c01 100644 --- a/wws/src/handler/framerail.rs +++ b/wws/src/handler/framerail.rs @@ -22,20 +22,14 @@ use crate::path::get_path; use crate::state::ServerState; use axum::{ extract::{Request, State}, - http::{status::StatusCode, Uri}, + http::status::StatusCode, response::{IntoResponse, Response}, }; -const FRAMERAIL_HOST: &str = "framerail:3000"; - -pub async fn proxy_framerail( - State(state): State<ServerState>, - mut req: Request, -) -> Response { +pub async fn proxy_framerail(State(state): State<ServerState>, mut req: Request) -> Response { let path = get_path(req.uri()); info!(path = path, "Proxying request to framerail"); - let uri = format!("http://{FRAMERAIL_HOST}{path}"); - *req.uri_mut() = Uri::try_from(uri).expect("Internal framerail URI is invalid"); + *req.uri_mut() = state.framerail.proxy_uri(path); state .client diff --git a/wws/src/main.rs b/wws/src/main.rs index ca7651ff97..a0dac764b6 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -36,6 +36,7 @@ mod cache; mod config; mod deepwell; mod error; +mod framerail; mod handler; mod host; mod info; diff --git a/wws/src/state.rs b/wws/src/state.rs index d3e0b2801d..5ccc841b98 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -23,12 +23,16 @@ use crate::{ config::Secrets, deepwell::{Deepwell, Domains, FileData, PageData, SiteData}, error::Result, + framerail::Framerail, }; use axum::body::Body; +use hyper_util::{ + client::legacy::{connect::HttpConnector, Client as HyperClient}, + rt::TokioExecutor, +}; use s3::bucket::Bucket; use std::sync::Arc; use std::time::Duration; -use hyper_util::{client::legacy::{Client as HyperClient, connect::HttpConnector}, rt::TokioExecutor}; const BUCKET_REQUEST_TIMEOUT: Duration = Duration::from_millis(200); @@ -40,11 +44,13 @@ pub struct ServerStateInner { pub domains: Domains, pub client: Client, pub deepwell: Deepwell, + pub 
framerail: Framerail, pub cache: Cache, pub s3_bucket: Box<Bucket>, } pub async fn build_server_state(secrets: Secrets) -> Result<ServerState> { + let framerail = Framerail::new(secrets.framerail_host); let deepwell = Deepwell::connect(&secrets.deepwell_url)?; deepwell.check().await; let domains = deepwell.domains().await?; @@ -69,6 +75,7 @@ pub async fn build_server_state(secrets: Secrets) -> Result<ServerState> { domains, client, deepwell, + framerail, cache, s3_bucket, })) From 83a885250fbf5180514d446bf64d961a8ccce439 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 22:58:10 -0500 Subject: [PATCH 143/306] Add HTML error case for site lookup failure. --- deepwell/src/services/error.rs | 10 +++++++--- wws/src/error/html.rs | 21 +++++++++++++++++---- wws/src/handler/mod.rs | 4 ++-- 3 files changed, 26 insertions(+), 9 deletions(-) diff --git a/deepwell/src/services/error.rs b/deepwell/src/services/error.rs index ee8147ef8b..44bc01e7c1 100644 --- a/deepwell/src/services/error.rs +++ b/deepwell/src/services/error.rs @@ -308,6 +308,9 @@ pub enum Error { #[error("The web server failed to process the request")] WebServerFailure, + #[error("The web server cannot fetch site information")] + SiteFetch, + #[error("The web server cannot fetch page information")] PageFetch, @@ -453,9 +456,10 @@ impl Error { // // WebServerFailure is pretty general, avoid using it if possible. Error::WebServerFailure => 6000, - Error::PageFetch => 6001, - Error::FileFetch => 6002, - Error::BlobFetch => 6003, + Error::SiteFetch => 6001, + Error::PageFetch => 6002, + Error::FileFetch => 6003, + Error::BlobFetch => 6004, } } diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index a16fa799bd..b98211eac6 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -62,6 +62,9 @@ pub enum ServerErrorCode<'a> { page_id: i64, filename: &'a str, }, + SiteFetch { + domain: &'a str, + }, PageFetch { site_id: i64, page_slug: &'a str, @@ -91,9 +94,10 @@ impl ServerErrorCode<'_> { ServerErrorCode::CustomDomainNotFound { .. } => 2013, ServerErrorCode::PageNotFound { .. } => 2005, ServerErrorCode::FileNotFound { .. } => 2009, - ServerErrorCode::PageFetch { .. } => 6001, - ServerErrorCode::FileFetch { .. } => 6002, - ServerErrorCode::BlobFetch { .. } => 6003, + ServerErrorCode::SiteFetch { .. } => 6001, + ServerErrorCode::PageFetch { .. } => 6002, + ServerErrorCode::FileFetch { .. } => 6003, + ServerErrorCode::BlobFetch { .. } => 6004, } } @@ -104,7 +108,8 @@ impl ServerErrorCode<'_> { | ServerErrorCode::CustomDomainNotFound { .. } | ServerErrorCode::PageNotFound { .. } | ServerErrorCode::FileNotFound { .. } => StatusCode::NOT_FOUND, - ServerErrorCode::PageFetch { .. } + ServerErrorCode::SiteFetch { .. } + | ServerErrorCode::PageFetch { .. } | ServerErrorCode::FileFetch { .. } | ServerErrorCode::BlobFetch { .. } => StatusCode::INTERNAL_SERVER_ERROR, } @@ -118,6 +123,7 @@ impl ServerErrorCode<'_> { } ServerErrorCode::PageNotFound { .. } => "Page not found", ServerErrorCode::FileNotFound { .. } => "File not found", + ServerErrorCode::SiteFetch { .. } => "Cannot load site information", ServerErrorCode::PageFetch { .. } => "Cannot load page", ServerErrorCode::FileFetch { .. } => "Cannot load file", ServerErrorCode::BlobFetch { .. 
} => "Cannot load file data", @@ -171,6 +177,13 @@ impl ServerErrorCode<'_> { site_id, ); } + ServerErrorCode::SiteFetch { domain } => { + str_write!( + body, + "Cannot load site information for domain \"<code>{}</code>\".", + html_escape(domain), + ); + } ServerErrorCode::PageFetch { site_id, page_slug } => { str_write!( body, diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index dd0831ec05..f09b19d219 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -129,8 +129,8 @@ pub async fn handle_host_delegation( let host_data = match lookup_host(&state, &hostname).await { Ok(host_data) => host_data, Err(error) => { - // TODO error page response in case of an internal issue - todo!() + error!("Unable to fetch site/host information: {error}"); + return ServerErrorCode::SiteFetch { domain: &hostname }.into_response(); } }; From b765dc3bd56fee453d16b6bd289af9c465f43cf1 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 22:58:27 -0500 Subject: [PATCH 144/306] Remove some unused imports. --- wws/src/handler/file.rs | 3 +-- wws/src/route.rs | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index 49cb9f691d..ef4c0b2f84 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -20,8 +20,7 @@ use super::get_site_info; use crate::{ - deepwell::FileData, - error::{Result, ServerErrorCode}, + error::ServerErrorCode, state::ServerState, }; use axum::{ diff --git a/wws/src/route.rs b/wws/src/route.rs index d234db5426..c73d8ddfd1 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -22,7 +22,7 @@ use crate::{handler::*, info, state::ServerState}; use axum::{ body::Body, extract::{Request, State}, - http::header::{HeaderName, HeaderValue}, + http::header::HeaderValue, routing::{any, get}, Router, }; From da6109c733cab2e8799801979f70e06d111440b8 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 02:53:39 -0500 Subject: [PATCH 145/306] Bump deepwell version to v2025.2.6 --- deepwell/Cargo.lock | 2 +- deepwell/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/deepwell/Cargo.lock b/deepwell/Cargo.lock index 0111758b7e..d9dd7ba519 100644 --- a/deepwell/Cargo.lock +++ b/deepwell/Cargo.lock @@ -772,7 +772,7 @@ dependencies = [ [[package]] name = "deepwell" -version = "2025.1.3" +version = "2025.2.6" dependencies = [ "anyhow", "argon2", diff --git a/deepwell/Cargo.toml b/deepwell/Cargo.toml index ec9d308366..3f0f52c7ca 100644 --- a/deepwell/Cargo.toml +++ b/deepwell/Cargo.toml @@ -8,7 +8,7 @@ keywords = ["wikijump", "api", "backend", "wiki"] categories = ["asynchronous", "database", "web-programming::http-server"] exclude = [".gitignore", ".editorconfig"] -version = "2025.1.3" +version = "2025.2.6" authors = ["Emmie Smith <emmie.maeda@gmail.com>"] edition = "2021" From de76f4f530a25b693b94373cf7efbdbf6004977b Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 02:54:06 -0500 Subject: [PATCH 146/306] Bump wws version to v2025.2.6 --- wws/Cargo.lock | 2 +- wws/Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index f9c5a88b86..e8ac07703c 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -2687,7 +2687,7 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" [[package]] name = "wws" -version = "2025.1.1" +version = "2025.2.6" dependencies = [ "anyhow", "axum", diff --git 
a/wws/Cargo.toml b/wws/Cargo.toml index 72a7245677..a55870585e 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -8,7 +8,7 @@ keywords = ["wikijump", "api", "backend", "wiki"] categories = ["asynchronous", "caching", "web-programming::http-server"] exclude = [".gitignore"] -version = "2025.1.1" +version = "2025.2.6" authors = ["Emmie Smith <emmie.maeda@gmail.com>"] edition = "2021" From d6247200002037a1c6bf490b88ea7b5664e285ce Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 23:00:40 -0500 Subject: [PATCH 147/306] Address clippy lints. --- wws/src/host.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/wws/src/host.rs b/wws/src/host.rs index bc2ab5d6af..f28e61be4b 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -68,7 +68,7 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S .. } = state.domains; - if &hostname == main_domain_no_dot { + if hostname == main_domain_no_dot { // First, check if it's the default domain by itself. main_site_slug(state, hostname, None).await } else if let Some(site_slug) = hostname.strip_suffix(main_domain) { @@ -102,7 +102,7 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S Ok(SiteAndHost::FileMissing { site_slug }) } } - } else if &hostname == files_domain_no_dot { + } else if hostname == files_domain_no_dot { // Finally, check if it's the files domain by itself. // // This is weird, wjfiles should always a site slug subdomain, @@ -117,7 +117,7 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S // If it's anything else, it must be a custom domain. // Do a lookup, then set the site data as appropriate. - match state.get_site_from_domain(&hostname).await? { + match state.get_site_from_domain(hostname).await? { Some((site_id, site_slug)) => { // Site exists info!( From c480ba52d077faaeb65eae58826625be7c52bda4 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 23:01:13 -0500 Subject: [PATCH 148/306] Remove unused fields. --- wws/src/deepwell.rs | 5 ----- 1 file changed, 5 deletions(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index d23cc6a4bd..c4754fc856 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -178,16 +178,11 @@ pub struct Domains { pub struct SiteData { pub site_id: i64, pub slug: String, - pub name: String, - pub custom_domain: Option<String>, } #[derive(Deserialize, Debug, Clone)] pub struct PageData { pub page_id: i64, - pub title: String, - pub alt_title: Option<String>, - pub hidden_fields: Vec<String>, } #[derive(Deserialize, Debug, Clone)] From fb936ded35bcf5cfed1e461629a52e7d0e74e51c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 7 Feb 2025 23:03:54 -0500 Subject: [PATCH 149/306] Suppress unused imports and variables. 
--- wws/src/handler/code.rs | 1 + wws/src/handler/html.rs | 3 +++ wws/src/handler/robots.rs | 6 +----- wws/src/handler/well_known.rs | 6 +----- 4 files changed, 6 insertions(+), 10 deletions(-) diff --git a/wws/src/handler/code.rs b/wws/src/handler/code.rs index 847d70e982..29e8a03b35 100644 --- a/wws/src/handler/code.rs +++ b/wws/src/handler/code.rs @@ -40,5 +40,6 @@ pub async fn handle_code_block( ); // TODO + let _ = state; todo!() } diff --git a/wws/src/handler/html.rs b/wws/src/handler/html.rs index 270aabf03b..f0a5a486cd 100644 --- a/wws/src/handler/html.rs +++ b/wws/src/handler/html.rs @@ -34,5 +34,8 @@ pub async fn handle_html_block( Path((page_slug, id)): Path<(String, String)>, ) -> Html<&'static str> { // TODO + let _ = state; + let _ = page_slug; + let _ = id; todo!() } diff --git a/wws/src/handler/robots.rs b/wws/src/handler/robots.rs index b796a28324..94c6654ea3 100644 --- a/wws/src/handler/robots.rs +++ b/wws/src/handler/robots.rs @@ -20,11 +20,7 @@ //! Handling for the robots.txt file. -use axum::{ - body::Body, - http::{header, status::StatusCode}, - response::Response, -}; +use axum::http::status::StatusCode; // TODO diff --git a/wws/src/handler/well_known.rs b/wws/src/handler/well_known.rs index a89455ca27..02fbe739a1 100644 --- a/wws/src/handler/well_known.rs +++ b/wws/src/handler/well_known.rs @@ -23,11 +23,7 @@ //! Many different standard paths are served here, and each //! should be implemented as a separate handler. -use axum::{ - body::Body, - http::{header, status::StatusCode}, - response::Response, -}; +use axum::http::status::StatusCode; // TODO From 801598d7d2ca42eef12535a90bd4c00583e5c973 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 9 Feb 2025 00:31:58 -0500 Subject: [PATCH 150/306] Fetch site information from headers for view requests. Partial change, this does not remove the domain in runRedirect. 
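framerail now reads the site identity from headers that wws sets when proxying (x-wikijump-domain, x-wikijump-site-id, x-wikijump-site-slug) instead of re-deriving it from the request hostname, and the view calls take site_id rather than domain. Per the comments in site-info.ts, these headers can be trusted because wws erases any values the client tried to set.

Hypothetical sketch of the wws side (not part of this commit), showing roughly how the trusted headers could be injected before forwarding:

    use axum::http::{header::HeaderName, HeaderValue, Request};

    fn set_site_headers<B>(req: &mut Request<B>, domain: &str, site_id: i64, site_slug: &str) {
        let headers = req.headers_mut();
        // insert() replaces any header value the client may have sent.
        headers.insert(
            HeaderName::from_static("x-wikijump-domain"),
            HeaderValue::from_str(domain).expect("invalid domain header"),
        );
        headers.insert(
            HeaderName::from_static("x-wikijump-site-id"),
            HeaderValue::from_str(&site_id.to_string()).expect("invalid site id header"),
        );
        headers.insert(
            HeaderName::from_static("x-wikijump-site-slug"),
            HeaderValue::from_str(site_slug).expect("invalid site slug header"),
        );
    }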
--- framerail/src/lib/server/deepwell/user.ts | 4 +-- framerail/src/lib/server/deepwell/views.ts | 8 +++--- framerail/src/lib/server/load/admin.ts | 6 ++--- framerail/src/lib/server/load/info.ts | 4 +-- framerail/src/lib/server/load/login.ts | 4 +-- framerail/src/lib/server/load/logout.ts | 4 +-- framerail/src/lib/server/load/page.ts | 6 ++--- framerail/src/lib/server/load/site-info.ts | 31 ++++++++++++++++++++++ framerail/src/lib/server/load/user.ts | 5 ++-- 9 files changed, 51 insertions(+), 21 deletions(-) create mode 100644 framerail/src/lib/server/load/site-info.ts diff --git a/framerail/src/lib/server/deepwell/user.ts b/framerail/src/lib/server/deepwell/user.ts index e103813259..e01e082b23 100644 --- a/framerail/src/lib/server/deepwell/user.ts +++ b/framerail/src/lib/server/deepwell/user.ts @@ -3,13 +3,13 @@ import type { Optional } from "$lib/types" import { startBlobUpload, uploadToPresignUrl } from "./file" export async function userView( - domain: string, + siteId: number, locales: string[], sessionToken: Optional<string>, username?: string ): Promise<object> { return client.request("user_view", { - domain, + site_id: siteId, session_token: sessionToken, locales, user: username diff --git a/framerail/src/lib/server/deepwell/views.ts b/framerail/src/lib/server/deepwell/views.ts index 3c1b7f48e3..1c9d60c101 100644 --- a/framerail/src/lib/server/deepwell/views.ts +++ b/framerail/src/lib/server/deepwell/views.ts @@ -7,13 +7,13 @@ export interface PageRoute { } export async function pageView( - domain: string, + siteId: number, locales: string[], route: Optional<PageRoute>, sessionToken: Optional<string> ): Promise<object> { return client.request("page_view", { - domain, + site_id: siteId, locales, session_token: sessionToken, route @@ -21,12 +21,12 @@ export async function pageView( } export async function adminView( - domain: string, + siteId: number, locales: string[], sessionToken: Optional<string> ): Promise<object> { return client.request("admin_view", { - domain, + site_id: siteId, locales, session_token: sessionToken }) diff --git a/framerail/src/lib/server/load/admin.ts b/framerail/src/lib/server/load/admin.ts index fb5a9d1b89..a8d604cd86 100644 --- a/framerail/src/lib/server/load/admin.ts +++ b/framerail/src/lib/server/load/admin.ts @@ -2,18 +2,18 @@ import defaults from "$lib/defaults" import { parseAcceptLangHeader } from "$lib/locales" import { translate } from "$lib/server/deepwell/translate" import { adminView } from "$lib/server/deepwell/views" +import { loadSiteInfo } from "$lib/server/load/site-info" import type { TranslateKeys } from "$lib/types" import { error } from "@sveltejs/kit" export async function loadAdminPage(request, cookies) { - const url = new URL(request.url) - const domain = url.hostname + const { siteId } = loadSiteInfo(request.headers) const sessionToken = cookies.get("wikijump_token") let locales = parseAcceptLangHeader(request) if (!locales.includes(defaults.fallbackLocale)) locales.push(defaults.fallbackLocale) - const response = await adminView(domain, locales, sessionToken) + const response = await adminView(siteId, locales, sessionToken) let translateKeys: TranslateKeys = { ...defaults.translateKeys diff --git a/framerail/src/lib/server/load/info.ts b/framerail/src/lib/server/load/info.ts index 70210df978..6b881358cc 100644 --- a/framerail/src/lib/server/load/info.ts +++ b/framerail/src/lib/server/load/info.ts @@ -2,13 +2,13 @@ import defaults from "$lib/defaults" import { parseAcceptLangHeader } from "$lib/locales" import { info } from 
"$lib/server/deepwell" import { translate } from "$lib/server/deepwell/translate" +import { loadSiteInfo } from "$lib/server/load/site-info" import type { TranslateKeys } from "$lib/types" import "$lib/vite-env.d.ts" import process from "process" export async function loadInfo(request, cookies) { - const url = new URL(request.url) - const domain = url.hostname + const { siteId } = loadSiteInfo(request.headers) const sessionToken = cookies.get("wikijump_token") let locales = parseAcceptLangHeader(request) diff --git a/framerail/src/lib/server/load/login.ts b/framerail/src/lib/server/load/login.ts index 5830a7d207..78f16d480e 100644 --- a/framerail/src/lib/server/load/login.ts +++ b/framerail/src/lib/server/load/login.ts @@ -1,12 +1,12 @@ import defaults from "$lib/defaults" import { parseAcceptLangHeader } from "$lib/locales" import { translate } from "$lib/server/deepwell/translate" +import { loadSiteInfo } from "$lib/server/load/site-info" import type { TranslateKeys } from "$lib/types" export async function loadLoginPage(request, cookies) { // Set up parameters - const url = new URL(request.url) - const domain = url.hostname + const { siteId } = loadSiteInfo(request.headers) const sessionToken = cookies.get("wikijump_token") let locales = parseAcceptLangHeader(request) diff --git a/framerail/src/lib/server/load/logout.ts b/framerail/src/lib/server/load/logout.ts index a0579e4591..9197f9ebaa 100644 --- a/framerail/src/lib/server/load/logout.ts +++ b/framerail/src/lib/server/load/logout.ts @@ -1,12 +1,12 @@ import defaults from "$lib/defaults" import { parseAcceptLangHeader } from "$lib/locales" import { translate } from "$lib/server/deepwell/translate" +import { loadSiteInfo } from "$lib/server/load/site-info" import type { TranslateKeys } from "$lib/types" export async function loadLogoutPage(request, cookies) { // Set up parameters - const url = new URL(request.url) - const domain = url.hostname + const { siteId } = loadSiteInfo(request.headers) const sessionToken = cookies.get("wikijump_token") let locales = parseAcceptLangHeader(request) diff --git a/framerail/src/lib/server/load/page.ts b/framerail/src/lib/server/load/page.ts index 5809073aca..abf20b6192 100644 --- a/framerail/src/lib/server/load/page.ts +++ b/framerail/src/lib/server/load/page.ts @@ -2,6 +2,7 @@ import defaults from "$lib/defaults" import { parseAcceptLangHeader } from "$lib/locales" import { translate } from "$lib/server/deepwell/translate" import { pageView } from "$lib/server/deepwell/views" +import { loadSiteInfo } from "$lib/server/load/site-info" import type { Optional, TranslateKeys } from "$lib/types" import { error, redirect } from "@sveltejs/kit" @@ -14,8 +15,7 @@ export async function loadPage( cookies ) { // Set up parameters - const url = new URL(request.url) - const domain = url.hostname + const { siteId } = loadSiteInfo(request.headers) const route = slug || extra ? 
{ slug, extra } : null const sessionToken = cookies.get("wikijump_token") let locales = parseAcceptLangHeader(request) @@ -25,7 +25,7 @@ export async function loadPage( // Request data from backend // Includes fallback locale in case there is no Accept-Language header const response = await pageView( - domain, + siteId, [...locales, defaults.fallbackLocale], route, sessionToken diff --git a/framerail/src/lib/server/load/site-info.ts b/framerail/src/lib/server/load/site-info.ts new file mode 100644 index 0000000000..cb282be9f8 --- /dev/null +++ b/framerail/src/lib/server/load/site-info.ts @@ -0,0 +1,31 @@ +// Helper to extract site information headers from wws +// +// These headers are set by wws - their values can be trusted. +// If the headers are first set by the client, those values +// get erased. + +const DOMAIN_HEADER = "x-wikijump-domain" +const SITE_ID_HEADER = "x-wikijump-site-id" +const SITE_SLUG_HEADER = "x-wikijump-site-slug" + +export interface SiteInfo { + domain: string + siteId: number + siteSlug: string +} + +function getHeader(headers: Headers, key: string): string { + const value = headers.get(key) + if (value === null) { + throw new Error(`Missing wws internal header '${key}'`) + } + + return value +} + +export function loadSiteInfo(headers: Headers): SiteInfo { + const domain = getHeader(headers, DOMAIN_HEADER) + const siteSlug = getHeader(headers, SITE_SLUG_HEADER) + const siteId = parseInt(getHeader(headers, SITE_ID_HEADER)) + return { domain, siteId, siteSlug } +} diff --git a/framerail/src/lib/server/load/user.ts b/framerail/src/lib/server/load/user.ts index ddec6cde66..6d892ddcb4 100644 --- a/framerail/src/lib/server/load/user.ts +++ b/framerail/src/lib/server/load/user.ts @@ -7,14 +7,13 @@ import type { TranslateKeys } from "$lib/types" import { error, redirect } from "@sveltejs/kit" export async function loadUser(username?: string, request, cookies) { - const url = new URL(request.url) - const domain = url.hostname + const { siteId } = loadSiteInfo(request.headers) const sessionToken = cookies.get("wikijump_token") let locales = parseAcceptLangHeader(request) if (!locales.includes(defaults.fallbackLocale)) locales.push(defaults.fallbackLocale) - const response = await userView(domain, locales, sessionToken, username) + const response = await userView(siteId, locales, sessionToken, username) let translateKeys: TranslateKeys = { ...defaults.translateKeys From b55fcf4782361feb9312e710c75e38a10ee467a4 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 9 Feb 2025 01:15:34 -0500 Subject: [PATCH 151/306] Remove extra newline. --- wws/src/deepwell.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index c4754fc856..c137f25dab 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -143,7 +143,6 @@ impl Deepwell { }; let page_data: Option<PageData> = self.client.request("page_get", params).await?; - Ok(page_data) } From 8501cd0b3bc1b1ab6e0de42fa43446e862038f7c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 9 Feb 2025 01:32:08 -0500 Subject: [PATCH 152/306] Move should_redirect_site to domain endpoint. 
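The redirect decision (compare the requested domain against the site's preferred domain) moves out of ViewService and into the domain layer, so the site_get_from_domain endpoint can hand wws both the site and whether a redirect is needed in a single lookup. Viewer no longer carries redirect_site.

Sketch of the comparison the service performs (illustrative; the real code derives the preferred domain from the site's custom domain or its canonical slug):

    fn should_redirect<'a>(requested: &str, preferred: &'a str) -> Option<&'a str> {
        if requested == preferred {
            None // already on the preferred domain
        } else {
            Some(preferred) // caller should redirect here
        }
    }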
--- deepwell/src/endpoints/domain.rs | 22 +++++++-- deepwell/src/services/domain/service.rs | 14 ++++++ deepwell/src/services/domain/structs.rs | 9 +++- deepwell/src/services/error.rs | 1 - deepwell/src/services/view/service.rs | 65 ++++++------------------- deepwell/src/services/view/structs.rs | 1 - wws/src/deepwell.rs | 3 +- wws/src/host.rs | 1 - wws/src/state.rs | 2 +- 9 files changed, 59 insertions(+), 59 deletions(-) diff --git a/deepwell/src/endpoints/domain.rs b/deepwell/src/endpoints/domain.rs index 16c2c51453..afb3f6e6b1 100644 --- a/deepwell/src/endpoints/domain.rs +++ b/deepwell/src/endpoints/domain.rs @@ -20,14 +20,30 @@ use super::prelude::*; use crate::models::site::Model as SiteModel; -use crate::services::domain::CreateCustomDomain; +use crate::services::domain::{CreateCustomDomain, SiteDomainData}; +use std::borrow::Cow; pub async fn site_get_from_domain( ctx: &ServiceContext<'_>, params: Params<'static>, -) -> Result<Option<SiteModel>> { +) -> Result<Option<SiteDomainData>> { let domain: String = params.one()?; - DomainService::site_from_domain_optional(ctx, &domain).await + match DomainService::site_from_domain_optional(ctx, &domain).await? { + None => Ok(None), + Some(site) => { + let config = ctx.config(); + let should_redirect = + DomainService::should_redirect_site(config, &site, &domain) + .map(Cow::into_owned); + + let SiteModel { site_id, slug, .. } = site; + Ok(Some(SiteDomainData { + site_id, + site_slug: slug, + should_redirect, + })) + } + } } pub async fn site_custom_domain_create( diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index 1b7584db97..7d20664729 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -223,6 +223,20 @@ impl DomainService { } } + /// Returns `None` if the given domain is already the preferred domain for this site. + pub fn should_redirect_site<'a>( + config: &'a Config, + site: &'a SiteModel, + domain: &str, + ) -> Option<Cow<'a, str>> { + let preferred_domain = Self::domain_for_site(config, site); + if domain == preferred_domain { + None + } else { + Some(preferred_domain) + } + } + /// Return the preferred domain for the `www` site. 
/// /// This site is a special exception, instead of visiting `www.wikijump.com` diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index f26255a6eb..b631fe4a12 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -27,7 +27,14 @@ pub enum SiteDomainResult<'a> { CustomDomain(&'a str), } -#[derive(Deserialize, Debug)] +#[derive(Serialize, Debug, Clone)] +pub struct SiteDomainData { + pub site_id: i64, + pub site_slug: String, + pub should_redirect: Option<String>, +} + +#[derive(Deserialize, Debug, Clone)] pub struct CreateCustomDomain { pub domain: String, pub site_id: i64, diff --git a/deepwell/src/services/error.rs b/deepwell/src/services/error.rs index 44bc01e7c1..e15da7bb88 100644 --- a/deepwell/src/services/error.rs +++ b/deepwell/src/services/error.rs @@ -304,7 +304,6 @@ pub enum Error { // Errors for wws // See the 8000 section in the error codes table - #[error("The web server failed to process the request")] WebServerFailure, diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index 24548804da..7dda89a0c7 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -74,11 +74,7 @@ impl ViewService { // Attempt to get a viewer helper structure, but if the site doesn't exist // then return right away with the "no such site" response. - let Viewer { - site, - redirect_site, - user_session, - } = match Self::get_viewer( + let Viewer { site, user_session } = match Self::get_viewer( ctx, &mut locales, &domain, @@ -260,12 +256,7 @@ impl ViewService { // TODO Check if user-agent and IP match? - let viewer = Viewer { - site, - redirect_site, - user_session, - }; - + let viewer = Viewer { site, user_session }; let output = match status { PageStatus::Found { page, @@ -520,32 +511,21 @@ impl ViewService { } // Get site data - let (site, redirect_site) = - match DomainService::parse_site_from_domain(ctx, domain).await? { - SiteDomainResult::Found(site) => { - let redirect_site = Self::should_redirect_site(ctx, &site, domain); - (site, redirect_site) - } - SiteDomainResult::Slug(slug) => { - let html = - Self::missing_site_output(ctx, locales, domain, Some(slug)) - .await?; + let site = match DomainService::parse_site_from_domain(ctx, domain).await? { + SiteDomainResult::Found(site) => site, + SiteDomainResult::Slug(slug) => { + let html = + Self::missing_site_output(ctx, locales, domain, Some(slug)).await?; - return Ok(ViewerResult::MissingSite(html)); - } - SiteDomainResult::CustomDomain(domain) => { - let html = - Self::missing_site_output(ctx, locales, domain, None).await?; - - return Ok(ViewerResult::MissingSite(html)); - } - }; + return Ok(ViewerResult::MissingSite(html)); + } + SiteDomainResult::CustomDomain(domain) => { + let html = Self::missing_site_output(ctx, locales, domain, None).await?; + return Ok(ViewerResult::MissingSite(html)); + } + }; - Ok(ViewerResult::FoundSite(Viewer { - site, - redirect_site, - user_session, - })) + Ok(ViewerResult::FoundSite(Viewer { site, user_session })) } /// Produce output for cases where a site does not exist. @@ -621,21 +601,6 @@ impl ViewService { Ok(true) } - fn should_redirect_site( - ctx: &ServiceContext, - site: &SiteModel, - domain: &str, - ) -> Option<String> { - // NOTE: We have to pass an owned string here, since the Cow borrows from - // SiteModel, which we are also passing in the final output struct. 
- let preferred_domain = DomainService::domain_for_site(ctx.config(), site); - if domain == preferred_domain { - None - } else { - Some(preferred_domain.into_owned()) - } - } - fn should_redirect_page(slug: &str) -> Option<String> { // Fix typos in the page slug. // See https://scuttle.atlassian.net/browse/WJ-330 diff --git a/deepwell/src/services/view/structs.rs b/deepwell/src/services/view/structs.rs index 6db59c787a..f5069ab036 100644 --- a/deepwell/src/services/view/structs.rs +++ b/deepwell/src/services/view/structs.rs @@ -150,7 +150,6 @@ pub enum ViewerResult { #[derive(Serialize, Debug, Clone)] pub struct Viewer { pub site: SiteModel, - pub redirect_site: Option<String>, pub user_session: Option<UserSession>, } diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index c137f25dab..d2421865d1 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -176,7 +176,8 @@ pub struct Domains { #[derive(Deserialize, Debug, Clone)] pub struct SiteData { pub site_id: i64, - pub slug: String, + pub site_slug: String, + pub should_redirect: Option<String>, } #[derive(Deserialize, Debug, Clone)] diff --git a/wws/src/host.rs b/wws/src/host.rs index f28e61be4b..9c3980ef27 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -116,7 +116,6 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S } else { // If it's anything else, it must be a custom domain. // Do a lookup, then set the site data as appropriate. - match state.get_site_from_domain(hostname).await? { Some((site_id, site_slug)) => { // Site exists diff --git a/wws/src/state.rs b/wws/src/state.rs index 5ccc841b98..b1b9a357ba 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -105,7 +105,7 @@ impl ServerStateInner { None => Ok(None), Some(SiteData { site_id, - slug: site_slug, + site_slug, .. }) => { self.cache From 954e762d20470fb62b6e0600306aa0de6cda2b92 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 9 Feb 2025 02:17:18 -0500 Subject: [PATCH 153/306] Add constant for 'www'. Like in wws. --- deepwell/src/endpoints/domain.rs | 15 +++------------ deepwell/src/services/domain/service.rs | 22 +++++----------------- deepwell/src/services/domain/structs.rs | 6 +++--- 3 files changed, 11 insertions(+), 32 deletions(-) diff --git a/deepwell/src/endpoints/domain.rs b/deepwell/src/endpoints/domain.rs index afb3f6e6b1..93f72a4b0c 100644 --- a/deepwell/src/endpoints/domain.rs +++ b/deepwell/src/endpoints/domain.rs @@ -20,8 +20,7 @@ use super::prelude::*; use crate::models::site::Model as SiteModel; -use crate::services::domain::{CreateCustomDomain, SiteDomainData}; -use std::borrow::Cow; +use crate::services::domain::{CreateCustomDomain, DomainService, SiteDomainData}; pub async fn site_get_from_domain( ctx: &ServiceContext<'_>, @@ -32,16 +31,8 @@ pub async fn site_get_from_domain( None => Ok(None), Some(site) => { let config = ctx.config(); - let should_redirect = - DomainService::should_redirect_site(config, &site, &domain) - .map(Cow::into_owned); - - let SiteModel { site_id, slug, .. 
} = site; - Ok(Some(SiteDomainData { - site_id, - site_slug: slug, - should_redirect, - })) + let preferred_domain = DomainService::preferred_domain(config, &site).into_owned(); + Ok(Some(SiteDomainData { site, preferred_domain })) } } } diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index 7d20664729..044bd327b8 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -31,6 +31,8 @@ use crate::models::site_domain::{self, Entity as SiteDomain, Model as SiteDomain use crate::services::SiteService; use std::borrow::Cow; +pub const DEFAULT_SITE_SLUG: &str = "www"; + #[derive(Debug)] pub struct DomainService; @@ -185,7 +187,7 @@ impl DomainService { // Special case, see if it's the root domain (i.e. 'wikijump.com') if domain == main_domain_no_dot { - return Some("www"); + return Some(DEFAULT_SITE_SLUG); } // Remove the '.wikijump.com' suffix, get slug @@ -210,7 +212,7 @@ impl DomainService { } /// Gets the preferred domain for the given site. - pub fn domain_for_site<'a>(config: &'a Config, site: &'a SiteModel) -> Cow<'a, str> { + pub fn preferred_domain<'a>(config: &'a Config, site: &'a SiteModel) -> Cow<'a, str> { debug!( "Getting preferred domain for site '{}' (ID {})", site.slug, site.site_id, @@ -218,25 +220,11 @@ impl DomainService { match &site.custom_domain { Some(domain) => cow!(domain), - None if site.slug == "www" => Self::www_domain(config), + None if site.slug == DEFAULT_SITE_SLUG => Self::www_domain(config), None => Cow::Owned(Self::get_canonical(config, &site.slug)), } } - /// Returns `None` if the given domain is already the preferred domain for this site. - pub fn should_redirect_site<'a>( - config: &'a Config, - site: &'a SiteModel, - domain: &str, - ) -> Option<Cow<'a, str>> { - let preferred_domain = Self::domain_for_site(config, site); - if domain == preferred_domain { - None - } else { - Some(preferred_domain) - } - } - /// Return the preferred domain for the `www` site. /// /// This site is a special exception, instead of visiting `www.wikijump.com` diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index b631fe4a12..4ea9d6a58b 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -29,9 +29,9 @@ pub enum SiteDomainResult<'a> { #[derive(Serialize, Debug, Clone)] pub struct SiteDomainData { - pub site_id: i64, - pub site_slug: String, - pub should_redirect: Option<String>, + #[serde(flatten)] + pub site: SiteModel, + pub preferred_domain: String, } #[derive(Deserialize, Debug, Clone)] From 2cc13c5cbef1227c1a691b51c954a1546905533c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 9 Feb 2025 02:33:45 -0500 Subject: [PATCH 154/306] Add DomainService TODO. --- deepwell/src/services/domain/service.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index 044bd327b8..2c182c0fc3 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -24,6 +24,7 @@ //! and custom domains (e.g. `scpwiki.com`). 
// TODO disallow custom domains that are subdomains of the main domain or files domain +// TODO disallow custom domain for default site (www) use super::prelude::*; use crate::models::site::{self, Entity as Site, Model as SiteModel}; From 508ea65f6f0dd3173712de65e21fec7a3a987d76 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 9 Feb 2025 19:52:32 -0500 Subject: [PATCH 155/306] Add custom domain list API call. --- deepwell/src/api.rs | 2 +- deepwell/src/endpoints/domain.rs | 23 ++++++++++++++--------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/deepwell/src/api.rs b/deepwell/src/api.rs index ccf8acd952..35c45b5c86 100644 --- a/deepwell/src/api.rs +++ b/deepwell/src/api.rs @@ -214,8 +214,8 @@ async fn build_module(app_state: ServerState) -> anyhow::Result<RpcModule<Server // Site custom domain register!("custom_domain_create", site_custom_domain_create); - register!("custom_domain_get", site_custom_domain_get); register!("custom_domain_delete", site_custom_domain_delete); + register!("custom_domain_list", site_custom_domain_list); // Site membership register!("member_set", membership_set); diff --git a/deepwell/src/endpoints/domain.rs b/deepwell/src/endpoints/domain.rs index 93f72a4b0c..f86d7365f2 100644 --- a/deepwell/src/endpoints/domain.rs +++ b/deepwell/src/endpoints/domain.rs @@ -20,7 +20,7 @@ use super::prelude::*; use crate::models::site::Model as SiteModel; -use crate::services::domain::{CreateCustomDomain, DomainService, SiteDomainData}; +use crate::services::domain::{CreateCustomDomain, DomainService, SiteDomainResult}; pub async fn site_get_from_domain( ctx: &ServiceContext<'_>, @@ -45,14 +45,6 @@ pub async fn site_custom_domain_create( DomainService::create_custom(ctx, input).await } -pub async fn site_custom_domain_get( - ctx: &ServiceContext<'_>, - params: Params<'static>, -) -> Result<Option<SiteModel>> { - let domain: String = params.one()?; - DomainService::site_from_domain_optional(ctx, &domain).await -} - // TODO rename pub async fn site_custom_domain_delete( ctx: &ServiceContext<'_>, @@ -61,3 +53,16 @@ pub async fn site_custom_domain_delete( let domain: String = params.one()?; DomainService::remove_custom(ctx, domain).await } + +pub async fn site_custom_domain_list( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<Vec<SiteDomainModel>> { + #[derive(Deserialize, Debug)] + struct Input { + site_id: i64, + } + + let Input { site_id } = params.parse()?; + DomainService::list_custom(ctx, site_id).await +} From c027f2a4189a7b4159a4e2a08633e5fa3d949b13 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 9 Feb 2025 19:54:26 -0500 Subject: [PATCH 156/306] Return SiteDomainResult from domain API call. 
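SiteDomainResult becomes an owned, serializable enum (adjacently tagged with result/data and snake_case variant names) so the site_from_domain RPC can return the full result to wws directly, rather than an optional value that loses the reason a site was not found.

Serialization sketch (illustrative of serde's adjacent tagging; field contents elided):

    // SiteDomainResult::Slug("some-site".into())
    //   => {"result": "slug", "data": "some-site"}
    //
    // SiteDomainResult::Found { site, preferred_domain }
    //   => {"result": "found", "data": {"site": {...}, "preferred_domain": "..."}}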
--- deepwell/src/endpoints/domain.rs | 13 ++----- deepwell/src/services/domain/service.rs | 50 +++++++++++-------------- deepwell/src/services/domain/structs.rs | 14 ++++--- deepwell/src/services/view/service.rs | 19 +++++----- 4 files changed, 43 insertions(+), 53 deletions(-) diff --git a/deepwell/src/endpoints/domain.rs b/deepwell/src/endpoints/domain.rs index f86d7365f2..31c775fc07 100644 --- a/deepwell/src/endpoints/domain.rs +++ b/deepwell/src/endpoints/domain.rs @@ -19,22 +19,15 @@ */ use super::prelude::*; -use crate::models::site::Model as SiteModel; +use crate::models::site_domain::Model as SiteDomainModel; use crate::services::domain::{CreateCustomDomain, DomainService, SiteDomainResult}; pub async fn site_get_from_domain( ctx: &ServiceContext<'_>, params: Params<'static>, -) -> Result<Option<SiteDomainData>> { +) -> Result<SiteDomainResult> { let domain: String = params.one()?; - match DomainService::site_from_domain_optional(ctx, &domain).await? { - None => Ok(None), - Some(site) => { - let config = ctx.config(); - let preferred_domain = DomainService::preferred_domain(config, &site).into_owned(); - Ok(Some(SiteDomainData { site, preferred_domain })) - } - } + DomainService::parse_site_from_domain(ctx, &domain).await } pub async fn site_custom_domain_create( diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index 2c182c0fc3..e6241893fb 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -117,40 +117,32 @@ impl DomainService { .map(|site| site.is_some()) } - /// Gets the site corresponding with the given domain. - #[inline] - #[allow(dead_code)] // TEMP - pub async fn site_from_domain( - ctx: &ServiceContext<'_>, - domain: &str, - ) -> Result<SiteModel> { - find_or_error!(Self::site_from_domain_optional(ctx, domain), CustomDomain) - } - - /// Optional version of `site_from_domain()`. - pub async fn site_from_domain_optional( - ctx: &ServiceContext<'_>, - domain: &str, - ) -> Result<Option<SiteModel>> { - let result = Self::parse_site_from_domain(ctx, domain).await?; - match result { - SiteDomainResult::Found(site) => Ok(Some(site)), - _ => Ok(None), - } - } - /// Gets the site corresponding with the given domain. /// /// Returns one of three variants: /// * `Found` — Site retrieved from the domain. /// * `Slug` — Site does not exist. If it did, domain would be a canonical domain. /// * `CustomDomain` — Site does not exist. If it did, domain would be a custom domain. - pub async fn parse_site_from_domain<'a>( + pub async fn parse_site_from_domain( ctx: &ServiceContext<'_>, - domain: &'a str, - ) -> Result<SiteDomainResult<'a>> { + domain: &str, + ) -> Result<SiteDomainResult> { info!("Getting site for domain '{domain}'"); + /// Helper macro to produce a `Found` enum case. + /// This is needed to get the preferred domain for the return value. + macro_rules! found { + ($site:expr) => {{ + let config = ctx.config(); + let preferred_domain = + DomainService::preferred_domain(config, &$site).into_owned(); + SiteDomainResult::Found { + site: $site, + preferred_domain, + } + }}; + } + match Self::parse_canonical(ctx.config(), domain) { // Normal canonical domain, return from site slug fetch. 
Some(subdomain) => { @@ -161,8 +153,8 @@ impl DomainService { .await; match result { - Ok(Some(site)) => Ok(SiteDomainResult::Found(site)), - Ok(None) => Ok(SiteDomainResult::Slug(subdomain)), + Ok(Some(site)) => Ok(found!(site)), + Ok(None) => Ok(SiteDomainResult::Slug(str!(subdomain))), Err(error) => Err(error), } } @@ -173,8 +165,8 @@ impl DomainService { let result = Self::site_from_custom_domain_optional(ctx, domain).await; match result { - Ok(Some(site)) => Ok(SiteDomainResult::Found(site)), - Ok(None) => Ok(SiteDomainResult::CustomDomain(domain)), + Ok(Some(site)) => Ok(found!(site)), + Ok(None) => Ok(SiteDomainResult::CustomDomain(str!(domain))), Err(error) => Err(error), } } diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index 4ea9d6a58b..1a5342f04a 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -20,11 +20,15 @@ use crate::models::site::Model as SiteModel; -#[derive(Debug)] -pub enum SiteDomainResult<'a> { - Found(SiteModel), - Slug(&'a str), - CustomDomain(&'a str), +#[derive(Serialize, Debug, Clone)] +#[serde(rename_all = "snake_case", tag = "result", content = "data")] +pub enum SiteDomainResult { + Found { + site: SiteModel, + preferred_domain: String, + }, + Slug(String), + CustomDomain(String), } #[derive(Serialize, Debug, Clone)] diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index 7dda89a0c7..e91ab54fa9 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -511,21 +511,22 @@ impl ViewService { } // Get site data - let site = match DomainService::parse_site_from_domain(ctx, domain).await? { - SiteDomainResult::Found(site) => site, + match DomainService::parse_site_from_domain(ctx, domain).await? { + SiteDomainResult::Found { + site, + preferred_domain, + } => Ok(ViewerResult::FoundSite(Viewer { site, user_session })), SiteDomainResult::Slug(slug) => { let html = - Self::missing_site_output(ctx, locales, domain, Some(slug)).await?; + Self::missing_site_output(ctx, locales, domain, Some(&slug)).await?; - return Ok(ViewerResult::MissingSite(html)); + Ok(ViewerResult::MissingSite(html)) } SiteDomainResult::CustomDomain(domain) => { - let html = Self::missing_site_output(ctx, locales, domain, None).await?; - return Ok(ViewerResult::MissingSite(html)); + let html = Self::missing_site_output(ctx, locales, &domain, None).await?; + Ok(ViewerResult::MissingSite(html)) } - }; - - Ok(ViewerResult::FoundSite(Viewer { site, user_session })) + } } /// Produce output for cases where a site does not exist. From b17fc4168e34d9b54836dfed66ed0b9bc9326531 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 9 Feb 2025 22:04:26 -0500 Subject: [PATCH 157/306] Change output of SiteDomainResult. --- deepwell/src/services/domain/service.rs | 23 +++++++++++++---------- deepwell/src/services/domain/structs.rs | 10 ++++------ deepwell/src/services/view/service.rs | 12 ++++++------ 3 files changed, 23 insertions(+), 22 deletions(-) diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index e6241893fb..d9c413ee07 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -120,25 +120,28 @@ impl DomainService { /// Gets the site corresponding with the given domain. /// /// Returns one of three variants: - /// * `Found` — Site retrieved from the domain. - /// * `Slug` — Site does not exist. 
If it did, domain would be a canonical domain. - /// * `CustomDomain` — Site does not exist. If it did, domain would be a custom domain. + /// * `SiteFound` — Site information retrieved from the domain. + /// * `SiteRedirect` — Site found, but needs a redirect to the preferred domain. + /// * `MissingSlug` — Site does not exist. If it did, domain would be a canonical domain. + /// * `MissingCustomDomain` — Site does not exist. If it did, domain would be a custom domain. pub async fn parse_site_from_domain( ctx: &ServiceContext<'_>, domain: &str, ) -> Result<SiteDomainResult> { info!("Getting site for domain '{domain}'"); - /// Helper macro to produce a `Found` enum case. - /// This is needed to get the preferred domain for the return value. + /// Helper macro to produce the result when the site exists. + /// This gets the preferred domain for the return value. macro_rules! found { ($site:expr) => {{ let config = ctx.config(); let preferred_domain = DomainService::preferred_domain(config, &$site).into_owned(); - SiteDomainResult::Found { - site: $site, - preferred_domain, + + if domain == &preferred_domain { + SiteDomainResult::SiteFound($site) + } else { + SiteDomainResult::SiteRedirect(preferred_domain) } }}; } @@ -154,7 +157,7 @@ impl DomainService { match result { Ok(Some(site)) => Ok(found!(site)), - Ok(None) => Ok(SiteDomainResult::Slug(str!(subdomain))), + Ok(None) => Ok(SiteDomainResult::MissingSiteSlug(str!(subdomain))), Err(error) => Err(error), } } @@ -166,7 +169,7 @@ impl DomainService { let result = Self::site_from_custom_domain_optional(ctx, domain).await; match result { Ok(Some(site)) => Ok(found!(site)), - Ok(None) => Ok(SiteDomainResult::CustomDomain(str!(domain))), + Ok(None) => Ok(SiteDomainResult::MissingCustomDomain(str!(domain))), Err(error) => Err(error), } } diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index 1a5342f04a..6cc92cf70b 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -23,12 +23,10 @@ use crate::models::site::Model as SiteModel; #[derive(Serialize, Debug, Clone)] #[serde(rename_all = "snake_case", tag = "result", content = "data")] pub enum SiteDomainResult { - Found { - site: SiteModel, - preferred_domain: String, - }, - Slug(String), - CustomDomain(String), + SiteFound(SiteModel), + SiteRedirect(String), + MissingSiteSlug(String), + MissingCustomDomain(String), } #[derive(Serialize, Debug, Clone)] diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index e91ab54fa9..36ed715d00 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -512,17 +512,17 @@ impl ViewService { // Get site data match DomainService::parse_site_from_domain(ctx, domain).await? 
{ - SiteDomainResult::Found { - site, - preferred_domain, - } => Ok(ViewerResult::FoundSite(Viewer { site, user_session })), - SiteDomainResult::Slug(slug) => { + SiteDomainResult::SiteFound(site) => { + Ok(ViewerResult::FoundSite(Viewer { site, user_session })) + } + SiteDomainResult::SiteRedirect(_preferred_domain) => todo!(), + SiteDomainResult::MissingSiteSlug(slug) => { let html = Self::missing_site_output(ctx, locales, domain, Some(&slug)).await?; Ok(ViewerResult::MissingSite(html)) } - SiteDomainResult::CustomDomain(domain) => { + SiteDomainResult::MissingCustomDomain(domain) => { let html = Self::missing_site_output(ctx, locales, &domain, None).await?; Ok(ViewerResult::MissingSite(html)) } From 55118bbe2e100bcbdc0b54cc101e29b1a80c4546 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 00:42:24 -0500 Subject: [PATCH 158/306] Add TODO comments for cache items in DEEPWELL. --- deepwell/src/services/domain/service.rs | 1 + deepwell/src/services/site/service.rs | 2 ++ 2 files changed, 3 insertions(+) diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index d9c413ee07..a3080f4baa 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -25,6 +25,7 @@ // TODO disallow custom domains that are subdomains of the main domain or files domain // TODO disallow custom domain for default site (www) +// TODO expire redis cache on change to domains use super::prelude::*; use crate::models::site::{self, Entity as Site, Model as SiteModel}; diff --git a/deepwell/src/services/site/service.rs b/deepwell/src/services/site/service.rs index b06298c5c5..ba5f550805 100644 --- a/deepwell/src/services/site/service.rs +++ b/deepwell/src/services/site/service.rs @@ -18,6 +18,8 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ +// TODO expire redis cache on change to domains + use wikidot_normalize::normalize; use super::prelude::*; From 02c0b8742b39048309e9f004d1f75e91be56c04d Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 00:42:56 -0500 Subject: [PATCH 159/306] Update return value for site_from_domain API call. 
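Both sides now agree on the wire shape: DEEPWELL serializes SiteDomainResult with tag = "result" and content = "data", and wws declares a matching Deserialize enum that keeps only the fields it needs. In the site_found case wws picks site_id and slug out of the serialized SiteModel, and serde ignores the remaining fields by default.

Expected wire shapes (illustrative, assuming SiteModel exposes site_id and slug at the top level):

    // {"result": "site_found",            "data": {"site_id": 1, "slug": "www", ...}}
    // {"result": "site_redirect",         "data": {"domain": "scpwiki.com"}}
    // {"result": "missing_site_slug",     "data": {"slug": "some-site"}}
    // {"result": "missing_custom_domain", "data": {"domain": "example.com"}}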
--- deepwell/src/services/domain/structs.rs | 12 ++++++++--- wws/src/deepwell.rs | 27 ++++++++++++++++++++----- 2 files changed, 31 insertions(+), 8 deletions(-) diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index 6cc92cf70b..fa245d6133 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -24,9 +24,15 @@ use crate::models::site::Model as SiteModel; #[serde(rename_all = "snake_case", tag = "result", content = "data")] pub enum SiteDomainResult { SiteFound(SiteModel), - SiteRedirect(String), - MissingSiteSlug(String), - MissingCustomDomain(String), + SiteRedirect { + domain: String, + }, + MissingSiteSlug { + slug: String, + }, + MissingCustomDomain { + domain: String, + }, } #[derive(Serialize, Debug, Clone)] diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index d2421865d1..81241ec641 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -20,7 +20,7 @@ use crate::error::Result; use jsonrpsee::{core::client::ClientT, http_client::HttpClient, rpc_params}; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use std::time::Duration; const JSONRPC_MAX_REQUEST: u32 = 16 * 1024; @@ -125,8 +125,8 @@ impl Deepwell { Ok(site_data) } - pub async fn get_site_from_domain(&self, domain: &str) -> Result<Option<SiteData>> { - let site_data: Option<SiteData> = self + pub async fn get_site_from_domain(&self, domain: &str) -> Result<SiteDomainResult> { + let site_data: SiteDomainResult = self .client .request("site_from_domain", rpc_params![domain]) .await?; @@ -176,8 +176,25 @@ pub struct Domains { #[derive(Deserialize, Debug, Clone)] pub struct SiteData { pub site_id: i64, - pub site_slug: String, - pub should_redirect: Option<String>, + pub slug: String, +} + +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "snake_case", tag = "result", content = "data")] +pub enum SiteDomainResult { + SiteFound { + site_id: i64, + slug: String, + }, + SiteRedirect { + domain: String, + }, + MissingSiteSlug { + slug: String, + }, + MissingCustomDomain { + domain: String, + }, } #[derive(Deserialize, Debug, Clone)] From 9638c091523392cb1c8a8a9047fca1abe2d2fe0f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 00:55:08 -0500 Subject: [PATCH 160/306] Intake SiteDomainResult from deepwell and store in cache. --- wws/src/cache.rs | 70 ++++++++++++++++++++++++++++++++--------- wws/src/deepwell.rs | 17 +++------- wws/src/handler/file.rs | 5 +-- wws/src/handler/mod.rs | 8 ++--- wws/src/host.rs | 41 ++++++++++++------------ wws/src/main.rs | 3 ++ wws/src/state.rs | 26 ++++++--------- 7 files changed, 96 insertions(+), 74 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 73100ae420..2361ffdb67 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -23,8 +23,12 @@ //! Whenever you make changes to this module, make sure that the code is //! compatible with DEEPWELL's Redis code. -use crate::{deepwell::FileData, error::Result}; +use crate::{ + deepwell::{FileData, SiteData, SiteDomainResult}, + error::Result, +}; use redis::{aio::MultiplexedConnection, AsyncCommands}; +use ref_map::*; macro_rules! 
get_connection { ($client:expr) => { @@ -70,21 +74,35 @@ impl Cache { Ok(()) } - pub async fn get_site_from_domain(&self, domain: &str) -> Result<Option<(i64, String)>> { - type SiteDataTuple = (Option<i64>, Option<String>); + pub async fn get_site_from_domain(&self, domain: &str) -> Result<Option<SiteDomainResult>> { + type SiteDomainDataTuple = (Option<String>, Option<i64>, Option<String>, Option<String>); let mut conn = get_connection!(self.client); let key = format!("site_domain:{domain}"); - let fields = &["id", "slug"]; - let values = conn.hget::<_, _, SiteDataTuple>(&key, fields).await?; - match values { - // Ideally, all of these should be non-null, if it's a cache hit. - (Some(site_id), Some(site_slug)) => Ok(Some((site_id, site_slug))), + let fields = &["variant", "id", "slug", "domain"]; + let (variant, site_id, slug, domain) = + conn.hget::<_, _, SiteDomainDataTuple>(&key, fields).await?; + let variant = variant.ref_map(|s| s.as_str()); + match (variant, site_id, slug, domain) { + // Each variant value has a set of fields that should be set for it + // If a different group of fields are set, then it's invalid + (Some("site_found"), Some(site_id), Some(slug), None) => { + Ok(Some(SiteDomainResult::SiteFound { site_id, slug })) + } + (Some("site_redirect"), None, None, Some(domain)) => { + Ok(Some(SiteDomainResult::SiteRedirect { domain })) + } + (Some("missing_site_slug"), None, Some(slug), None) => { + Ok(Some(SiteDomainResult::MissingSiteSlug { slug })) + } + (Some("missing_custom_domain"), None, None, Some(domain)) => { + Ok(Some(SiteDomainResult::MissingCustomDomain { domain })) + } // Cache miss - (None, None) => Ok(None), + (None, None, None, None) => Ok(None), - // Some fields are set and others aren't. Let's clear all them out. + // Not a valid variant or set of fields _ => { clear_inconsistent_fields(&mut conn, &key, fields).await?; Ok(None) @@ -95,13 +113,35 @@ impl Cache { pub async fn set_site_from_domain( &self, domain: &str, - site_id: i64, - site_slug: &str, + domain_data: &SiteDomainResult, ) -> Result<()> { let mut conn = get_connection!(self.client); let key = format!("site_domain:{domain}"); + + let (variant, site_id, slug, domain): ( + &'static str, + Option<i64>, + Option<&str>, + Option<&str>, + ) = match domain_data { + SiteDomainResult::SiteFound { site_id, slug } => { + ("site_found", Some(*site_id), Some(slug), Some(domain)) + } + SiteDomainResult::SiteRedirect { domain } => { + ("site_redirect", None, None, Some(domain)) + } + SiteDomainResult::MissingSiteSlug { slug } => { + ("missing_site_slug", None, Some(slug), None) + } + SiteDomainResult::MissingCustomDomain { domain } => { + ("missing_custom_domain", None, None, Some(domain)) + } + }; + + hset!(conn, key, "variant", variant); hset!(conn, key, "id", site_id); - hset!(conn, key, "slug", site_slug); + hset!(conn, key, "slug", slug); + hset!(conn, key, "domain", domain); Ok(()) } @@ -132,7 +172,7 @@ impl Cache { let fields = &["id", "mime", "size", "s3_hash"]; let values = conn.hget::<_, _, FileDataTuple>(&key, fields).await?; match values { - // Cache hit + // Ideally, all of these should be non-null, if it's a cache hit. (Some(file_id), Some(mime), Some(size), Some(s3_hash)) => Ok(Some(FileData { file_id, mime, @@ -143,7 +183,7 @@ impl Cache { // Cache miss (None, None, None, None) => Ok(None), - // Like above, we clear out inconsistent fields + // Some fields are set and others aren't. Let's clear all them out. 
_ => { clear_inconsistent_fields(&mut conn, &key, fields).await?; Ok(None) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 81241ec641..b9122ffed0 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -182,19 +182,10 @@ pub struct SiteData { #[derive(Deserialize, Debug, Clone)] #[serde(rename_all = "snake_case", tag = "result", content = "data")] pub enum SiteDomainResult { - SiteFound { - site_id: i64, - slug: String, - }, - SiteRedirect { - domain: String, - }, - MissingSiteSlug { - slug: String, - }, - MissingCustomDomain { - domain: String, - }, + SiteFound { site_id: i64, slug: String }, + SiteRedirect { domain: String }, + MissingSiteSlug { slug: String }, + MissingCustomDomain { domain: String }, } #[derive(Deserialize, Debug, Clone)] diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index ef4c0b2f84..acc1d262fa 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -19,10 +19,7 @@ */ use super::get_site_info; -use crate::{ - error::ServerErrorCode, - state::ServerState, -}; +use crate::{error::ServerErrorCode, state::ServerState}; use axum::{ body::Body, extract::{Path, State}, diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index f09b19d219..17cbf6425d 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -156,12 +156,8 @@ pub async fn handle_host_delegation( } // Default site redirect // e.g. "www.wikijump.com/foo" -> "wikijump.com/foo" - SiteAndHost::DefaultRedirect => { - let destination = format!( - "https://{}{}", - state.domains.main_domain_no_dot, - get_path(request.uri()), - ); + SiteAndHost::MainSiteRedirect { domain } => { + let destination = format!("https://{}{}", domain, get_path(request.uri()),); Redirect::permanent(&destination).into_response() } // Files site route handling diff --git a/wws/src/host.rs b/wws/src/host.rs index 9c3980ef27..2f50ffc824 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -18,7 +18,11 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ -use crate::{deepwell::Domains, error::Result, state::ServerState}; +use crate::{ + deepwell::{Domains, SiteData}, + error::Result, + state::ServerState, +}; /// The slug for the default site. /// @@ -44,16 +48,15 @@ pub enum SiteAndHost<'a> { /// Main router, non-existent site, custom domain. MainCustomMissing, + /// Main router, request to preferred domain for the site. + MainSiteRedirect { domain: &'a str }, + /// Files router, existent site. File { site_id: i64, site_slug: &'a str }, /// Files router, non-existent site. FileMissing { site_slug: &'a str }, - /// Main router, request to canonical `www`, should be redirected to the root domain. - /// Special case. - DefaultRedirect, - /// Request is the root domain on the files router, which has no meaning. /// Special case. FileRoot, @@ -68,17 +71,7 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S .. } = state.domains; - if hostname == main_domain_no_dot { - // First, check if it's the default domain by itself. - main_site_slug(state, hostname, None).await - } else if let Some(site_slug) = hostname.strip_suffix(main_domain) { - if site_slug == DEFAULT_SITE_SLUG { - // We should be redirecting to the non-www version of the link - return Ok(SiteAndHost::DefaultRedirect); - } - - main_site_slug(state, hostname, Some(site_slug)).await - } else if let Some(site_slug) = hostname.strip_suffix(files_domain) { + if let Some(site_slug) = hostname.strip_suffix(files_domain) { // Determine if it's a files domain. 
let site_id = state.get_site_from_slug(site_slug).await?; match site_id { @@ -114,17 +107,23 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S info!(domain = hostname, "Handling lone files site request"); Ok(SiteAndHost::FileRoot) } else { - // If it's anything else, it must be a custom domain. - // Do a lookup, then set the site data as appropriate. + // If it's anything else, it must be a canonical or custom domain. + // Let's do a lookup and let DomainService handle it for us. + + /* + TODO match state.get_site_from_domain(hostname).await? { - Some((site_id, site_slug)) => { + Some(SiteData { site_id, slug: site_slug }) => { // Site exists info!( domain = hostname, site_id = site_id, "Routing main site request (custom)", ); - Ok(SiteAndHost::MainCustom { site_id, site_slug }) + match should_redirect_site(hostname, preferred_domain) { + Some(preferred_domain) => Ok(SiteAndHost::MainSiteRedirect { domain: &preferred_domain }), + None => Ok(SiteAndHost::MainCustom { site_id, site_slug }), + } } None => { // No such site @@ -132,6 +131,8 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S Ok(SiteAndHost::MainCustomMissing) } } + */ + todo!() } } diff --git a/wws/src/main.rs b/wws/src/main.rs index a0dac764b6..7e6177cb25 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -23,6 +23,9 @@ //! Depending on the hostname, requests are routed to either framerail //! or given to logic to serve wjfiles data. +#[macro_use] +extern crate serde; + #[macro_use] extern crate str_macro; diff --git a/wws/src/state.rs b/wws/src/state.rs index b1b9a357ba..e3615287ee 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -21,7 +21,7 @@ use crate::{ cache::Cache, config::Secrets, - deepwell::{Deepwell, Domains, FileData, PageData, SiteData}, + deepwell::{Deepwell, Domains, FileData, PageData, SiteData, SiteDomainResult}, error::Result, framerail::Framerail, }; @@ -98,23 +98,17 @@ impl ServerStateInner { } } - pub async fn get_site_from_domain(&self, site_domain: &str) -> Result<Option<(i64, String)>> { + pub async fn get_site_from_domain(&self, site_domain: &str) -> Result<SiteDomainResult> { match self.cache.get_site_from_domain(site_domain).await? { - Some((site_id, site_slug)) => Ok(Some((site_id, site_slug))), - None => match self.deepwell.get_site_from_domain(site_domain).await? { - None => Ok(None), - Some(SiteData { - site_id, - site_slug, - .. - }) => { - self.cache - .set_site_from_domain(site_domain, site_id, &site_slug) - .await?; + Some(domain_data) => Ok(domain_data), + None => { + let domain_data = self.deepwell.get_site_from_domain(site_domain).await?; + self.cache + .set_site_from_domain(site_domain, &domain_data) + .await?; - Ok(Some((site_id, site_slug))) - } - }, + Ok(domain_data) + } } } From 4b089b0565918453ba1e37cce240ff105492a079 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 00:55:41 -0500 Subject: [PATCH 161/306] Remove unused struct. 
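For reference, the caching added in the previous patch stores each lookup as a Redis hash keyed by site_domain:{requested-domain}, holding a "variant" field plus whichever of "id", "slug", "domain" that variant uses. A hypothetical redis-cli view of a cached redirect entry (both hostnames invented for illustration):

redis> HGETALL site_domain:scpwiki.example.com
1) "variant"
2) "site_redirect"
3) "domain"
4) "scp-wiki.wikijump.com"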
--- deepwell/src/services/domain/structs.rs | 7 ------- 1 file changed, 7 deletions(-) diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index fa245d6133..49753ff5c2 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -35,13 +35,6 @@ pub enum SiteDomainResult { }, } -#[derive(Serialize, Debug, Clone)] -pub struct SiteDomainData { - #[serde(flatten)] - pub site: SiteModel, - pub preferred_domain: String, -} - #[derive(Deserialize, Debug, Clone)] pub struct CreateCustomDomain { pub domain: String, From 24734593b47fd5cf75c11886d6ec6b2846b44a4f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 00:57:04 -0500 Subject: [PATCH 162/306] Rename SiteDomainResult -> SiteDomainInfo. --- deepwell/src/endpoints/domain.rs | 4 ++-- deepwell/src/services/domain/service.rs | 10 +++++----- deepwell/src/services/domain/structs.rs | 2 +- deepwell/src/services/view/service.rs | 10 +++++----- wws/src/cache.rs | 24 +++++++++++------------- wws/src/deepwell.rs | 6 +++--- wws/src/state.rs | 4 ++-- 7 files changed, 29 insertions(+), 31 deletions(-) diff --git a/deepwell/src/endpoints/domain.rs b/deepwell/src/endpoints/domain.rs index 31c775fc07..aa9e7b799d 100644 --- a/deepwell/src/endpoints/domain.rs +++ b/deepwell/src/endpoints/domain.rs @@ -20,12 +20,12 @@ use super::prelude::*; use crate::models::site_domain::Model as SiteDomainModel; -use crate::services::domain::{CreateCustomDomain, DomainService, SiteDomainResult}; +use crate::services::domain::{CreateCustomDomain, DomainService, SiteDomainInfo}; pub async fn site_get_from_domain( ctx: &ServiceContext<'_>, params: Params<'static>, -) -> Result<SiteDomainResult> { +) -> Result<SiteDomainInfo> { let domain: String = params.one()?; DomainService::parse_site_from_domain(ctx, &domain).await } diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index a3080f4baa..9ffb02d3cf 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -128,7 +128,7 @@ impl DomainService { pub async fn parse_site_from_domain( ctx: &ServiceContext<'_>, domain: &str, - ) -> Result<SiteDomainResult> { + ) -> Result<SiteDomainInfo> { info!("Getting site for domain '{domain}'"); /// Helper macro to produce the result when the site exists. 
@@ -140,9 +140,9 @@ impl DomainService { DomainService::preferred_domain(config, &$site).into_owned(); if domain == &preferred_domain { - SiteDomainResult::SiteFound($site) + SiteDomainInfo::SiteFound($site) } else { - SiteDomainResult::SiteRedirect(preferred_domain) + SiteDomainInfo::SiteRedirect(preferred_domain) } }}; } @@ -158,7 +158,7 @@ impl DomainService { match result { Ok(Some(site)) => Ok(found!(site)), - Ok(None) => Ok(SiteDomainResult::MissingSiteSlug(str!(subdomain))), + Ok(None) => Ok(SiteDomainInfo::MissingSiteSlug(str!(subdomain))), Err(error) => Err(error), } } @@ -170,7 +170,7 @@ impl DomainService { let result = Self::site_from_custom_domain_optional(ctx, domain).await; match result { Ok(Some(site)) => Ok(found!(site)), - Ok(None) => Ok(SiteDomainResult::MissingCustomDomain(str!(domain))), + Ok(None) => Ok(SiteDomainInfo::MissingCustomDomain(str!(domain))), Err(error) => Err(error), } } diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index 49753ff5c2..644822fa2f 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -22,7 +22,7 @@ use crate::models::site::Model as SiteModel; #[derive(Serialize, Debug, Clone)] #[serde(rename_all = "snake_case", tag = "result", content = "data")] -pub enum SiteDomainResult { +pub enum SiteDomainInfo { SiteFound(SiteModel), SiteRedirect { domain: String, diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index 36ed715d00..bb5f5004fd 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -33,7 +33,7 @@ use super::prelude::*; use crate::models::page::Model as PageModel; use crate::models::page_revision::Model as PageRevisionModel; use crate::models::site::Model as SiteModel; -use crate::services::domain::SiteDomainResult; +use crate::services::domain::SiteDomainInfo; use crate::services::render::RenderOutput; use crate::services::special_page::{GetSpecialPageOutput, SpecialPageType}; use crate::services::{ @@ -512,17 +512,17 @@ impl ViewService { // Get site data match DomainService::parse_site_from_domain(ctx, domain).await? { - SiteDomainResult::SiteFound(site) => { + SiteDomainInfo::SiteFound(site) => { Ok(ViewerResult::FoundSite(Viewer { site, user_session })) } - SiteDomainResult::SiteRedirect(_preferred_domain) => todo!(), - SiteDomainResult::MissingSiteSlug(slug) => { + SiteDomainInfo::SiteRedirect(_preferred_domain) => todo!(), + SiteDomainInfo::MissingSiteSlug(slug) => { let html = Self::missing_site_output(ctx, locales, domain, Some(&slug)).await?; Ok(ViewerResult::MissingSite(html)) } - SiteDomainResult::MissingCustomDomain(domain) => { + SiteDomainInfo::MissingCustomDomain(domain) => { let html = Self::missing_site_output(ctx, locales, &domain, None).await?; Ok(ViewerResult::MissingSite(html)) } diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 2361ffdb67..9f7bd651b3 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -24,7 +24,7 @@ //! compatible with DEEPWELL's Redis code. 
use crate::{ - deepwell::{FileData, SiteData, SiteDomainResult}, + deepwell::{FileData, SiteData, SiteDomainInfo}, error::Result, }; use redis::{aio::MultiplexedConnection, AsyncCommands}; @@ -74,7 +74,7 @@ impl Cache { Ok(()) } - pub async fn get_site_from_domain(&self, domain: &str) -> Result<Option<SiteDomainResult>> { + pub async fn get_site_from_domain(&self, domain: &str) -> Result<Option<SiteDomainInfo>> { type SiteDomainDataTuple = (Option<String>, Option<i64>, Option<String>, Option<String>); let mut conn = get_connection!(self.client); @@ -87,16 +87,16 @@ impl Cache { // Each variant value has a set of fields that should be set for it // If a different group of fields are set, then it's invalid (Some("site_found"), Some(site_id), Some(slug), None) => { - Ok(Some(SiteDomainResult::SiteFound { site_id, slug })) + Ok(Some(SiteDomainInfo::SiteFound { site_id, slug })) } (Some("site_redirect"), None, None, Some(domain)) => { - Ok(Some(SiteDomainResult::SiteRedirect { domain })) + Ok(Some(SiteDomainInfo::SiteRedirect { domain })) } (Some("missing_site_slug"), None, Some(slug), None) => { - Ok(Some(SiteDomainResult::MissingSiteSlug { slug })) + Ok(Some(SiteDomainInfo::MissingSiteSlug { slug })) } (Some("missing_custom_domain"), None, None, Some(domain)) => { - Ok(Some(SiteDomainResult::MissingCustomDomain { domain })) + Ok(Some(SiteDomainInfo::MissingCustomDomain { domain })) } // Cache miss @@ -113,7 +113,7 @@ impl Cache { pub async fn set_site_from_domain( &self, domain: &str, - domain_data: &SiteDomainResult, + domain_data: &SiteDomainInfo, ) -> Result<()> { let mut conn = get_connection!(self.client); let key = format!("site_domain:{domain}"); @@ -124,16 +124,14 @@ impl Cache { Option<&str>, Option<&str>, ) = match domain_data { - SiteDomainResult::SiteFound { site_id, slug } => { + SiteDomainInfo::SiteFound { site_id, slug } => { ("site_found", Some(*site_id), Some(slug), Some(domain)) } - SiteDomainResult::SiteRedirect { domain } => { - ("site_redirect", None, None, Some(domain)) - } - SiteDomainResult::MissingSiteSlug { slug } => { + SiteDomainInfo::SiteRedirect { domain } => ("site_redirect", None, None, Some(domain)), + SiteDomainInfo::MissingSiteSlug { slug } => { ("missing_site_slug", None, Some(slug), None) } - SiteDomainResult::MissingCustomDomain { domain } => { + SiteDomainInfo::MissingCustomDomain { domain } => { ("missing_custom_domain", None, None, Some(domain)) } }; diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index b9122ffed0..39db16d179 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -125,8 +125,8 @@ impl Deepwell { Ok(site_data) } - pub async fn get_site_from_domain(&self, domain: &str) -> Result<SiteDomainResult> { - let site_data: SiteDomainResult = self + pub async fn get_site_from_domain(&self, domain: &str) -> Result<SiteDomainInfo> { + let site_data: SiteDomainInfo = self .client .request("site_from_domain", rpc_params![domain]) .await?; @@ -181,7 +181,7 @@ pub struct SiteData { #[derive(Deserialize, Debug, Clone)] #[serde(rename_all = "snake_case", tag = "result", content = "data")] -pub enum SiteDomainResult { +pub enum SiteDomainInfo { SiteFound { site_id: i64, slug: String }, SiteRedirect { domain: String }, MissingSiteSlug { slug: String }, diff --git a/wws/src/state.rs b/wws/src/state.rs index e3615287ee..d956823513 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -21,7 +21,7 @@ use crate::{ cache::Cache, config::Secrets, - deepwell::{Deepwell, Domains, FileData, PageData, SiteData, SiteDomainResult}, + 
deepwell::{Deepwell, Domains, FileData, PageData, SiteData, SiteDomainInfo}, error::Result, framerail::Framerail, }; @@ -98,7 +98,7 @@ impl ServerStateInner { } } - pub async fn get_site_from_domain(&self, site_domain: &str) -> Result<SiteDomainResult> { + pub async fn get_site_from_domain(&self, site_domain: &str) -> Result<SiteDomainInfo> { match self.cache.get_site_from_domain(site_domain).await? { Some(domain_data) => Ok(domain_data), None => { From 1f649a44fc095fbbbf54cc89ddf91ade6a753db7 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 00:59:19 -0500 Subject: [PATCH 163/306] Fix deepwell compilation with SiteDomainInfo field changes. --- deepwell/src/services/domain/service.rs | 12 +++++++++--- deepwell/src/services/domain/structs.rs | 12 +++--------- deepwell/src/services/view/service.rs | 8 +++++--- 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index 9ffb02d3cf..94b1adddf6 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -142,7 +142,9 @@ impl DomainService { if domain == &preferred_domain { SiteDomainInfo::SiteFound($site) } else { - SiteDomainInfo::SiteRedirect(preferred_domain) + SiteDomainInfo::SiteRedirect { + domain: preferred_domain, + } } }}; } @@ -158,7 +160,9 @@ impl DomainService { match result { Ok(Some(site)) => Ok(found!(site)), - Ok(None) => Ok(SiteDomainInfo::MissingSiteSlug(str!(subdomain))), + Ok(None) => Ok(SiteDomainInfo::MissingSiteSlug { + slug: str!(subdomain), + }), Err(error) => Err(error), } } @@ -170,7 +174,9 @@ impl DomainService { let result = Self::site_from_custom_domain_optional(ctx, domain).await; match result { Ok(Some(site)) => Ok(found!(site)), - Ok(None) => Ok(SiteDomainInfo::MissingCustomDomain(str!(domain))), + Ok(None) => Ok(SiteDomainInfo::MissingCustomDomain { + domain: str!(domain), + }), Err(error) => Err(error), } } diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index 644822fa2f..15305f7367 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -24,15 +24,9 @@ use crate::models::site::Model as SiteModel; #[serde(rename_all = "snake_case", tag = "result", content = "data")] pub enum SiteDomainInfo { SiteFound(SiteModel), - SiteRedirect { - domain: String, - }, - MissingSiteSlug { - slug: String, - }, - MissingCustomDomain { - domain: String, - }, + SiteRedirect { domain: String }, + MissingSiteSlug { slug: String }, + MissingCustomDomain { domain: String }, } #[derive(Deserialize, Debug, Clone)] diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index bb5f5004fd..eac2215d5b 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -515,14 +515,16 @@ impl ViewService { SiteDomainInfo::SiteFound(site) => { Ok(ViewerResult::FoundSite(Viewer { site, user_session })) } - SiteDomainInfo::SiteRedirect(_preferred_domain) => todo!(), - SiteDomainInfo::MissingSiteSlug(slug) => { + SiteDomainInfo::SiteRedirect { + domain: _preferred_domain, + } => todo!(), + SiteDomainInfo::MissingSiteSlug { slug } => { let html = Self::missing_site_output(ctx, locales, domain, Some(&slug)).await?; Ok(ViewerResult::MissingSite(html)) } - SiteDomainInfo::MissingCustomDomain(domain) => { + SiteDomainInfo::MissingCustomDomain { domain } => { let html = Self::missing_site_output(ctx, locales, &domain, 
None).await?; Ok(ViewerResult::MissingSite(html)) } From ef432ab8f2ede895c9420ee43ababe6832cd4070 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 20:53:46 -0500 Subject: [PATCH 164/306] Address some unused value warnings. --- wws/src/cache.rs | 2 +- wws/src/deepwell.rs | 2 +- wws/src/host.rs | 50 +-------------------------------------------- wws/src/main.rs | 3 --- 4 files changed, 3 insertions(+), 54 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 9f7bd651b3..ea4298ce82 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -24,7 +24,7 @@ //! compatible with DEEPWELL's Redis code. use crate::{ - deepwell::{FileData, SiteData, SiteDomainInfo}, + deepwell::{FileData, SiteDomainInfo}, error::Result, }; use redis::{aio::MultiplexedConnection, AsyncCommands}; diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 39db16d179..9a9be09c1d 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -20,7 +20,7 @@ use crate::error::Result; use jsonrpsee::{core::client::ClientT, http_client::HttpClient, rpc_params}; -use serde::{Deserialize, Serialize}; +use serde::Deserialize; use std::time::Duration; const JSONRPC_MAX_REQUEST: u32 = 16 * 1024; diff --git a/wws/src/host.rs b/wws/src/host.rs index 2f50ffc824..6e36476f18 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -18,11 +18,7 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ -use crate::{ - deepwell::{Domains, SiteData}, - error::Result, - state::ServerState, -}; +use crate::{deepwell::Domains, error::Result, state::ServerState}; /// The slug for the default site. /// @@ -64,8 +60,6 @@ pub enum SiteAndHost<'a> { pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<SiteAndHost<'a>> { let Domains { - ref main_domain, - ref main_domain_no_dot, ref files_domain, ref files_domain_no_dot, .. @@ -135,45 +129,3 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S todo!() } } - -/// Process a request from `[site-slug].wikijump.com`. -/// -/// Because `wikijump.com` (default) and specifying a slug -/// have essentially the same code paths, we avoid code -/// duplication by using this helper function. -async fn main_site_slug<'a>( - state: &ServerState, - hostname: &str, - site_slug: Option<&'a str>, -) -> Result<SiteAndHost<'a>> { - // This is our way of passing in "is default site" or not. - // If it's None, it's 'wikijump.com', if it's Some(_), it's 'xxx.wikijump.com'. - let (site_slug, is_default) = match site_slug { - Some(site_slug) => (site_slug, false), - None => (DEFAULT_SITE_SLUG, true), - }; - - // Return site present or missing response based on site ID. - let site_id = state.get_site_from_slug(site_slug).await?; - match site_id { - Some(site_id) => { - // Site exists - info!( - domain = hostname, - site_id = site_id, - "Routing main site request ({})", - if is_default { "default" } else { "slug" }, - ); - Ok(SiteAndHost::Main { site_id, site_slug }) - } - None => { - // No such site - warn!( - domain = hostname, - site_slug = site_slug, - "No such site with slug (main)", - ); - Ok(SiteAndHost::MainMissing { site_slug }) - } - } -} diff --git a/wws/src/main.rs b/wws/src/main.rs index 7e6177cb25..a0dac764b6 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -23,9 +23,6 @@ //! Depending on the hostname, requests are routed to either framerail //! or given to logic to serve wjfiles data. 
-#[macro_use] -extern crate serde; - #[macro_use] extern crate str_macro; From 3add813b52a2d924d834eb07b438b7e8c7e2a804 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 22:00:00 -0500 Subject: [PATCH 165/306] Convert lookup_host() to fetch site from deepwell/cache. --- wws/src/handler/mod.rs | 21 +++++------ wws/src/host.rs | 81 ++++++++++++++++++++++++++++-------------- 2 files changed, 63 insertions(+), 39 deletions(-) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 17cbf6425d..df8e4008e8 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -142,29 +142,24 @@ pub async fn handle_host_delegation( add_headers!(site_id, site_slug); forward_request!(main_router) } - SiteAndHost::MainCustom { site_id, site_slug } => { - // NOTE: The difference here is site_slug here is String not &str - add_headers!(site_id, site_slug); - forward_request!(main_router) + // Main site redirect + SiteAndHost::MainSiteRedirect { domain } => { + let destination = format!("https://{}{}", domain, get_path(request.uri())); + Redirect::permanent(&destination).into_response() } // Main site missing - SiteAndHost::MainMissing { site_slug } => { + SiteAndHost::MainSiteSlugMissing { ref site_slug } => { ServerErrorCode::SiteNotFound { site_slug }.into_response() } - SiteAndHost::MainCustomMissing => { - ServerErrorCode::CustomDomainNotFound { domain: &hostname }.into_response() - } - // Default site redirect - // e.g. "www.wikijump.com/foo" -> "wikijump.com/foo" - SiteAndHost::MainSiteRedirect { domain } => { - let destination = format!("https://{}{}", domain, get_path(request.uri()),); - Redirect::permanent(&destination).into_response() + SiteAndHost::MainCustomMissing { ref domain } => { + ServerErrorCode::CustomDomainNotFound { domain }.into_response() } // Files site route handling SiteAndHost::File { site_id, site_slug } => { add_headers!(site_id, site_slug); forward_request!(files_router) } + // Files site missing SiteAndHost::FileMissing { site_slug } => { ServerErrorCode::SiteNotFound { site_slug }.into_response() } diff --git a/wws/src/host.rs b/wws/src/host.rs index 6e36476f18..c46a26a52b 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -18,7 +18,11 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ -use crate::{deepwell::Domains, error::Result, state::ServerState}; +use crate::{ + deepwell::{Domains, SiteDomainInfo}, + error::Result, + state::ServerState, +}; /// The slug for the default site. /// @@ -33,19 +37,16 @@ pub const DEFAULT_SITE_SLUG: &str = "www"; #[derive(Debug)] pub enum SiteAndHost<'a> { /// Main router existent site, canonical domain. - Main { site_id: i64, site_slug: &'a str }, + Main { site_id: i64, site_slug: String }, /// Main router, non-existent site, canonical domain. - MainMissing { site_slug: &'a str }, - - /// Main router, existent site, custom domain. - MainCustom { site_id: i64, site_slug: String }, + MainSiteSlugMissing { site_slug: String }, /// Main router, non-existent site, custom domain. - MainCustomMissing, + MainCustomMissing { domain: String }, /// Main router, request to preferred domain for the site. - MainSiteRedirect { domain: &'a str }, + MainSiteRedirect { domain: String }, /// Files router, existent site. 
File { site_id: i64, site_slug: &'a str }, @@ -72,19 +73,21 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S Some(site_id) => { // Site exists info!( + r#type = "files", domain = hostname, site_slug = site_slug, site_id = site_id, - "Routing files site request", + "Routing site request", ); Ok(SiteAndHost::File { site_id, site_slug }) } None => { // No such site warn!( + r#type = "files", domain = hostname, site_slug = site_slug, - "No such site with slug (files)", + "No such site with slug", ); Ok(SiteAndHost::FileMissing { site_slug }) } @@ -98,34 +101,60 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S // // Since this is expected to be uncommon, we're putting it after // the site files check. - info!(domain = hostname, "Handling lone files site request"); + info!( + r#type = "files", + domain = hostname, + "Handling lone files site request", + ); Ok(SiteAndHost::FileRoot) } else { // If it's anything else, it must be a canonical or custom domain. // Let's do a lookup and let DomainService handle it for us. + // + // This also caches the lookup, to avoid us having to talk to + // DEEPWELL more than necessary. + // + // Then we map it to the corresponding SiteAndHost variant. - /* - TODO match state.get_site_from_domain(hostname).await? { - Some(SiteData { site_id, slug: site_slug }) => { - // Site exists + SiteDomainInfo::SiteFound { + site_id, + slug: site_slug, + } => { info!( + r#type = "main", domain = hostname, site_id = site_id, - "Routing main site request (custom)", + site_slug = site_slug, + "Routing site request", ); - match should_redirect_site(hostname, preferred_domain) { - Some(preferred_domain) => Ok(SiteAndHost::MainSiteRedirect { domain: &preferred_domain }), - None => Ok(SiteAndHost::MainCustom { site_id, site_slug }), - } + Ok(SiteAndHost::Main { site_id, site_slug }) } - None => { - // No such site - warn!(domain = hostname, "No such site with domain (custom)"); - Ok(SiteAndHost::MainCustomMissing) + SiteDomainInfo::SiteRedirect { domain } => { + info!( + r#type = "main", + domain = domain, + "Found site, but needs redirect to preferred", + ); + Ok(SiteAndHost::MainSiteRedirect { domain }) + } + SiteDomainInfo::MissingSiteSlug { slug: site_slug } => { + info!( + r#type = "main", + domain = hostname, + site_slug = site_slug, + "No such site with slug", + ); + Ok(SiteAndHost::MainSiteSlugMissing { site_slug }) + } + SiteDomainInfo::MissingCustomDomain { domain } => { + info!( + r#type = "main", + domain = domain, + "No such site with custom domain", + ); + Ok(SiteAndHost::MainCustomMissing { domain }) } } - */ - todo!() } } From 96dc5a6479b9d0e5ba0382b860b8433972f8e577 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 22:15:41 -0500 Subject: [PATCH 166/306] Rename SiteDomainInfo -> SiteAndHost in DEEPWELL. 
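The lookup_host() rewrite in the previous patch keys its routing off suffix checks against the configured domains. A small sketch of the expected behaviour, assuming files_domain carries a leading dot while files_domain_no_dot is the bare domain; the concrete values here are assumptions, the real ones come from DEEPWELL's domains config:

fn main() {
    let files_domain = ".wjfiles.com"; // assumed: dotted form, used for suffix stripping
    let files_domain_no_dot = "wjfiles.com"; // assumed: bare form, for the lone-domain case

    // A files hostname strips down to the site slug for the files router.
    assert_eq!(
        "scp-wiki.wjfiles.com".strip_suffix(files_domain),
        Some("scp-wiki"),
    );

    // The bare files domain does not match the dotted suffix, so it falls through
    // to the files_domain_no_dot comparison (the FileRoot special case).
    assert_eq!("wjfiles.com".strip_suffix(files_domain), None);
    assert_eq!("wjfiles.com", files_domain_no_dot);

    // Anything else (canonical or custom domains) goes to the DEEPWELL-backed lookup.
}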
--- deepwell/src/endpoints/domain.rs | 4 ++-- deepwell/src/services/domain/service.rs | 10 +++++----- deepwell/src/services/domain/structs.rs | 2 +- deepwell/src/services/view/service.rs | 10 +++++----- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/deepwell/src/endpoints/domain.rs b/deepwell/src/endpoints/domain.rs index aa9e7b799d..ddd035f455 100644 --- a/deepwell/src/endpoints/domain.rs +++ b/deepwell/src/endpoints/domain.rs @@ -20,12 +20,12 @@ use super::prelude::*; use crate::models::site_domain::Model as SiteDomainModel; -use crate::services::domain::{CreateCustomDomain, DomainService, SiteDomainInfo}; +use crate::services::domain::{CreateCustomDomain, DomainService, SiteAndHost}; pub async fn site_get_from_domain( ctx: &ServiceContext<'_>, params: Params<'static>, -) -> Result<SiteDomainInfo> { +) -> Result<SiteAndHost> { let domain: String = params.one()?; DomainService::parse_site_from_domain(ctx, &domain).await } diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index 94b1adddf6..226464b108 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -128,7 +128,7 @@ impl DomainService { pub async fn parse_site_from_domain( ctx: &ServiceContext<'_>, domain: &str, - ) -> Result<SiteDomainInfo> { + ) -> Result<SiteAndHost> { info!("Getting site for domain '{domain}'"); /// Helper macro to produce the result when the site exists. @@ -140,9 +140,9 @@ impl DomainService { DomainService::preferred_domain(config, &$site).into_owned(); if domain == &preferred_domain { - SiteDomainInfo::SiteFound($site) + SiteAndHost::SiteFound($site) } else { - SiteDomainInfo::SiteRedirect { + SiteAndHost::SiteRedirect { domain: preferred_domain, } } @@ -160,7 +160,7 @@ impl DomainService { match result { Ok(Some(site)) => Ok(found!(site)), - Ok(None) => Ok(SiteDomainInfo::MissingSiteSlug { + Ok(None) => Ok(SiteAndHost::MissingSiteSlug { slug: str!(subdomain), }), Err(error) => Err(error), @@ -174,7 +174,7 @@ impl DomainService { let result = Self::site_from_custom_domain_optional(ctx, domain).await; match result { Ok(Some(site)) => Ok(found!(site)), - Ok(None) => Ok(SiteDomainInfo::MissingCustomDomain { + Ok(None) => Ok(SiteAndHost::MissingCustomDomain { domain: str!(domain), }), Err(error) => Err(error), diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index 15305f7367..32a62062f2 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -22,7 +22,7 @@ use crate::models::site::Model as SiteModel; #[derive(Serialize, Debug, Clone)] #[serde(rename_all = "snake_case", tag = "result", content = "data")] -pub enum SiteDomainInfo { +pub enum SiteAndHost { SiteFound(SiteModel), SiteRedirect { domain: String }, MissingSiteSlug { slug: String }, diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index eac2215d5b..1749d9570c 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -33,7 +33,7 @@ use super::prelude::*; use crate::models::page::Model as PageModel; use crate::models::page_revision::Model as PageRevisionModel; use crate::models::site::Model as SiteModel; -use crate::services::domain::SiteDomainInfo; +use crate::services::domain::SiteAndHost; use crate::services::render::RenderOutput; use crate::services::special_page::{GetSpecialPageOutput, SpecialPageType}; use crate::services::{ @@ -512,19 +512,19 @@ impl ViewService { // Get 
site data match DomainService::parse_site_from_domain(ctx, domain).await? { - SiteDomainInfo::SiteFound(site) => { + SiteAndHost::SiteFound(site) => { Ok(ViewerResult::FoundSite(Viewer { site, user_session })) } - SiteDomainInfo::SiteRedirect { + SiteAndHost::SiteRedirect { domain: _preferred_domain, } => todo!(), - SiteDomainInfo::MissingSiteSlug { slug } => { + SiteAndHost::MissingSiteSlug { slug } => { let html = Self::missing_site_output(ctx, locales, domain, Some(&slug)).await?; Ok(ViewerResult::MissingSite(html)) } - SiteDomainInfo::MissingCustomDomain { domain } => { + SiteAndHost::MissingCustomDomain { domain } => { let html = Self::missing_site_output(ctx, locales, &domain, None).await?; Ok(ViewerResult::MissingSite(html)) } From 9fc3b914d6f82549c74a669aa06db4308c8c2a59 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 23:44:25 -0500 Subject: [PATCH 167/306] Merge SiteAndHost, same enum in deepwell and wws. Avoid one layer of match-mapping. The files options are added on the wws side only. --- deepwell/src/services/domain/service.rs | 10 +- deepwell/src/services/domain/structs.rs | 6 +- deepwell/src/services/view/service.rs | 6 +- wws/src/cache.rs | 47 +++++---- wws/src/deepwell.rs | 17 +--- wws/src/handler/mod.rs | 58 ++++++++--- wws/src/host.rs | 122 ++++++------------------ wws/src/state.rs | 17 ++-- 8 files changed, 121 insertions(+), 162 deletions(-) diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index 226464b108..11f6cb74cd 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -140,9 +140,15 @@ impl DomainService { DomainService::preferred_domain(config, &$site).into_owned(); if domain == &preferred_domain { - SiteAndHost::SiteFound($site) + let SiteModel { + site_id, + slug: site_slug, + .. + } = $site; + + SiteAndHost::MainSite { site_id, site_slug } } else { - SiteAndHost::SiteRedirect { + SiteAndHost::MainSiteRedirect { domain: preferred_domain, } } diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index 32a62062f2..78bfe9cb96 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -18,13 +18,11 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ -use crate::models::site::Model as SiteModel; - #[derive(Serialize, Debug, Clone)] #[serde(rename_all = "snake_case", tag = "result", content = "data")] pub enum SiteAndHost { - SiteFound(SiteModel), - SiteRedirect { domain: String }, + MainSite { site_id: i64, site_slug: String }, + MainSiteRedirect { domain: String }, MissingSiteSlug { slug: String }, MissingCustomDomain { domain: String }, } diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index 1749d9570c..330cb13ad0 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -511,11 +511,13 @@ impl ViewService { } // Get site data + // TODO match DomainService::parse_site_from_domain(ctx, domain).await? 
{ - SiteAndHost::SiteFound(site) => { + SiteAndHost::MainSite { site_id, site_slug } => { + let site = todo!(); Ok(ViewerResult::FoundSite(Viewer { site, user_session })) } - SiteAndHost::SiteRedirect { + SiteAndHost::MainSiteRedirect { domain: _preferred_domain, } => todo!(), SiteAndHost::MissingSiteSlug { slug } => { diff --git a/wws/src/cache.rs b/wws/src/cache.rs index ea4298ce82..91e7ff022b 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -23,10 +23,7 @@ //! Whenever you make changes to this module, make sure that the code is //! compatible with DEEPWELL's Redis code. -use crate::{ - deepwell::{FileData, SiteDomainInfo}, - error::Result, -}; +use crate::{deepwell::FileData, error::Result, host::SiteAndHost}; use redis::{aio::MultiplexedConnection, AsyncCommands}; use ref_map::*; @@ -74,7 +71,7 @@ impl Cache { Ok(()) } - pub async fn get_site_from_domain(&self, domain: &str) -> Result<Option<SiteDomainInfo>> { + pub async fn get_host_from_domain(&self, domain: &str) -> Result<Option<SiteAndHost>> { type SiteDomainDataTuple = (Option<String>, Option<i64>, Option<String>, Option<String>); let mut conn = get_connection!(self.client); @@ -82,21 +79,22 @@ impl Cache { let fields = &["variant", "id", "slug", "domain"]; let (variant, site_id, slug, domain) = conn.hget::<_, _, SiteDomainDataTuple>(&key, fields).await?; + let variant = variant.ref_map(|s| s.as_str()); match (variant, site_id, slug, domain) { // Each variant value has a set of fields that should be set for it // If a different group of fields are set, then it's invalid - (Some("site_found"), Some(site_id), Some(slug), None) => { - Ok(Some(SiteDomainInfo::SiteFound { site_id, slug })) + (Some("main_site"), Some(site_id), Some(site_slug), None) => { + Ok(Some(SiteAndHost::MainSite { site_id, site_slug })) } - (Some("site_redirect"), None, None, Some(domain)) => { - Ok(Some(SiteDomainInfo::SiteRedirect { domain })) + (Some("main_site_redirect"), None, None, Some(domain)) => { + Ok(Some(SiteAndHost::MainSiteRedirect { domain })) } - (Some("missing_site_slug"), None, Some(slug), None) => { - Ok(Some(SiteDomainInfo::MissingSiteSlug { slug })) + (Some("missing_site_slug"), None, Some(site_slug), None) => { + Ok(Some(SiteAndHost::MissingSiteSlug { site_slug })) } (Some("missing_custom_domain"), None, None, Some(domain)) => { - Ok(Some(SiteDomainInfo::MissingCustomDomain { domain })) + Ok(Some(SiteAndHost::MissingCustomDomain { domain })) } // Cache miss @@ -110,11 +108,7 @@ impl Cache { } } - pub async fn set_site_from_domain( - &self, - domain: &str, - domain_data: &SiteDomainInfo, - ) -> Result<()> { + pub async fn set_host_from_domain(&self, domain: &str, host: &SiteAndHost) -> Result<()> { let mut conn = get_connection!(self.client); let key = format!("site_domain:{domain}"); @@ -123,17 +117,22 @@ impl Cache { Option<i64>, Option<&str>, Option<&str>, - ) = match domain_data { - SiteDomainInfo::SiteFound { site_id, slug } => { - ("site_found", Some(*site_id), Some(slug), Some(domain)) + ) = match host { + SiteAndHost::MainSite { site_id, site_slug } => { + ("site_found", Some(*site_id), Some(site_slug), Some(domain)) } - SiteDomainInfo::SiteRedirect { domain } => ("site_redirect", None, None, Some(domain)), - SiteDomainInfo::MissingSiteSlug { slug } => { - ("missing_site_slug", None, Some(slug), None) + SiteAndHost::MainSiteRedirect { domain } => ("site_redirect", None, None, Some(domain)), + SiteAndHost::MissingSiteSlug { site_slug } => { + ("missing_site_slug", None, Some(site_slug), None) } - 
SiteDomainInfo::MissingCustomDomain { domain } => { + SiteAndHost::MissingCustomDomain { domain } => { ("missing_custom_domain", None, None, Some(domain)) } + SiteAndHost::FileSite { .. } | SiteAndHost::FileRoot => { + panic!( + "Cannot cache SiteAndHost value corresponding to the files router: {host:#?}" + ); + } }; hset!(conn, key, "variant", variant); diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 9a9be09c1d..4f3368ee06 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -18,7 +18,7 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ -use crate::error::Result; +use crate::{error::Result, host::SiteAndHost}; use jsonrpsee::{core::client::ClientT, http_client::HttpClient, rpc_params}; use serde::Deserialize; use std::time::Duration; @@ -125,13 +125,13 @@ impl Deepwell { Ok(site_data) } - pub async fn get_site_from_domain(&self, domain: &str) -> Result<SiteDomainInfo> { - let site_data: SiteDomainInfo = self + pub async fn get_site_from_domain(&self, domain: &str) -> Result<SiteAndHost> { + let host: SiteAndHost = self .client .request("site_from_domain", rpc_params![domain]) .await?; - Ok(site_data) + Ok(host) } pub async fn get_page(&self, site_id: i64, page_slug: &str) -> Result<Option<PageData>> { @@ -179,15 +179,6 @@ pub struct SiteData { pub slug: String, } -#[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "snake_case", tag = "result", content = "data")] -pub enum SiteDomainInfo { - SiteFound { site_id: i64, slug: String }, - SiteRedirect { domain: String }, - MissingSiteSlug { slug: String }, - MissingCustomDomain { domain: String }, -} - #[derive(Deserialize, Debug, Clone)] pub struct PageData { pub page_id: i64, diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index df8e4008e8..35acd11e16 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -138,36 +138,68 @@ pub async fn handle_host_delegation( // give it to the right place to be processed. 
match host_data { // Main site route handling - SiteAndHost::Main { site_id, site_slug } => { + SiteAndHost::MainSite { site_id, site_slug } => { + info!( + r#type = "main", + domain = hostname, + site_id = site_id, + site_slug = site_slug, + "Routing site request", + ); add_headers!(site_id, site_slug); forward_request!(main_router) } // Main site redirect SiteAndHost::MainSiteRedirect { domain } => { + info!( + r#type = "main", + domain = domain, + "Found site, but needs redirect to preferred domain", + ); let destination = format!("https://{}{}", domain, get_path(request.uri())); Redirect::permanent(&destination).into_response() } - // Main site missing - SiteAndHost::MainSiteSlugMissing { ref site_slug } => { - ServerErrorCode::SiteNotFound { site_slug }.into_response() - } - SiteAndHost::MainCustomMissing { ref domain } => { - ServerErrorCode::CustomDomainNotFound { domain }.into_response() - } // Files site route handling - SiteAndHost::File { site_id, site_slug } => { + SiteAndHost::FileSite { site_id, site_slug } => { + info!( + r#type = "files", + domain = hostname, + site_slug = site_slug, + site_id = site_id, + "Routing site request", + ); add_headers!(site_id, site_slug); forward_request!(files_router) } - // Files site missing - SiteAndHost::FileMissing { site_slug } => { - ServerErrorCode::SiteNotFound { site_slug }.into_response() - } // Files site by itself // See the case in host.rs for an explanation SiteAndHost::FileRoot => { + info!( + r#type = "files", + domain = hostname, + "Handling lone files site request", + ); let destination = format!("https://{}", state.domains.main_domain_no_dot); Redirect::temporary(&destination).into_response() } + // Canonical domain, site missing + SiteAndHost::MissingSiteSlug { ref site_slug } => { + info!( + r#type = "main", + domain = hostname, + site_slug = site_slug, + "No such site with slug", + ); + ServerErrorCode::SiteNotFound { site_slug }.into_response() + } + // Custom domain missing + SiteAndHost::MissingCustomDomain { ref domain } => { + info!( + r#type = "main", + domain = domain, + "No such site with custom domain", + ); + ServerErrorCode::CustomDomainNotFound { domain }.into_response() + } } } diff --git a/wws/src/host.rs b/wws/src/host.rs index c46a26a52b..6641586b10 100644 --- a/wws/src/host.rs +++ b/wws/src/host.rs @@ -18,11 +18,8 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ -use crate::{ - deepwell::{Domains, SiteDomainInfo}, - error::Result, - state::ServerState, -}; +use crate::{deepwell::Domains, error::Result, state::ServerState}; +use serde::Deserialize; /// The slug for the default site. /// @@ -31,35 +28,34 @@ use crate::{ pub const DEFAULT_SITE_SLUG: &str = "www"; /// Describes which Wikijump site and router this request is pointed towards. +/// Gets the data from DEEPWELL, but adds fields for the files server routing. /// /// * "Main" refers to the framerail handler, i.e. `[site-slug].wikijump.com`. /// * "Files" refers to the wjfiles handlers, i.e. `[site-slug].wjfiles.com`. -#[derive(Debug)] -pub enum SiteAndHost<'a> { - /// Main router existent site, canonical domain. - Main { site_id: i64, site_slug: String }, +#[derive(Deserialize, Debug, Clone)] +#[serde(rename_all = "snake_case", tag = "result", content = "data")] +pub enum SiteAndHost { + /// Main router existent site, ready to process request. + MainSite { site_id: i64, site_slug: String }, - /// Main router, non-existent site, canonical domain. 
- MainSiteSlugMissing { site_slug: String }, - - /// Main router, non-existent site, custom domain. - MainCustomMissing { domain: String }, - - /// Main router, request to preferred domain for the site. + /// Main router existent site, request to preferred domain. MainSiteRedirect { domain: String }, /// Files router, existent site. - File { site_id: i64, site_slug: &'a str }, - - /// Files router, non-existent site. - FileMissing { site_slug: &'a str }, + FileSite { site_id: i64, site_slug: String }, /// Request is the root domain on the files router, which has no meaning. /// Special case. FileRoot, + + /// Any router, non-existent site, canonical domain. + MissingSiteSlug { site_slug: String }, + + /// Any router, non-existent site, custom domain. + MissingCustomDomain { domain: String }, } -pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<SiteAndHost<'a>> { +pub async fn lookup_host(state: &ServerState, hostname: &str) -> Result<SiteAndHost> { let Domains { ref files_domain, ref files_domain_no_dot, @@ -69,92 +65,28 @@ pub async fn lookup_host<'a>(state: &ServerState, hostname: &'a str) -> Result<S if let Some(site_slug) = hostname.strip_suffix(files_domain) { // Determine if it's a files domain. let site_id = state.get_site_from_slug(site_slug).await?; + let site_slug = site_slug.to_owned(); // We cannot use the borrowed version because + // the struct is Deserialize. match site_id { - Some(site_id) => { - // Site exists - info!( - r#type = "files", - domain = hostname, - site_slug = site_slug, - site_id = site_id, - "Routing site request", - ); - Ok(SiteAndHost::File { site_id, site_slug }) - } - None => { - // No such site - warn!( - r#type = "files", - domain = hostname, - site_slug = site_slug, - "No such site with slug", - ); - Ok(SiteAndHost::FileMissing { site_slug }) - } + // Site exists + Some(site_id) => Ok(SiteAndHost::FileSite { site_id, site_slug }), + // Site missing + None => Ok(SiteAndHost::MissingSiteSlug { site_slug }), } } else if hostname == files_domain_no_dot { - // Finally, check if it's the files domain by itself. + // Check if it's the files domain by itself. // // This is weird, wjfiles should always a site slug subdomain, // so in this case we just temporary redirect to the main domain, // stripping the path. - // - // Since this is expected to be uncommon, we're putting it after - // the site files check. - info!( - r#type = "files", - domain = hostname, - "Handling lone files site request", - ); Ok(SiteAndHost::FileRoot) } else { // If it's anything else, it must be a canonical or custom domain. - // Let's do a lookup and let DomainService handle it for us. + // That means it's the main site. Let's do a lookup and let + // DomainService handle it for us. // // This also caches the lookup, to avoid us having to talk to // DEEPWELL more than necessary. - // - // Then we map it to the corresponding SiteAndHost variant. - - match state.get_site_from_domain(hostname).await? 
{ - SiteDomainInfo::SiteFound { - site_id, - slug: site_slug, - } => { - info!( - r#type = "main", - domain = hostname, - site_id = site_id, - site_slug = site_slug, - "Routing site request", - ); - Ok(SiteAndHost::Main { site_id, site_slug }) - } - SiteDomainInfo::SiteRedirect { domain } => { - info!( - r#type = "main", - domain = domain, - "Found site, but needs redirect to preferred", - ); - Ok(SiteAndHost::MainSiteRedirect { domain }) - } - SiteDomainInfo::MissingSiteSlug { slug: site_slug } => { - info!( - r#type = "main", - domain = hostname, - site_slug = site_slug, - "No such site with slug", - ); - Ok(SiteAndHost::MainSiteSlugMissing { site_slug }) - } - SiteDomainInfo::MissingCustomDomain { domain } => { - info!( - r#type = "main", - domain = domain, - "No such site with custom domain", - ); - Ok(SiteAndHost::MainCustomMissing { domain }) - } - } + state.get_host_from_domain(hostname).await } } diff --git a/wws/src/state.rs b/wws/src/state.rs index d956823513..918749ba7f 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -21,9 +21,10 @@ use crate::{ cache::Cache, config::Secrets, - deepwell::{Deepwell, Domains, FileData, PageData, SiteData, SiteDomainInfo}, + deepwell::{Deepwell, Domains, FileData, PageData, SiteData}, error::Result, framerail::Framerail, + host::SiteAndHost, }; use axum::body::Body; use hyper_util::{ @@ -98,16 +99,14 @@ impl ServerStateInner { } } - pub async fn get_site_from_domain(&self, site_domain: &str) -> Result<SiteDomainInfo> { - match self.cache.get_site_from_domain(site_domain).await? { - Some(domain_data) => Ok(domain_data), + pub async fn get_host_from_domain(&self, domain: &str) -> Result<SiteAndHost> { + match self.cache.get_host_from_domain(domain).await? { + Some(host) => Ok(host), None => { - let domain_data = self.deepwell.get_site_from_domain(site_domain).await?; - self.cache - .set_site_from_domain(site_domain, &domain_data) - .await?; + let host = self.deepwell.get_site_from_domain(domain).await?; + self.cache.set_host_from_domain(domain, &host).await?; - Ok(domain_data) + Ok(host) } } } From 3cd921546855e4d09a888ff18761141a088e527c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 23:50:45 -0500 Subject: [PATCH 168/306] Add rustdoc for SiteAndHost struct in deepwell. --- deepwell/src/services/domain/structs.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/deepwell/src/services/domain/structs.rs b/deepwell/src/services/domain/structs.rs index 78bfe9cb96..342973186c 100644 --- a/deepwell/src/services/domain/structs.rs +++ b/deepwell/src/services/domain/structs.rs @@ -18,6 +18,14 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ +/// Represents the result of doing a site lookup. +/// +/// This tells WWS whether it should route the user to +/// framerail with the site handler, or display a missing +/// site error. +/// +/// Be sure the values here are serde-compatible with `SiteAndHost` +/// in WWS's codebase (`src/host.rs`). #[derive(Serialize, Debug, Clone)] #[serde(rename_all = "snake_case", tag = "result", content = "data")] pub enum SiteAndHost { From d20ffd36b4792948c83046004ce2a87cae817ec3 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 23:52:53 -0500 Subject: [PATCH 169/306] Remove unused field. 
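The rustdoc added in the previous patch asks that DEEPWELL's SiteAndHost stay serde-compatible with the copy in wws/src/host.rs. A hedged sketch of the kind of pin test that could guard this, using a pared-down copy of the WWS enum and an invented MainSite payload:

use serde::Deserialize;

// Pared-down copy of the WWS SiteAndHost enum, for illustration only.
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "snake_case", tag = "result", content = "data")]
enum SiteAndHost {
    MainSite { site_id: i64, site_slug: String },
    MainSiteRedirect { domain: String },
}

#[test]
fn deepwell_main_site_payload_deserializes() {
    // Shape DEEPWELL's Serialize impl produces for the MainSite variant.
    let json = r#"{"result":"main_site","data":{"site_id":123,"site_slug":"scp-wiki"}}"#;
    let parsed: SiteAndHost = serde_json::from_str(json).unwrap();
    assert_eq!(
        parsed,
        SiteAndHost::MainSite {
            site_id: 123,
            site_slug: String::from("scp-wiki"),
        },
    );
}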
--- wws/src/deepwell.rs | 1 - wws/src/state.rs | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 4f3368ee06..cf8d953333 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -176,7 +176,6 @@ pub struct Domains { #[derive(Deserialize, Debug, Clone)] pub struct SiteData { pub site_id: i64, - pub slug: String, } #[derive(Deserialize, Debug, Clone)] diff --git a/wws/src/state.rs b/wws/src/state.rs index 918749ba7f..b8efcb848d 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -91,7 +91,7 @@ impl ServerStateInner { Some(site_id) => Ok(Some(site_id)), None => match self.deepwell.get_site_from_slug(site_slug).await? { None => Ok(None), - Some(SiteData { site_id, .. }) => { + Some(SiteData { site_id }) => { self.cache.set_site_from_slug(site_slug, site_id).await?; Ok(Some(site_id)) } From f23e1ea6d97b0745eff4be323ebead9c1c3fe97a Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 10 Feb 2025 23:57:47 -0500 Subject: [PATCH 170/306] Combine match arms. --- deepwell/src/services/view/service.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index 330cb13ad0..fe1c209a6e 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -468,8 +468,7 @@ impl ViewService { // Get user data from session token (if present) let user_session = match session_token { - None => None, - Some("") => None, + Some("") | None => None, Some(token) => { let session = SessionService::get(ctx, token).await?; let user = UserService::get(ctx, Reference::Id(session.user_id)).await?; From ae075313039175f8dccbdbfa4189160f2bedfb6d Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 11 Feb 2025 00:07:33 -0500 Subject: [PATCH 171/306] Remove ViewerResult. We're not handling missing sites at this level. --- deepwell/src/services/view/service.rs | 108 ++++++++------------------ deepwell/src/services/view/structs.rs | 24 +----- 2 files changed, 35 insertions(+), 97 deletions(-) diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index fe1c209a6e..ed9f9e181e 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -37,8 +37,8 @@ use crate::services::domain::SiteAndHost; use crate::services::render::RenderOutput; use crate::services::special_page::{GetSpecialPageOutput, SpecialPageType}; use crate::services::{ - DomainService, PageRevisionService, PageService, SessionService, SpecialPageService, - TextService, UserService, + DomainService, PageRevisionService, PageService, SessionService, SiteService, + SpecialPageService, TextService, UserService, }; use crate::utils::split_category; use fluent::{FluentArgs, FluentValue}; @@ -57,36 +57,26 @@ impl ViewService { pub async fn page( ctx: &ServiceContext<'_>, GetPageView { - domain, + site_id, locales: locales_str, route, session_token, }: GetPageView, ) -> Result<GetPageViewOutput> { info!( - "Getting page view data for domain '{}', route '{:?}', locales '{:?}'", - domain, route, locales_str, + "Getting page view data for site ID {}, route '{:?}', locales '{:?}'", + site_id, route, locales_str, ); - // Parse all locales let mut locales = parse_locales(&locales_str)?; let config = ctx.config(); - - // Attempt to get a viewer helper structure, but if the site doesn't exist - // then return right away with the "no such site" response. 
- let Viewer { site, user_session } = match Self::get_viewer( + let Viewer { site, user_session } = Self::get_viewer( ctx, &mut locales, - &domain, + site_id, session_token.ref_map(|s| s.as_str()), ) - .await? - { - ViewerResult::FoundSite(viewer) => viewer, - ViewerResult::MissingSite(html) => { - return Ok(GetPageViewOutput::SiteMissing { html }); - } - }; + .await?; // If None, means the main page for the site. Pull from site data. let (page_full_slug, page_extra): (&str, &str) = match &route { @@ -299,35 +289,25 @@ impl ViewService { pub async fn user( ctx: &ServiceContext<'_>, GetUserView { - domain, + site_id, locales: locales_str, user: user_ref, session_token, }: GetUserView<'_>, ) -> Result<GetUserViewOutput> { info!( - "Getting user view data for domain '{}', user '{:?}', locales '{:?}'", - domain, user_ref, locales_str, + "Getting user view data for site ID {}, user '{:?}', locales '{:?}'", + site_id, user_ref, locales_str, ); - // Parse all locales let mut locales = parse_locales(&locales_str)?; - - // Attempt to get a viewer helper structure, but if the site doesn't exist - // then return right away with the "no such site" response. - let viewer = match Self::get_viewer( + let viewer = Self::get_viewer( ctx, &mut locales, - &domain, + site_id, session_token.ref_map(|s| s.as_str()), ) - .await? - { - ViewerResult::FoundSite(viewer) => viewer, - ViewerResult::MissingSite(html) => { - return Ok(GetUserViewOutput::SiteMissing { html }); - } - }; + .await?; // TODO Check if user-agent and IP match? @@ -352,35 +332,25 @@ impl ViewService { pub async fn admin( ctx: &ServiceContext<'_>, GetAdminView { - domain, + site_id, locales: locales_str, session_token, }: GetAdminView, ) -> Result<GetAdminViewOutput> { info!( - "Getting site view data for domain '{}', locales '{:?}'", - domain, locales_str, + "Getting site view data for site ID {}, locales '{:?}'", + site_id, locales_str, ); - // Parse all locales let mut locales = parse_locales(&locales_str)?; let config = ctx.config(); - - // Attempt to get a viewer helper structure, but if the site doesn't exist - // then return right away with the "no such site" response. - let viewer = match Self::get_viewer( + let viewer = Self::get_viewer( ctx, &mut locales, - &domain, + site_id, session_token.ref_map(|s| s.as_str()), ) - .await? - { - ViewerResult::FoundSite(viewer) => viewer, - ViewerResult::MissingSite(html) => { - return Ok(GetAdminViewOutput::SiteMissing { html }); - } - }; + .await?; let page_info = PageInfo { page: cow!(""), @@ -424,7 +394,6 @@ impl ViewService { Some(ref session) => session.user_permissions, None => { debug!("No user for session, disallow admin access"); - return Ok(GetAdminViewOutput::AdminPermissions { viewer, html: compiled_html, @@ -453,18 +422,21 @@ impl ViewService { /// All views seen by end users require a few translations before /// a request can be serviced: /// - /// * Hostname of request → Site ID and data + /// * Site ID → Site data /// * Session token → User ID and their permissions /// + /// Note that we do *not* need to get the site ID from the domain + /// since WWS has already done the domain lookup logic for ups. + /// /// Then using this information, the caller can perform some common /// operations, such as slug normalization or redirect site aliases. 
pub async fn get_viewer( ctx: &ServiceContext<'_>, locales: &mut Vec<LanguageIdentifier>, - domain: &str, + site_id: i64, session_token: Option<&str>, - ) -> Result<ViewerResult> { - info!("Getting viewer data from domain '{domain}' and session token"); + ) -> Result<Viewer> { + info!("Getting viewer data site ID {site_id} and session token"); // Get user data from session token (if present) let user_session = match session_token { @@ -509,27 +481,11 @@ impl ViewService { return Err(Error::NoLocalesSpecified); } - // Get site data - // TODO - match DomainService::parse_site_from_domain(ctx, domain).await? { - SiteAndHost::MainSite { site_id, site_slug } => { - let site = todo!(); - Ok(ViewerResult::FoundSite(Viewer { site, user_session })) - } - SiteAndHost::MainSiteRedirect { - domain: _preferred_domain, - } => todo!(), - SiteAndHost::MissingSiteSlug { slug } => { - let html = - Self::missing_site_output(ctx, locales, domain, Some(&slug)).await?; - - Ok(ViewerResult::MissingSite(html)) - } - SiteAndHost::MissingCustomDomain { domain } => { - let html = Self::missing_site_output(ctx, locales, &domain, None).await?; - Ok(ViewerResult::MissingSite(html)) - } - } + // Get site information + let site = SiteService::get(ctx, Reference::Id(site_id)).await?; + + // Return + Ok(Viewer { site, user_session }) } /// Produce output for cases where a site does not exist. diff --git a/deepwell/src/services/view/structs.rs b/deepwell/src/services/view/structs.rs index f5069ab036..9efe8ac585 100644 --- a/deepwell/src/services/view/structs.rs +++ b/deepwell/src/services/view/structs.rs @@ -39,7 +39,7 @@ impl UserPermissions { #[derive(Deserialize, Debug, Clone)] pub struct GetPageView { - pub domain: String, + pub site_id: i64, pub session_token: Option<String>, pub route: Option<PageRoute>, pub locales: Vec<String>, @@ -82,15 +82,11 @@ pub enum GetPageViewOutput { compiled_html: String, banned: bool, }, - - SiteMissing { - html: String, - }, } #[derive(Deserialize, Debug, Clone)] pub struct GetUserView<'a> { - pub domain: String, + pub site_id: i64, pub session_token: Option<String>, pub user: Option<Reference<'a>>, pub locales: Vec<String>, @@ -109,15 +105,11 @@ pub enum GetUserViewOutput { #[serde(flatten)] viewer: Viewer, }, - - SiteMissing { - html: String, - }, } #[derive(Deserialize, Debug, Clone)] pub struct GetAdminView { - pub domain: String, + pub site_id: i64, pub session_token: Option<String>, pub locales: Vec<String>, } @@ -135,16 +127,6 @@ pub enum GetAdminViewOutput { viewer: Viewer, html: String, }, - - SiteMissing { - html: String, - }, -} - -#[derive(Debug, Clone)] -pub enum ViewerResult { - FoundSite(Viewer), - MissingSite(String), } #[derive(Serialize, Debug, Clone)] From 8633eae94c2c5e5c2d81026369c9cd085513126f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 11 Feb 2025 00:17:16 -0500 Subject: [PATCH 172/306] Interpolate variables in the string. 
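That is, the log message now passes every value as an explicit argument
instead of mixing an inline-captured identifier with a positional
placeholder. A small sketch of the two equivalent forms, using plain
format! for illustration (the real call site uses the tracing info!
macro):

    fn main() {
        let sp_page_type = "Missing";
        let site_id = 42;

        // Inline capture mixed with a positional placeholder:
        let before =
            format!("Getting special page {sp_page_type:?} for site ID {}", site_id);

        // Both values passed as explicit positional arguments:
        let after =
            format!("Getting special page {:?} for site ID {}", sp_page_type, site_id);

        assert_eq!(before, after);
        println!("{after}");
    }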
--- deepwell/src/services/special_page/service.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/deepwell/src/services/special_page/service.rs b/deepwell/src/services/special_page/service.rs index 455355a881..eed066a131 100644 --- a/deepwell/src/services/special_page/service.rs +++ b/deepwell/src/services/special_page/service.rs @@ -43,7 +43,8 @@ impl SpecialPageService { page_info: PageInfo<'_>, ) -> Result<GetSpecialPageOutput> { info!( - "Getting special page {sp_page_type:?} for site ID {}", + "Getting special page {:?} for site ID {}", + sp_page_type, site.site_id, ); From ec0a33dfd7eb31e39281f2a3d67b53f84b51c305 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 11 Feb 2025 00:27:01 -0500 Subject: [PATCH 173/306] Create SpecialErrorService for routes like the missing site. --- deepwell/src/services/mod.rs | 2 + deepwell/src/services/special_error.rs | 77 +++++++++++++++++++ deepwell/src/services/special_page/service.rs | 3 +- deepwell/src/services/view/service.rs | 44 ----------- 4 files changed, 80 insertions(+), 46 deletions(-) create mode 100644 deepwell/src/services/special_error.rs diff --git a/deepwell/src/services/mod.rs b/deepwell/src/services/mod.rs index 840c5c5e21..897b839031 100644 --- a/deepwell/src/services/mod.rs +++ b/deepwell/src/services/mod.rs @@ -84,6 +84,7 @@ pub mod score; pub mod session; pub mod settings; pub mod site; +pub mod special_error; pub mod special_page; pub mod text; pub mod user; @@ -119,6 +120,7 @@ pub use self::score::ScoreService; pub use self::session::SessionService; pub use self::settings::SettingsService; pub use self::site::SiteService; +pub use self::special_error::SpecialErrorService; pub use self::special_page::SpecialPageService; pub use self::text::TextService; pub use self::user::UserService; diff --git a/deepwell/src/services/special_error.rs b/deepwell/src/services/special_error.rs new file mode 100644 index 0000000000..567a2f3948 --- /dev/null +++ b/deepwell/src/services/special_error.rs @@ -0,0 +1,77 @@ +/* + * services/special_error.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +//! The "special error" service. +//! +//! This produces localized HTML pages that correspond +//! to different special error conditions, such as a +//! missing site or unknown custom domain. + +use super::prelude::*; +use fluent::{FluentArgs, FluentValue}; +use unic_langid::LanguageIdentifier; + +#[derive(Debug)] +pub struct SpecialErrorService; + +impl SpecialErrorService { + /// Produce output for cases where a site does not exist. 
+ pub async fn missing_site( + ctx: &ServiceContext<'_>, + locales: &[LanguageIdentifier], + domain: &str, + site_slug: Option<&str>, + ) -> Result<String> { + let config = ctx.config(); + match site_slug { + // No site with slug error + Some(site_slug) => { + let mut args = FluentArgs::new(); + args.set("slug", fluent_str!(site_slug)); + args.set("domain", fluent_str!(config.main_domain_no_dot)); + args.set("files-domain", fluent_str!(config.files_domain_no_dot)); + + let html = ctx.localization().translate( + locales, + "wiki-page-site-slug", + &args, + )?; + + Ok(html.to_string()) + } + + // Custom domain missing error + None => { + let mut args = FluentArgs::new(); + args.set("custom_domain", fluent_str!(domain)); + args.set("domain", fluent_str!(config.main_domain_no_dot)); + args.set("files-domain", fluent_str!(config.files_domain_no_dot)); + + let html = ctx.localization().translate( + locales, + "wiki-page-site-custom", + &args, + )?; + + Ok(html.to_string()) + } + } + } +} diff --git a/deepwell/src/services/special_page/service.rs b/deepwell/src/services/special_page/service.rs index eed066a131..9f1dfe1563 100644 --- a/deepwell/src/services/special_page/service.rs +++ b/deepwell/src/services/special_page/service.rs @@ -44,8 +44,7 @@ impl SpecialPageService { ) -> Result<GetSpecialPageOutput> { info!( "Getting special page {:?} for site ID {}", - sp_page_type, - site.site_id, + sp_page_type, site.site_id, ); // Extract fields based on special page type. diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index ed9f9e181e..5d766ca797 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -41,7 +41,6 @@ use crate::services::{ SpecialPageService, TextService, UserService, }; use crate::utils::split_category; -use fluent::{FluentArgs, FluentValue}; use ftml::prelude::*; use ftml::render::html::HtmlOutput; use ref_map::*; @@ -488,49 +487,6 @@ impl ViewService { Ok(Viewer { site, user_session }) } - /// Produce output for cases where a site does not exist. - async fn missing_site_output( - ctx: &ServiceContext<'_>, - locales: &[LanguageIdentifier], - domain: &str, - site_slug: Option<&str>, - ) -> Result<String> { - let config = ctx.config(); - match site_slug { - // No site with slug error - Some(site_slug) => { - let mut args = FluentArgs::new(); - args.set("slug", fluent_str!(site_slug)); - args.set("domain", fluent_str!(config.main_domain_no_dot)); - args.set("files-domain", fluent_str!(config.files_domain_no_dot)); - - let html = ctx.localization().translate( - locales, - "wiki-page-site-slug", - &args, - )?; - - Ok(html.to_string()) - } - - // Custom domain missing error - None => { - let mut args = FluentArgs::new(); - args.set("custom_domain", fluent_str!(domain)); - args.set("domain", fluent_str!(config.main_domain_no_dot)); - args.set("files-domain", fluent_str!(config.files_domain_no_dot)); - - let html = ctx.localization().translate( - locales, - "wiki-page-site-custom", - &args, - )?; - - Ok(html.to_string()) - } - } - } - async fn can_access_page( _ctx: &ServiceContext<'_>, permissions: UserPermissions, From 0ed1fd23ad7f5a3a4791cf5296adc11768820ebb Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 11 Feb 2025 00:30:52 -0500 Subject: [PATCH 174/306] Split site slug and custom domain special errors. 
--- deepwell/src/services/special_error.rs | 59 ++++++++++++-------------- 1 file changed, 27 insertions(+), 32 deletions(-) diff --git a/deepwell/src/services/special_error.rs b/deepwell/src/services/special_error.rs index 567a2f3948..22bccbf302 100644 --- a/deepwell/src/services/special_error.rs +++ b/deepwell/src/services/special_error.rs @@ -32,46 +32,41 @@ use unic_langid::LanguageIdentifier; pub struct SpecialErrorService; impl SpecialErrorService { - /// Produce output for cases where a site does not exist. - pub async fn missing_site( + /// Produce output for a canonical site that does not exist. + pub async fn missing_site_slug( ctx: &ServiceContext<'_>, locales: &[LanguageIdentifier], - domain: &str, - site_slug: Option<&str>, + site_slug: &str, ) -> Result<String> { let config = ctx.config(); - match site_slug { - // No site with slug error - Some(site_slug) => { - let mut args = FluentArgs::new(); - args.set("slug", fluent_str!(site_slug)); - args.set("domain", fluent_str!(config.main_domain_no_dot)); - args.set("files-domain", fluent_str!(config.files_domain_no_dot)); + let mut args = FluentArgs::new(); + args.set("slug", fluent_str!(site_slug)); + args.set("main-domain", fluent_str!(config.main_domain_no_dot)); + args.set("files-domain", fluent_str!(config.files_domain_no_dot)); - let html = ctx.localization().translate( - locales, - "wiki-page-site-slug", - &args, - )?; + let html = ctx + .localization() + .translate(locales, "wiki-page-site-slug", &args)?; - Ok(html.to_string()) - } + Ok(html.to_string()) + } - // Custom domain missing error - None => { - let mut args = FluentArgs::new(); - args.set("custom_domain", fluent_str!(domain)); - args.set("domain", fluent_str!(config.main_domain_no_dot)); - args.set("files-domain", fluent_str!(config.files_domain_no_dot)); + /// Produce output for a custom domain that does not exist. + pub async fn missing_custom_domain( + ctx: &ServiceContext<'_>, + locales: &[LanguageIdentifier], + domain: &str, + ) -> Result<String> { + let config = ctx.config(); + let mut args = FluentArgs::new(); + args.set("custom_domain", fluent_str!(domain)); + args.set("main-domain", fluent_str!(config.main_domain_no_dot)); + args.set("files-domain", fluent_str!(config.files_domain_no_dot)); - let html = ctx.localization().translate( - locales, - "wiki-page-site-custom", - &args, - )?; + let html = + ctx.localization() + .translate(locales, "wiki-page-site-custom", &args)?; - Ok(html.to_string()) - } - } + Ok(html.to_string()) } } From 359b403301e51570c0e2de4453c4b161669fb16a Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 11 Feb 2025 00:35:17 -0500 Subject: [PATCH 175/306] Create special-error fluent directory. 
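The messages move out of wiki-page/ into their own special-error/
directory, and the argument names now match what SpecialErrorService
sets (main_domain, files_domain).

A rough standalone sketch of how the fluent crate resolves one of these
messages with arguments. The message text is stripped down and
DEEPWELL's Localizations wrapper is not used here, so treat the bundle
setup as illustration only, not the real loading path:

    use fluent::{FluentArgs, FluentBundle, FluentResource};
    use unic_langid::LanguageIdentifier;

    fn main() {
        let ftl = "special-error-site-slug = No site exists at { $slug }.{ $main_domain }.";
        let resource = FluentResource::try_new(ftl.to_string()).expect("invalid FTL");

        let lang: LanguageIdentifier = "en".parse().expect("invalid locale");
        let mut bundle = FluentBundle::new(vec![lang]);
        bundle.set_use_isolating(false); // skip Unicode isolation marks in the output
        bundle.add_resource(resource).expect("duplicate message id");

        let mut args = FluentArgs::new();
        args.set("slug", "scp-wiki");
        args.set("main_domain", "wikijump.com");

        let message = bundle.get_message("special-error-site-slug").expect("missing message");
        let pattern = message.value().expect("message has no value");
        let mut errors = vec![];
        let text = bundle.format_pattern(pattern, Some(&args), &mut errors);
        assert!(errors.is_empty());
        println!("{text}");
    }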
--- deepwell/src/services/special_error.rs | 12 ++++++------ locales/fluent/special-error/en.ftl | 13 +++++++++++++ locales/fluent/special-error/zh_Hans.ftl | 13 +++++++++++++ locales/fluent/wiki-page/en.ftl | 12 ------------ locales/fluent/wiki-page/zh_Hans.ftl | 12 ------------ 5 files changed, 32 insertions(+), 30 deletions(-) create mode 100644 locales/fluent/special-error/en.ftl create mode 100644 locales/fluent/special-error/zh_Hans.ftl diff --git a/deepwell/src/services/special_error.rs b/deepwell/src/services/special_error.rs index 22bccbf302..466d3ed815 100644 --- a/deepwell/src/services/special_error.rs +++ b/deepwell/src/services/special_error.rs @@ -41,12 +41,12 @@ impl SpecialErrorService { let config = ctx.config(); let mut args = FluentArgs::new(); args.set("slug", fluent_str!(site_slug)); - args.set("main-domain", fluent_str!(config.main_domain_no_dot)); - args.set("files-domain", fluent_str!(config.files_domain_no_dot)); + args.set("main_domain", fluent_str!(config.main_domain_no_dot)); + args.set("files_domain", fluent_str!(config.files_domain_no_dot)); let html = ctx .localization() - .translate(locales, "wiki-page-site-slug", &args)?; + .translate(locales, "special-error-site-slug", &args)?; Ok(html.to_string()) } @@ -60,12 +60,12 @@ impl SpecialErrorService { let config = ctx.config(); let mut args = FluentArgs::new(); args.set("custom_domain", fluent_str!(domain)); - args.set("main-domain", fluent_str!(config.main_domain_no_dot)); - args.set("files-domain", fluent_str!(config.files_domain_no_dot)); + args.set("main_domain", fluent_str!(config.main_domain_no_dot)); + args.set("files_domain", fluent_str!(config.files_domain_no_dot)); let html = ctx.localization() - .translate(locales, "wiki-page-site-custom", &args)?; + .translate(locales, "special-error-site-custom", &args)?; Ok(html.to_string()) } diff --git a/locales/fluent/special-error/en.ftl b/locales/fluent/special-error/en.ftl new file mode 100644 index 0000000000..aa29f22ba1 --- /dev/null +++ b/locales/fluent/special-error/en.ftl @@ -0,0 +1,13 @@ +### Special Error HTML templates + +special-error-site-slug = <h1>No { -service-name } site exists with this address.</h1> + <p> + <a href="https://{ $slug }.{ $domain }/">{ $slug }.{ $domain }</a> does not exist. + Return to <a href="https://{ $domain }/">{ -service-name }</a>. + </p> + +special-error-site-custom = <h1>No { -service-name } site exists with this address.</h1> + <p> + No site has the custom domain <a href="https://{ $custom_domain }/">{ $custom_domain }</a>. + Return to <a href="https://{ $main_domain }/">{ -service-name }</a>. + </p> diff --git a/locales/fluent/special-error/zh_Hans.ftl b/locales/fluent/special-error/zh_Hans.ftl new file mode 100644 index 0000000000..67c12779df --- /dev/null +++ b/locales/fluent/special-error/zh_Hans.ftl @@ -0,0 +1,13 @@ +### ??? 
+ +special-error-site-slug = <h1>使用 { -service-name } 作名字的站点并不存在。</h1> + <p> + <a href="https://{ $slug }.{ $domain }/">{ $slug }.{ $domain }</a> 并不存在。 + 返回 <a href="https://{ $domain }/">{ -service-name }</a>。 + </p> + +special-error-site-custom = <h1>使用 { -service-name } 作名字的站点并不存在。</h1> + <p> + 没有站点使用自订域名 <a href="https://{ $custom_domain }/">{ $custom_domain }</a>。 + 返回 <a href="https://{ $main_domain }/">{ -service-name }</a>。 + </p> diff --git a/locales/fluent/wiki-page/en.ftl b/locales/fluent/wiki-page/en.ftl index 26730f8dfa..e620d1af93 100644 --- a/locales/fluent/wiki-page/en.ftl +++ b/locales/fluent/wiki-page/en.ftl @@ -102,16 +102,4 @@ wiki-page-banned = + You have been banned You are currently banned from this site, and the site settings do not allow banned users to view pages. -wiki-page-site-slug = <h1>No { -service-name } site exists with this address.</h1> - <p> - <a href="https://{ $slug }.{ $domain }/">{ $slug }.{ $domain }</a> does not exist. - Return to <a href="https://{ $domain }/">{ -service-name }</a>. - </p> - -wiki-page-site-custom = <h1>No { -service-name } site exists with this address.</h1> - <p> - No site has the custom domain <a href="https://{ $custom_domain }/">{ $custom_domain }</a>. - Return to <a href="https://{ $domain }/">{ -service-name }</a>. - </p> - wiki-page-no-render = Content not shown. diff --git a/locales/fluent/wiki-page/zh_Hans.ftl b/locales/fluent/wiki-page/zh_Hans.ftl index 3a212161e1..1c50c68050 100644 --- a/locales/fluent/wiki-page/zh_Hans.ftl +++ b/locales/fluent/wiki-page/zh_Hans.ftl @@ -102,16 +102,4 @@ wiki-page-banned = + 您已经被封禁 您已经被本站封禁,而本站设定并不允许被封禁用户访问页面。 -wiki-page-site-slug = <h1>使用 { -service-name } 作名字的站点并不存在。</h1> - <p> - <a href="https://{ $slug }.{ $domain }/">{ $slug }.{ $domain }</a> 并不存在。 - 返回 <a href="https://{ $domain }/">{ -service-name }</a>。 - </p> - -wiki-page-site-custom = <h1>使用 { -service-name } 作名字的站点并不存在。</h1> - <p> - 没有站点使用自订域名 <a href="https://{ $custom_domain }/">{ $custom_domain }</a>。 - 返回 <a href="https://{ $domain }/">{ -service-name }</a>。 - </p> - wiki-page-no-render = 内容停止显示。 From 70ab1a5c6beb10892b3aa8be1bef77e40861f947 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 11 Feb 2025 00:43:53 -0500 Subject: [PATCH 176/306] Add stubs for special error methods. 
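Both methods are registered on the RPC module and stubbed with todo!()
until the handlers are filled in.

For reference, a hedged sketch of what a call looks like from a client,
assuming a jsonrpsee HTTP client and a placeholder URL; wws actually
goes through its own Deepwell wrapper and rpc_object! macro, so this is
illustration only:

    use jsonrpsee::core::client::ClientT;
    use jsonrpsee::core::params::ObjectParams;
    use jsonrpsee::http_client::HttpClientBuilder;

    #[tokio::main]
    async fn main() -> anyhow::Result<()> {
        // Placeholder address; the real DEEPWELL host comes from configuration.
        let client = HttpClientBuilder::default().build("http://localhost:2747")?;

        let mut params = ObjectParams::new();
        params.insert("locales", vec!["en"])?;
        params.insert("site_slug", "scp-wiki")?;

        let html: String = client
            .request("special_error_missing_site_slug", params)
            .await?;
        println!("{html}");
        Ok(())
    }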
--- deepwell/src/api.rs | 13 ++++++++- deepwell/src/endpoints/mod.rs | 1 + deepwell/src/endpoints/special_error.rs | 35 +++++++++++++++++++++++++ deepwell/src/services/special_error.rs | 6 ++--- 4 files changed, 51 insertions(+), 4 deletions(-) create mode 100644 deepwell/src/endpoints/special_error.rs diff --git a/deepwell/src/api.rs b/deepwell/src/api.rs index 35c45b5c86..c68ffd74fe 100644 --- a/deepwell/src/api.rs +++ b/deepwell/src/api.rs @@ -30,7 +30,8 @@ use crate::config::{Config, Secrets}; use crate::endpoints::{ auth::*, blob::*, category::*, domain::*, email::*, file::*, file_revision::*, info::*, link::*, locale::*, message::*, misc::*, page::*, page_revision::*, - parent::*, site::*, site_member::*, text::*, user::*, user_bot::*, view::*, vote::*, + parent::*, site::*, site_member::*, special_error::*, text::*, user::*, user_bot::*, + view::*, vote::*, }; use crate::locales::Localizations; use crate::services::blob::MimeAnalyzer; @@ -194,6 +195,16 @@ async fn build_module(app_state: ServerState) -> anyhow::Result<RpcModule<Server register!("user_view", user_view); register!("admin_view", admin_view); + // Special errors + register!( + "special_error_missing_site_slug", + special_error_missing_site_slug, + ); + register!( + "special_error_missing_custom_domain", + special_error_missing_custom_domain, + ); + // Authentication register!("login", auth_login); register!("logout", auth_logout); diff --git a/deepwell/src/endpoints/mod.rs b/deepwell/src/endpoints/mod.rs index 61a8b9a43e..0fe77ee569 100644 --- a/deepwell/src/endpoints/mod.rs +++ b/deepwell/src/endpoints/mod.rs @@ -59,6 +59,7 @@ pub mod page_revision; pub mod parent; pub mod site; pub mod site_member; +pub mod special_error; pub mod text; pub mod user; pub mod user_bot; diff --git a/deepwell/src/endpoints/special_error.rs b/deepwell/src/endpoints/special_error.rs new file mode 100644 index 0000000000..b90ddc4d28 --- /dev/null +++ b/deepwell/src/endpoints/special_error.rs @@ -0,0 +1,35 @@ +/* + * endpoints/special_error.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ */ + +use super::prelude::*; + +pub async fn special_error_missing_site_slug( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<String> { + todo!() +} + +pub async fn special_error_missing_custom_domain( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<String> { + todo!() +} diff --git a/deepwell/src/services/special_error.rs b/deepwell/src/services/special_error.rs index 466d3ed815..723d014864 100644 --- a/deepwell/src/services/special_error.rs +++ b/deepwell/src/services/special_error.rs @@ -44,9 +44,9 @@ impl SpecialErrorService { args.set("main_domain", fluent_str!(config.main_domain_no_dot)); args.set("files_domain", fluent_str!(config.files_domain_no_dot)); - let html = ctx - .localization() - .translate(locales, "special-error-site-slug", &args)?; + let html = + ctx.localization() + .translate(locales, "special-error-site-slug", &args)?; Ok(html.to_string()) } From 831a479a399af06a86597b1ead0cf034571eedd6 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 11 Feb 2025 00:47:36 -0500 Subject: [PATCH 177/306] Move parse_locales() to utils. --- deepwell/src/services/special_error.rs | 1 + deepwell/src/services/view/service.rs | 15 +-------------- deepwell/src/utils/locale.rs | 16 ++++++++++++++++ 3 files changed, 18 insertions(+), 14 deletions(-) diff --git a/deepwell/src/services/special_error.rs b/deepwell/src/services/special_error.rs index 723d014864..22c7244b61 100644 --- a/deepwell/src/services/special_error.rs +++ b/deepwell/src/services/special_error.rs @@ -25,6 +25,7 @@ //! missing site or unknown custom domain. use super::prelude::*; +use crate::utils::parse_locales; use fluent::{FluentArgs, FluentValue}; use unic_langid::LanguageIdentifier; diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index 5d766ca797..0724aeb81c 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -40,7 +40,7 @@ use crate::services::{ DomainService, PageRevisionService, PageService, SessionService, SiteService, SpecialPageService, TextService, UserService, }; -use crate::utils::split_category; +use crate::utils::{parse_locales, split_category}; use ftml::prelude::*; use ftml::render::html::HtmlOutput; use ref_map::*; @@ -534,16 +534,3 @@ impl ViewService { } } } - -/// Converts an array of strings to a list of locales. -/// -/// Empty locales lists _are_ allowed, since we have not -/// yet checked the user's locale preferences. -fn parse_locales<S: AsRef<str>>(locales_str: &[S]) -> Result<Vec<LanguageIdentifier>> { - let mut locales = Vec::with_capacity(locales_str.len()); - for locale_str in locales_str { - let locale = LanguageIdentifier::from_bytes(locale_str.as_ref().as_bytes())?; - locales.push(locale); - } - Ok(locales) -} diff --git a/deepwell/src/utils/locale.rs b/deepwell/src/utils/locale.rs index 941c0e4984..477421a679 100644 --- a/deepwell/src/utils/locale.rs +++ b/deepwell/src/utils/locale.rs @@ -21,9 +21,25 @@ use crate::services::{Error, Result}; use unic_langid::LanguageIdentifier; +/// Ensure the given locale string is valid, returning the parsed locale. pub fn validate_locale(locale_str: &str) -> Result<LanguageIdentifier> { LanguageIdentifier::from_bytes(locale_str.as_bytes()).map_err(|error| { warn!("Invalid locale '{}' passed: {:?}", locale_str, error); Error::LocaleInvalid(error) }) } + +/// Helper function to convert an array of strings to a list of locales. 
+/// +/// Empty locales lists _are_ allowed, since we have not +/// yet checked the user's locale preferences. +pub fn parse_locales<S: AsRef<str>>( + locales_str: &[S], +) -> Result<Vec<LanguageIdentifier>> { + let mut locales = Vec::with_capacity(locales_str.len()); + for locale_str in locales_str { + let locale = LanguageIdentifier::from_bytes(locale_str.as_ref().as_bytes())?; + locales.push(locale); + } + Ok(locales) +} From 2806325f687674274628c6e50997cad51b47c9ba Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 11 Feb 2025 01:04:35 -0500 Subject: [PATCH 178/306] Add rustdoc for locales module. --- deepwell/src/locales/mod.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/deepwell/src/locales/mod.rs b/deepwell/src/locales/mod.rs index f907951a0c..1726404aef 100644 --- a/deepwell/src/locales/mod.rs +++ b/deepwell/src/locales/mod.rs @@ -18,6 +18,15 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ +//! This module concerns parsing and using the localization bundle `locales/`. +//! +//! The exposed structures and functions permit easy use of the bundle to +//! perform basic operations. +//! +//! General locale logic should _not_ go in here, consider what it does +//! and consider a location like `utils/locale.rs` or the service using +//! the code. + #![allow(unused_imports)] mod arguments; From 2dd003284be979f6ec99b9b33f0a5c309266a0ac Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 11 Feb 2025 01:09:08 -0500 Subject: [PATCH 179/306] Augment rustdoc for locale parse. --- deepwell/src/utils/locale.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/deepwell/src/utils/locale.rs b/deepwell/src/utils/locale.rs index 477421a679..8a5f71a34e 100644 --- a/deepwell/src/utils/locale.rs +++ b/deepwell/src/utils/locale.rs @@ -22,6 +22,7 @@ use crate::services::{Error, Result}; use unic_langid::LanguageIdentifier; /// Ensure the given locale string is valid, returning the parsed locale. +/// If it is invalid, then the appropriate `Error` variant is returned. pub fn validate_locale(locale_str: &str) -> Result<LanguageIdentifier> { LanguageIdentifier::from_bytes(locale_str.as_bytes()).map_err(|error| { warn!("Invalid locale '{}' passed: {:?}", locale_str, error); From 4ad713f748c48f89763c0def0d56c0403238bb85 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 11 Feb 2025 01:12:01 -0500 Subject: [PATCH 180/306] Add handling for locale extraction in special error routes. 
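Each endpoint now deserializes a small named-params struct and runs the
locale strings through parse_locales() before calling into
SpecialErrorService.

A condensed sketch of the helper's all-or-nothing behavior. The real
function lives in utils/locale.rs and returns the service Error type;
the raw unic_langid error is used here to keep the sketch standalone:

    use unic_langid::{LanguageIdentifier, LanguageIdentifierError};

    fn parse_locales<S: AsRef<str>>(
        locales: &[S],
    ) -> Result<Vec<LanguageIdentifier>, LanguageIdentifierError> {
        locales
            .iter()
            .map(|s| LanguageIdentifier::from_bytes(s.as_ref().as_bytes()))
            .collect()
    }

    fn main() {
        // Every entry parses, so the whole list comes back.
        let locales = parse_locales(&["en", "zh-Hans"]).unwrap();
        assert_eq!(locales.len(), 2);

        // A single bad entry fails the entire request.
        assert!(parse_locales(&["en", "!!bad!!"]).is_err());
    }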
--- deepwell/src/endpoints/special_error.rs | 22 ++++++++++++++++++++-- deepwell/src/services/special_error.rs | 1 + 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/deepwell/src/endpoints/special_error.rs b/deepwell/src/endpoints/special_error.rs index b90ddc4d28..5e5151f376 100644 --- a/deepwell/src/endpoints/special_error.rs +++ b/deepwell/src/endpoints/special_error.rs @@ -19,17 +19,35 @@ */ use super::prelude::*; +use crate::services::special_error::SpecialErrorService; +use crate::utils::parse_locales; pub async fn special_error_missing_site_slug( ctx: &ServiceContext<'_>, params: Params<'static>, ) -> Result<String> { - todo!() + #[derive(Deserialize, Debug)] + struct Input { + locales: Vec<String>, + site_slug: String, + } + + let Input { locales, site_slug } = params.parse()?; + let locales = parse_locales(&locales)?; + SpecialErrorService::missing_site_slug(ctx, &locales, &site_slug).await } pub async fn special_error_missing_custom_domain( ctx: &ServiceContext<'_>, params: Params<'static>, ) -> Result<String> { - todo!() + #[derive(Deserialize, Debug)] + struct Input { + locales: Vec<String>, + domain: String, + } + + let Input { locales, domain } = params.parse()?; + let locales = parse_locales(&locales)?; + SpecialErrorService::missing_custom_domain(ctx, &locales, &domain).await } diff --git a/deepwell/src/services/special_error.rs b/deepwell/src/services/special_error.rs index 22c7244b61..07107f0753 100644 --- a/deepwell/src/services/special_error.rs +++ b/deepwell/src/services/special_error.rs @@ -27,6 +27,7 @@ use super::prelude::*; use crate::utils::parse_locales; use fluent::{FluentArgs, FluentValue}; +use serde::Deserialize; use unic_langid::LanguageIdentifier; #[derive(Debug)] From fab634ab20e8aaf5455f8dd6620eaf64d2b0ffe6 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 12 Feb 2025 22:02:47 -0500 Subject: [PATCH 181/306] Start Accept-Language parsing, add special errors. --- wws/Cargo.lock | 7 +++++++ wws/Cargo.toml | 1 + wws/src/deepwell.rs | 36 ++++++++++++++++++++++++++++++++++++ wws/src/handler/mod.rs | 23 +++++++++++++++++++++++ 4 files changed, 67 insertions(+) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index e8ac07703c..415a977ace 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -2,6 +2,12 @@ # It is not intended for manual editing. 
version = 4 +[[package]] +name = "accept-language" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f27d075294830fcab6f66e320dab524bc6d048f4a151698e153205559113772" + [[package]] name = "addr2line" version = "0.24.2" @@ -2689,6 +2695,7 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" name = "wws" version = "2025.2.6" dependencies = [ + "accept-language", "anyhow", "axum", "axum-extra", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index a55870585e..0260b09521 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -13,6 +13,7 @@ authors = ["Emmie Smith <emmie.maeda@gmail.com>"] edition = "2021" [dependencies] +accept-language = "3" anyhow = "1" axum = { version = "0.8", features = ["http1", "http2", "macros", "tokio", "tower-log", "tracing"], default-features = false } axum-extra = { version = "0.10", features = ["attachment"] } diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index cf8d953333..b8e5d1a7d0 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -162,6 +162,42 @@ impl Deepwell { let file_data: Option<FileData> = self.client.request("file_get", params).await?; Ok(file_data) } + + pub async fn get_special_error_missing_site_slug( + &self, + locales: &[&str], + site_slug: &str, + ) -> Result<String> { + let params = rpc_object! { + "locales" => locales, + "site_slug" => site_slug, + }; + + let html: String = self + .client + .request("special_error_missing_site_slug", params) + .await?; + + Ok(html) + } + + pub async fn get_special_error_missing_custom_domain( + &self, + locales: &[&str], + domain: &str, + ) -> Result<String> { + let params = rpc_object! { + "locales" => locales, + "domain" => domain, + }; + + let html: String = self + .client + .request("special_error_missing_custom_domain", params) + .await?; + + Ok(html) + } } #[derive(Debug, Clone)] diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 35acd11e16..4942ef941e 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -203,3 +203,26 @@ pub async fn handle_host_delegation( } } } + +/// Parse the `Accept-Language` header. +/// If there are no languages, or there is no header, then use English. +fn parse_accept_language(headers: &HeaderMap) -> Vec<String> { + fn get_header_value(headers: &HeaderMap) -> Option<&str> { + match headers.get("accept-language") { + Some(value) => value.to_str().ok(), + None => None, + } + } + + let header_value = match get_header_value(headers) { + Some(value) => value, + None => return vec![str!("en")], + }; + + let mut languages = accept_language::parse(header_value); + if languages.is_empty() { + languages.push(str!("en")); + } + + languages +} From 4232dd1634faf20a6ef2701fbc280cbd02cc5813 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 12 Feb 2025 22:19:51 -0500 Subject: [PATCH 182/306] Add assertions for locales. 
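These assertions only guard against programming errors: on the wws side,
parse_accept_language() already falls back to ["en"] when the header is
missing or yields nothing, so an empty list should never reach DEEPWELL.

A small standalone sketch of the accept-language crate behavior this
relies on (the header value is made up):

    fn main() {
        // Entries come back ordered by quality weight, highest first.
        let langs = accept_language::parse("zh-Hans;q=0.8, en-US, en;q=0.9");
        assert_eq!(langs.first().map(String::as_str), Some("en-US"));
        println!("{langs:?}");
    }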
--- deepwell/src/services/special_error.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/deepwell/src/services/special_error.rs b/deepwell/src/services/special_error.rs index 07107f0753..ea7b228ef5 100644 --- a/deepwell/src/services/special_error.rs +++ b/deepwell/src/services/special_error.rs @@ -40,6 +40,7 @@ impl SpecialErrorService { locales: &[LanguageIdentifier], site_slug: &str, ) -> Result<String> { + assert!(!locales.is_empty(), "No languages specified"); let config = ctx.config(); let mut args = FluentArgs::new(); args.set("slug", fluent_str!(site_slug)); @@ -59,6 +60,7 @@ impl SpecialErrorService { locales: &[LanguageIdentifier], domain: &str, ) -> Result<String> { + assert!(!locales.is_empty(), "No languages specified"); let config = ctx.config(); let mut args = FluentArgs::new(); args.set("custom_domain", fluent_str!(domain)); From 9c9982c92df7471ee9ba151eacd6e429e38e3468 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 12 Feb 2025 22:20:20 -0500 Subject: [PATCH 183/306] Move parse_accept_language() helper function. --- wws/src/handler/mod.rs | 46 +++++++++++++++++++++--------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 4942ef941e..cc9e49b2a9 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -78,6 +78,29 @@ fn get_site_info(headers: &HeaderMap) -> (i64, &str) { (site_id, site_slug) } +/// Parse the `Accept-Language` header. +/// If there are no languages, or there is no header, then use English. +fn parse_accept_language(headers: &HeaderMap) -> Vec<String> { + fn get_header_value(headers: &HeaderMap) -> Option<&str> { + match headers.get("accept-language") { + Some(value) => value.to_str().ok(), + None => None, + } + } + + let header_value = match get_header_value(headers) { + Some(value) => value, + None => return vec![str!("en")], + }; + + let mut languages = accept_language::parse(header_value); + if languages.is_empty() { + languages.push(str!("en")); + } + + languages +} + /// Entry route handler to first process host information. /// /// Before we can give this request to the right place, @@ -203,26 +226,3 @@ pub async fn handle_host_delegation( } } } - -/// Parse the `Accept-Language` header. -/// If there are no languages, or there is no header, then use English. -fn parse_accept_language(headers: &HeaderMap) -> Vec<String> { - fn get_header_value(headers: &HeaderMap) -> Option<&str> { - match headers.get("accept-language") { - Some(value) => value.to_str().ok(), - None => None, - } - } - - let header_value = match get_header_value(headers) { - Some(value) => value, - None => return vec![str!("en")], - }; - - let mut languages = accept_language::parse(header_value); - if languages.is_empty() { - languages.push(str!("en")); - } - - languages -} From f9c2d26568617a029f91c7cd0bb1a8cc5682b2b2 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 12 Feb 2025 22:40:25 -0500 Subject: [PATCH 184/306] Set up special_error() helper function. 
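The helper takes a closure returning a future so each call site can
capture whatever it needs (state, slug, domain) while the
Accept-Language parsing and the error fallback live in one place.

A stripped-down sketch of the same shape, with plain String errors and
no axum types; this is not the actual wws implementation:

    use std::future::Future;

    async fn special_error<F, Fut>(locales: Vec<String>, f: F) -> String
    where
        F: FnOnce(Vec<String>) -> Fut,
        Fut: Future<Output = Result<String, String>>,
    {
        match f(locales).await {
            Ok(html) => html,
            Err(error) => format!("<h1>fallback error page</h1><!-- {error} -->"),
        }
    }

    #[tokio::main]
    async fn main() {
        let html = special_error(vec!["en".to_string()], |locales| async move {
            Ok::<_, String>(format!("<p>locales: {locales:?}</p>"))
        })
        .await;
        println!("{html}");
    }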
--- wws/src/deepwell.rs | 4 ++-- wws/src/handler/mod.rs | 37 +++++++++++++++++++++++++++++++++---- 2 files changed, 35 insertions(+), 6 deletions(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index b8e5d1a7d0..fb2c70daf5 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -165,7 +165,7 @@ impl Deepwell { pub async fn get_special_error_missing_site_slug( &self, - locales: &[&str], + locales: &[String], site_slug: &str, ) -> Result<String> { let params = rpc_object! { @@ -183,7 +183,7 @@ impl Deepwell { pub async fn get_special_error_missing_custom_domain( &self, - locales: &[&str], + locales: &[String], domain: &str, ) -> Result<String> { let params = rpc_object! { diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index cc9e49b2a9..668c8eef7c 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -37,7 +37,7 @@ pub use self::robots::*; pub use self::well_known::*; use crate::{ - error::ServerErrorCode, + error::{Result, ServerErrorCode}, host::{lookup_host, SiteAndHost}, path::get_path, state::ServerState, @@ -46,9 +46,10 @@ use axum::{ body::Body, extract::Request, http::header::{HeaderMap, HeaderName}, - response::{IntoResponse, Redirect, Response}, + response::{Html, IntoResponse, Redirect, Response}, Router, }; +use std::future::Future; use tower::util::ServiceExt; pub const HEADER_SITE_ID: HeaderName = HeaderName::from_static("x-wikijump-site-id"); @@ -101,6 +102,22 @@ fn parse_accept_language(headers: &HeaderMap) -> Vec<String> { languages } +/// Helper function to return a special error response. +async fn special_error<F, Fut>(headers: &HeaderMap, f: F) -> Response +where + F: FnOnce(Vec<String>) -> Fut, + Fut: Future<Output = Result<String>>, +{ + let locales = parse_accept_language(headers); + match f(locales).await { + Ok(html) => Html(html).into_response(), + Err(error) => { + error!("Unable to get special error HTML: {error}"); + todo!() // TODO error/html return + } + } +} + /// Entry route handler to first process host information. /// /// Before we can give this request to the right place, @@ -213,7 +230,13 @@ pub async fn handle_host_delegation( site_slug = site_slug, "No such site with slug", ); - ServerErrorCode::SiteNotFound { site_slug }.into_response() + special_error(request.headers(), |locales| async move { + state + .deepwell + .get_special_error_missing_site_slug(&locales, site_slug) + .await + }) + .await } // Custom domain missing SiteAndHost::MissingCustomDomain { ref domain } => { @@ -222,7 +245,13 @@ pub async fn handle_host_delegation( domain = domain, "No such site with custom domain", ); - ServerErrorCode::CustomDomainNotFound { domain }.into_response() + special_error(request.headers(), |locales| async move { + state + .deepwell + .get_special_error_missing_custom_domain(&locales, domain) + .await + }) + .await } } } From e980bcf34639c1f939ddffaf431a155553afb362 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 12 Feb 2025 22:48:44 -0500 Subject: [PATCH 185/306] Add deepwell DeepwellFailure error case. 
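DeepwellFailure takes code 6001, which shifts the existing fetch errors
to 6002 through 6005; the ServerErrorCode table in wws is renumbered to
match.

A condensed sketch of the pattern both tables follow, assuming the
thiserror derive that matches the #[error(...)] attributes on the
DEEPWELL side (the real enums have many more variants):

    use thiserror::Error;

    #[derive(Debug, Error)]
    enum ServerError {
        #[error("The web server failed to process the request")]
        WebServerFailure,
        #[error("The web server did not get a successful DEEPWELL response")]
        DeepwellFailure,
    }

    impl ServerError {
        fn error_code(&self) -> i32 {
            match self {
                ServerError::WebServerFailure => 6000,
                ServerError::DeepwellFailure => 6001,
            }
        }
    }

    fn main() {
        let error = ServerError::DeepwellFailure;
        println!("{error} (code {})", error.error_code());
    }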
--- deepwell/src/services/error.rs | 12 ++++++++---- wws/src/error/html.rs | 9 +++++---- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/deepwell/src/services/error.rs b/deepwell/src/services/error.rs index e15da7bb88..b520a0fb2f 100644 --- a/deepwell/src/services/error.rs +++ b/deepwell/src/services/error.rs @@ -307,6 +307,9 @@ pub enum Error { #[error("The web server failed to process the request")] WebServerFailure, + #[error("The web server did not get a successful DEEPWELL response")] + DeepwellFailure, + #[error("The web server cannot fetch site information")] SiteFetch, @@ -455,10 +458,11 @@ impl Error { // // WebServerFailure is pretty general, avoid using it if possible. Error::WebServerFailure => 6000, - Error::SiteFetch => 6001, - Error::PageFetch => 6002, - Error::FileFetch => 6003, - Error::BlobFetch => 6004, + Error::DeepwellFailure => 6001, + Error::SiteFetch => 6002, + Error::PageFetch => 6003, + Error::FileFetch => 6004, + Error::BlobFetch => 6005, } } diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index b98211eac6..6eeade6c88 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -94,10 +94,11 @@ impl ServerErrorCode<'_> { ServerErrorCode::CustomDomainNotFound { .. } => 2013, ServerErrorCode::PageNotFound { .. } => 2005, ServerErrorCode::FileNotFound { .. } => 2009, - ServerErrorCode::SiteFetch { .. } => 6001, - ServerErrorCode::PageFetch { .. } => 6002, - ServerErrorCode::FileFetch { .. } => 6003, - ServerErrorCode::BlobFetch { .. } => 6004, + ServerErrorCode::DeepwellFailure => 6001, + ServerErrorCode::SiteFetch { .. } => 6002, + ServerErrorCode::PageFetch { .. } => 6003, + ServerErrorCode::FileFetch { .. } => 6004, + ServerErrorCode::BlobFetch { .. } => 6005, } } From 37cdf4cdd01cea393c1ceb7856e5519f573e3bd0 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Thu, 13 Feb 2025 23:52:29 -0500 Subject: [PATCH 186/306] Add error case for severe issues. --- wws/src/error/html.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index 6eeade6c88..a0caafac27 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -62,6 +62,7 @@ pub enum ServerErrorCode<'a> { page_id: i64, filename: &'a str, }, + DeepwellFailure, SiteFetch { domain: &'a str, }, @@ -109,7 +110,8 @@ impl ServerErrorCode<'_> { | ServerErrorCode::CustomDomainNotFound { .. } | ServerErrorCode::PageNotFound { .. } | ServerErrorCode::FileNotFound { .. } => StatusCode::NOT_FOUND, - ServerErrorCode::SiteFetch { .. } + ServerErrorCode::DeepwellFailure + | ServerErrorCode::SiteFetch { .. } | ServerErrorCode::PageFetch { .. } | ServerErrorCode::FileFetch { .. } | ServerErrorCode::BlobFetch { .. } => StatusCode::INTERNAL_SERVER_ERROR, @@ -124,6 +126,7 @@ impl ServerErrorCode<'_> { } ServerErrorCode::PageNotFound { .. } => "Page not found", ServerErrorCode::FileNotFound { .. } => "File not found", + ServerErrorCode::DeepwellFailure => "Server error", ServerErrorCode::SiteFetch { .. } => "Cannot load site information", ServerErrorCode::PageFetch { .. } => "Cannot load page", ServerErrorCode::FileFetch { .. 
} => "Cannot load file", @@ -178,6 +181,9 @@ impl ServerErrorCode<'_> { site_id, ); } + ServerErrorCode::DeepwellFailure => { + str_write!(body, "Fatal: Cannot process request from backend server"); + } ServerErrorCode::SiteFetch { domain } => { str_write!( body, From 26ea1a5444a92dd2c47ebc1ed063c872c36eb7c3 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 01:41:04 -0500 Subject: [PATCH 187/306] Use new error in special_error. --- wws/src/handler/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 668c8eef7c..bc05e1704f 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -113,7 +113,7 @@ where Ok(html) => Html(html).into_response(), Err(error) => { error!("Unable to get special error HTML: {error}"); - todo!() // TODO error/html return + ServerErrorCode::DeepwellFailure.into_response() } } } From cf47e18629091c62318a9c7d60ff2d27c21e4279 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 01:46:44 -0500 Subject: [PATCH 188/306] Remove unused error cases, now using SpecialErrorService. --- wws/src/error/html.rs | 32 +++----------------------------- 1 file changed, 3 insertions(+), 29 deletions(-) diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index a0caafac27..9c8fef46c8 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -47,12 +47,6 @@ const HTML_END: &str = "</body></html>"; /// These must match the corresponding errors in deepwell (`src/service/error.rs`) #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum ServerErrorCode<'a> { - SiteNotFound { - site_slug: &'a str, - }, - CustomDomainNotFound { - domain: &'a str, - }, PageNotFound { site_id: i64, page_slug: &'a str, @@ -91,8 +85,6 @@ impl ServerErrorCode<'_> { /// the same type (`i32`) is used here as in DEEPWELL. pub fn error_code(self) -> i32 { match self { - ServerErrorCode::SiteNotFound { .. } => 2004, - ServerErrorCode::CustomDomainNotFound { .. } => 2013, ServerErrorCode::PageNotFound { .. } => 2005, ServerErrorCode::FileNotFound { .. } => 2009, ServerErrorCode::DeepwellFailure => 6001, @@ -106,10 +98,9 @@ impl ServerErrorCode<'_> { /// Returns the HTTP status code for this error. pub fn status_code(self) -> StatusCode { match self { - ServerErrorCode::SiteNotFound { .. } - | ServerErrorCode::CustomDomainNotFound { .. } - | ServerErrorCode::PageNotFound { .. } - | ServerErrorCode::FileNotFound { .. } => StatusCode::NOT_FOUND, + ServerErrorCode::PageNotFound { .. } | ServerErrorCode::FileNotFound { .. } => { + StatusCode::NOT_FOUND + } ServerErrorCode::DeepwellFailure | ServerErrorCode::SiteFetch { .. } | ServerErrorCode::PageFetch { .. } @@ -121,9 +112,6 @@ impl ServerErrorCode<'_> { /// Returns the HTML title for this error. fn title(self) -> &'static str { match self { - ServerErrorCode::SiteNotFound { .. } | ServerErrorCode::CustomDomainNotFound { .. } => { - "Site not found" - } ServerErrorCode::PageNotFound { .. } => "Page not found", ServerErrorCode::FileNotFound { .. 
} => "File not found", ServerErrorCode::DeepwellFailure => "Server error", @@ -146,20 +134,6 @@ impl ServerErrorCode<'_> { // Write error body match self { - ServerErrorCode::SiteNotFound { site_slug } => { - str_write!( - body, - "No site exists at \"<code>{}</code>\".", - html_escape(site_slug), - ) - } - ServerErrorCode::CustomDomainNotFound { domain } => { - str_write!( - body, - "No site exists with the custom domain \"<code>{}</code>\".", - html_escape(domain), - ) - } ServerErrorCode::PageNotFound { site_id, page_slug } => { str_write!( body, From baa02fdb4cc14da38fe5542c190fed6b71329b7a Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 02:13:46 -0500 Subject: [PATCH 189/306] Move arg order for special error case. --- deepwell/src/services/special_error.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deepwell/src/services/special_error.rs b/deepwell/src/services/special_error.rs index ea7b228ef5..c88e0ab35d 100644 --- a/deepwell/src/services/special_error.rs +++ b/deepwell/src/services/special_error.rs @@ -43,9 +43,9 @@ impl SpecialErrorService { assert!(!locales.is_empty(), "No languages specified"); let config = ctx.config(); let mut args = FluentArgs::new(); - args.set("slug", fluent_str!(site_slug)); args.set("main_domain", fluent_str!(config.main_domain_no_dot)); args.set("files_domain", fluent_str!(config.files_domain_no_dot)); + args.set("slug", fluent_str!(site_slug)); let html = ctx.localization() @@ -63,9 +63,9 @@ impl SpecialErrorService { assert!(!locales.is_empty(), "No languages specified"); let config = ctx.config(); let mut args = FluentArgs::new(); - args.set("custom_domain", fluent_str!(domain)); args.set("main_domain", fluent_str!(config.main_domain_no_dot)); args.set("files_domain", fluent_str!(config.files_domain_no_dot)); + args.set("custom_domain", fluent_str!(domain)); let html = ctx.localization() From c26d605f39e7ada9000d6af9e08918c2efae21a0 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 03:16:58 -0500 Subject: [PATCH 190/306] Add new special error for issues with site/host fetch. 
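This also corrects the site-slug message so its return link uses
$main_domain, matching the argument names SpecialErrorService sets.

A short standalone sketch of why that matters: when a Fluent message
references a variable that was never provided, the text still renders
but the resolver reports an error (stripped-down message, not the real
template):

    use fluent::{FluentArgs, FluentBundle, FluentResource};
    use unic_langid::LanguageIdentifier;

    fn main() {
        let ftl = "site-fetch = Return to { $main_domain }.".to_string();
        let resource = FluentResource::try_new(ftl).expect("invalid FTL");
        let lang: LanguageIdentifier = "en".parse().expect("invalid locale");
        let mut bundle = FluentBundle::new(vec![lang]);
        bundle.add_resource(resource).expect("duplicate message id");

        // No "main_domain" argument is set here, on purpose.
        let args = FluentArgs::new();
        let message = bundle.get_message("site-fetch").expect("missing message");
        let mut errors = vec![];
        let text = bundle.format_pattern(message.value().unwrap(), Some(&args), &mut errors);

        assert!(!errors.is_empty(), "missing variable should be reported");
        println!("{text} ({} resolver error(s))", errors.len());
    }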
--- deepwell/src/endpoints/special_error.rs | 15 +++++++++++++++ deepwell/src/services/special_error.rs | 24 ++++++++++++++++++++++-- locales/fluent/special-error/en.ftl | 11 ++++++++++- wws/src/handler/mod.rs | 8 +++++++- 4 files changed, 54 insertions(+), 4 deletions(-) diff --git a/deepwell/src/endpoints/special_error.rs b/deepwell/src/endpoints/special_error.rs index 5e5151f376..a9989c9cba 100644 --- a/deepwell/src/endpoints/special_error.rs +++ b/deepwell/src/endpoints/special_error.rs @@ -51,3 +51,18 @@ pub async fn special_error_missing_custom_domain( let locales = parse_locales(&locales)?; SpecialErrorService::missing_custom_domain(ctx, &locales, &domain).await } + +pub async fn special_error_site_fetch( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<String> { + #[derive(Deserialize, Debug)] + struct Input { + locales: Vec<String>, + domain: String, + } + + let Input { locales, domain } = params.parse()?; + let locales = parse_locales(&locales)?; + SpecialErrorService::site_fetch(ctx, &locales, &domain).await +} diff --git a/deepwell/src/services/special_error.rs b/deepwell/src/services/special_error.rs index c88e0ab35d..84ad06e6fd 100644 --- a/deepwell/src/services/special_error.rs +++ b/deepwell/src/services/special_error.rs @@ -34,7 +34,7 @@ use unic_langid::LanguageIdentifier; pub struct SpecialErrorService; impl SpecialErrorService { - /// Produce output for a canonical site that does not exist. + /// Error for when a canonical site does not exist. pub async fn missing_site_slug( ctx: &ServiceContext<'_>, locales: &[LanguageIdentifier], @@ -54,7 +54,7 @@ impl SpecialErrorService { Ok(html.to_string()) } - /// Produce output for a custom domain that does not exist. + /// Error for when a custom domain does not exist. pub async fn missing_custom_domain( ctx: &ServiceContext<'_>, locales: &[LanguageIdentifier], @@ -73,4 +73,24 @@ impl SpecialErrorService { Ok(html.to_string()) } + + /// Error for when fetching host information fails. + pub async fn site_fetch( + ctx: &ServiceContext<'_>, + locales: &[LanguageIdentifier], + domain: &str, + ) -> Result<String> { + assert!(!locales.is_empty(), "No languages specified"); + let config = ctx.config(); + let mut args = FluentArgs::new(); + args.set("main_domain", fluent_str!(config.main_domain_no_dot)); + args.set("files_domain", fluent_str!(config.files_domain_no_dot)); + args.set("domain", fluent_str!(domain)); + + let html = + ctx.localization() + .translate(locales, "special-error-site-fetch", &args)?; + + Ok(html.to_string()) + } } diff --git a/locales/fluent/special-error/en.ftl b/locales/fluent/special-error/en.ftl index aa29f22ba1..19ce30012a 100644 --- a/locales/fluent/special-error/en.ftl +++ b/locales/fluent/special-error/en.ftl @@ -3,7 +3,7 @@ special-error-site-slug = <h1>No { -service-name } site exists with this address.</h1> <p> <a href="https://{ $slug }.{ $domain }/">{ $slug }.{ $domain }</a> does not exist. - Return to <a href="https://{ $domain }/">{ -service-name }</a>. + Return to <a href="https://{ $main_domain }/">{ -service-name }</a>. </p> special-error-site-custom = <h1>No { -service-name } site exists with this address.</h1> @@ -11,3 +11,12 @@ special-error-site-custom = <h1>No { -service-name } site exists with this addre No site has the custom domain <a href="https://{ $custom_domain }/">{ $custom_domain }</a>. Return to <a href="https://{ $main_domain }/">{ -service-name }</a>. 
</p> + +special-error-site-fetch = <h1>Unable to fetch site information.</h1> + <p> + { -service-name } was unable to retrieve information about <code>{ $domain }</code>. + </p> + + <p> + Return to <a href="https://{ $main_domain }/">{ -service-name }</a>. + </p> diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index bc05e1704f..21078429a4 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -170,7 +170,13 @@ pub async fn handle_host_delegation( Ok(host_data) => host_data, Err(error) => { error!("Unable to fetch site/host information: {error}"); - return ServerErrorCode::SiteFetch { domain: &hostname }.into_response(); + special_error(request.headers(), |locales| async move { + state + .deepwell + .get_special_error_site_fetch(&locales, &hostname) + .await + }) + .await } }; From 0adbb64a25c57bfe579abf5806f1780993f5340d Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 03:26:42 -0500 Subject: [PATCH 191/306] Use new special error for SiteAndHost failure. --- wws/src/deepwell.rs | 18 ++++++++++++++++++ wws/src/error/html.rs | 13 ------------- wws/src/handler/mod.rs | 4 ++-- 3 files changed, 20 insertions(+), 15 deletions(-) diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index fb2c70daf5..1ea1428f28 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -198,6 +198,24 @@ impl Deepwell { Ok(html) } + + pub async fn get_special_error_site_fetch( + &self, + locales: &[String], + domain: &str, + ) -> Result<String> { + let params = rpc_object! { + "locales" => locales, + "domain" => domain, + }; + + let html: String = self + .client + .request("special_error_missing_custom_domain", params) + .await?; + + Ok(html) + } } #[derive(Debug, Clone)] diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index 9c8fef46c8..b5516ec99d 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -57,9 +57,6 @@ pub enum ServerErrorCode<'a> { filename: &'a str, }, DeepwellFailure, - SiteFetch { - domain: &'a str, - }, PageFetch { site_id: i64, page_slug: &'a str, @@ -88,7 +85,6 @@ impl ServerErrorCode<'_> { ServerErrorCode::PageNotFound { .. } => 2005, ServerErrorCode::FileNotFound { .. } => 2009, ServerErrorCode::DeepwellFailure => 6001, - ServerErrorCode::SiteFetch { .. } => 6002, ServerErrorCode::PageFetch { .. } => 6003, ServerErrorCode::FileFetch { .. } => 6004, ServerErrorCode::BlobFetch { .. } => 6005, @@ -102,7 +98,6 @@ impl ServerErrorCode<'_> { StatusCode::NOT_FOUND } ServerErrorCode::DeepwellFailure - | ServerErrorCode::SiteFetch { .. } | ServerErrorCode::PageFetch { .. } | ServerErrorCode::FileFetch { .. } | ServerErrorCode::BlobFetch { .. } => StatusCode::INTERNAL_SERVER_ERROR, @@ -115,7 +110,6 @@ impl ServerErrorCode<'_> { ServerErrorCode::PageNotFound { .. } => "Page not found", ServerErrorCode::FileNotFound { .. } => "File not found", ServerErrorCode::DeepwellFailure => "Server error", - ServerErrorCode::SiteFetch { .. } => "Cannot load site information", ServerErrorCode::PageFetch { .. } => "Cannot load page", ServerErrorCode::FileFetch { .. } => "Cannot load file", ServerErrorCode::BlobFetch { .. 
} => "Cannot load file data", @@ -158,13 +152,6 @@ impl ServerErrorCode<'_> { ServerErrorCode::DeepwellFailure => { str_write!(body, "Fatal: Cannot process request from backend server"); } - ServerErrorCode::SiteFetch { domain } => { - str_write!( - body, - "Cannot load site information for domain \"<code>{}</code>\".", - html_escape(domain), - ); - } ServerErrorCode::PageFetch { site_id, page_slug } => { str_write!( body, diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 21078429a4..d31088a668 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -170,13 +170,13 @@ pub async fn handle_host_delegation( Ok(host_data) => host_data, Err(error) => { error!("Unable to fetch site/host information: {error}"); - special_error(request.headers(), |locales| async move { + return special_error(request.headers(), |locales| async move { state .deepwell .get_special_error_site_fetch(&locales, &hostname) .await }) - .await + .await; } }; From 198f66227bce073e552981c75bc331b296403a9a Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 03:37:16 -0500 Subject: [PATCH 192/306] Add note about future use of ServerErrorCode. --- wws/src/error/html.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs index b5516ec99d..55f6a47844 100644 --- a/wws/src/error/html.rs +++ b/wws/src/error/html.rs @@ -28,6 +28,10 @@ //! This is very basic HTML generation. If we need to do anything //! more fancy in the future, then feel free to replace this with //! something better. +//! +//! Alternatively, you may want to move these error cases to +//! `SpecialErrorService` in DEEPWELL, where they can benefit +//! from localization. use axum::{ body::Body, From 1061c69d442d92178988a9c5b02d2d7c05fd7c36 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 13:55:06 -0500 Subject: [PATCH 193/306] Register new special error method. --- deepwell/src/api.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/deepwell/src/api.rs b/deepwell/src/api.rs index c68ffd74fe..4abe3edbf8 100644 --- a/deepwell/src/api.rs +++ b/deepwell/src/api.rs @@ -204,6 +204,7 @@ async fn build_module(app_state: ServerState) -> anyhow::Result<RpcModule<Server "special_error_missing_custom_domain", special_error_missing_custom_domain, ); + register!("special_error_site_fetch", special_error_site_fetch); // Authentication register!("login", auth_login); From 5bfcfec4596b7af12a3d47da5516e9349e48753b Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 14:01:04 -0500 Subject: [PATCH 194/306] Fix runRedirect for no domain in framerail. 
--- framerail/src/lib/server/load/page.ts | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/framerail/src/lib/server/load/page.ts b/framerail/src/lib/server/load/page.ts index abf20b6192..e819e1e529 100644 --- a/framerail/src/lib/server/load/page.ts +++ b/framerail/src/lib/server/load/page.ts @@ -178,7 +178,7 @@ export async function loadPage( // TODO remove checkRedirect when errorStatus is fixed if (checkRedirect) { - runRedirect(viewData, domain, slug, extra) + runRedirect(viewData, slug, extra) } // Return to page for rendering @@ -187,7 +187,6 @@ export async function loadPage( function runRedirect( viewData, - originalDomain: string, originalSlug: Optional<string>, extra: Optional<string> ): void { @@ -196,10 +195,9 @@ function runRedirect( return } - const domain: string = viewData.redirectSite || originalDomain const slug: Optional<string> = viewData.redirectPage || originalSlug const route: string = buildRoute(slug, extra) - redirect(308, `https://${domain}/${route}`) + redirect(308, `/${route}`) } function buildRoute(slug: Optional<string>, extra: Optional<string>): string { From 0b144c60bc17b0650577ac0f1a868ed848809277 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 14:03:50 -0500 Subject: [PATCH 195/306] Remove header X-Wikijump-Domain. Host information is already passed through. --- framerail/src/lib/server/load/site-info.ts | 3 --- wws/src/handler/mod.rs | 8 +------- 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/framerail/src/lib/server/load/site-info.ts b/framerail/src/lib/server/load/site-info.ts index cb282be9f8..5128bdfcfd 100644 --- a/framerail/src/lib/server/load/site-info.ts +++ b/framerail/src/lib/server/load/site-info.ts @@ -4,12 +4,10 @@ // If the headers are first set by the client, those values // get erased. -const DOMAIN_HEADER = "x-wikijump-domain" const SITE_ID_HEADER = "x-wikijump-site-id" const SITE_SLUG_HEADER = "x-wikijump-site-slug" export interface SiteInfo { - domain: string siteId: number siteSlug: string } @@ -24,7 +22,6 @@ function getHeader(headers: Headers, key: string): string { } export function loadSiteInfo(headers: Headers): SiteInfo { - const domain = getHeader(headers, DOMAIN_HEADER) const siteSlug = getHeader(headers, SITE_SLUG_HEADER) const siteId = parseInt(getHeader(headers, SITE_ID_HEADER)) return { domain, siteId, siteSlug } diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index d31088a668..5e46e8561e 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -54,7 +54,6 @@ use tower::util::ServiceExt; pub const HEADER_SITE_ID: HeaderName = HeaderName::from_static("x-wikijump-site-id"); pub const HEADER_SITE_SLUG: HeaderName = HeaderName::from_static("x-wikijump-site-slug"); -pub const HEADER_DOMAIN: HeaderName = HeaderName::from_static("x-wikijump-domain"); pub const HEADER_IS_WIKIJUMP: HeaderName = HeaderName::from_static("x-wikijump"); pub const HEADER_WWS_VERSION: HeaderName = HeaderName::from_static("x-wikijump-wws-ver"); @@ -132,15 +131,10 @@ pub async fn handle_host_delegation( files_router: Router, ) -> Response { { - let headers = request.headers_mut(); - // Strip internal headers, just to be safe. + let headers = request.headers_mut(); headers.remove(HEADER_SITE_ID); headers.remove(HEADER_SITE_SLUG); - headers.remove(HEADER_DOMAIN); - - // Also add the domain header since that is the same before lookup_host() - headers.insert(HEADER_DOMAIN, header_value!(hostname)); } macro_rules! 
forward_request { From ab577ab5a9d2397f35b9cad14c817ed870b77668 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 14:26:23 -0500 Subject: [PATCH 196/306] Change --sudo short flag. --- install/local/deploy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install/local/deploy.py b/install/local/deploy.py index 4fed1c3516..117a04e337 100755 --- a/install/local/deploy.py +++ b/install/local/deploy.py @@ -20,7 +20,7 @@ def chdir_wikijump(): # Parse arguments argparser = argparse.ArgumentParser() argparser.add_argument( - "-S", + "-s", "--sudo", action="store_true", help="Runs the docker-compose command using sudo", From 55c3c6df70effb06cb9b7e595ed514b4f2238378 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 15:20:44 -0500 Subject: [PATCH 197/306] Change teapot content-type to text/plain. --- wws/src/handler/misc.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/wws/src/handler/misc.rs b/wws/src/handler/misc.rs index ed98996b1b..e64279cdbb 100644 --- a/wws/src/handler/misc.rs +++ b/wws/src/handler/misc.rs @@ -27,7 +27,7 @@ use axum::{ pub async fn handle_teapot() -> Response { Response::builder() .status(StatusCode::IM_A_TEAPOT) - .header(header::CONTENT_TYPE, "text/html; charset=utf-8") + .header(header::CONTENT_TYPE, "text/plain; charset=utf-8") .body(Body::from("🫖")) .expect("Unable to convert response data") } From 261f3b0af46e21c59c2c0f5c819ac9670aa4fe89 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 15:25:19 -0500 Subject: [PATCH 198/306] Add /-/health-check route. --- wws/src/handler/misc.rs | 25 ++++++++++++++++++++++--- wws/src/route.rs | 1 + 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/wws/src/handler/misc.rs b/wws/src/handler/misc.rs index e64279cdbb..727714c38b 100644 --- a/wws/src/handler/misc.rs +++ b/wws/src/handler/misc.rs @@ -18,20 +18,39 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. 
*/ +use crate::state::ServerState; use axum::{ body::Body, + extract::State, http::{header, status::StatusCode}, response::Response, }; -pub async fn handle_teapot() -> Response { +fn text_response(body: &'static str, status: StatusCode) -> Response { Response::builder() - .status(StatusCode::IM_A_TEAPOT) + .status(status) .header(header::CONTENT_TYPE, "text/plain; charset=utf-8") - .body(Body::from("🫖")) + .body(Body::from(body)) .expect("Unable to convert response data") } +pub async fn handle_teapot() -> Response { + text_response("🫖", StatusCode::IM_A_TEAPOT) +} + +pub async fn handle_health_check(State(state): State<ServerState>) -> Response { + // DEEPWELL's ping ensures both Postgres and Redis are connected + match state.deepwell.ping().await { + Ok(()) => { + text_response("✅", StatusCode::OK) + } + Err(error) => { + error!("Unable to perform health check: {error}"); + text_response("❌", StatusCode::SERVICE_UNAVAILABLE) + } + } +} + pub async fn handle_invalid_method() -> StatusCode { StatusCode::METHOD_NOT_ALLOWED } diff --git a/wws/src/route.rs b/wws/src/route.rs index c73d8ddfd1..9233874dde 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -115,6 +115,7 @@ pub fn build_router(state: ServerState) -> Router { .route("/robots.txt", get(handle_robots_txt)) // TODO .route("/.well-known", any(handle_well_known)) // TODO .route("/-/teapot", any(handle_teapot)) + .route("/-/health-check", any(handle_health_check)) // Middleware .layer(TraceLayer::new_for_http()) .layer(NormalizePathLayer::trim_trailing_slash()) From 17c5726b6c355ef29d67729cc759d91fd4f6b64d Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 15:27:21 -0500 Subject: [PATCH 199/306] Remove extra newline. --- install/common/api/health-check.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/install/common/api/health-check.sh b/install/common/api/health-check.sh index f51ee1d76d..c7a5e7549b 100755 --- a/install/common/api/health-check.sh +++ b/install/common/api/health-check.sh @@ -1,3 +1,2 @@ #!/bin/sh - curl -i -X POST --json '{"jsonrpc":"2.0","method":"ping","id":0}' http://localhost:2747/jsonrpc From 40413b0a118db61d377741c32427bd0c95a0ad0f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 15:28:50 -0500 Subject: [PATCH 200/306] Create local wws Docker image. --- install/local/wws/Dockerfile | 25 +++++++++++++++++++++++++ install/local/wws/health-check.sh | 2 ++ install/local/wws/wws-start | 13 +++++++++++++ 3 files changed, 40 insertions(+) create mode 100644 install/local/wws/Dockerfile create mode 100755 install/local/wws/health-check.sh create mode 100755 install/local/wws/wws-start diff --git a/install/local/wws/Dockerfile b/install/local/wws/Dockerfile new file mode 100644 index 0000000000..f101cff394 --- /dev/null +++ b/install/local/wws/Dockerfile @@ -0,0 +1,25 @@ +# +# WWS build +# + +# This image is modified for development, it retains the +# full rust container and rebuilds as needed, to ease +# iteration during development. 
+ +FROM rust:latest AS rust + +# Install helpers +RUN cargo install cargo-watch sqlx-cli + +# Install files +COPY ./install/local/api/wws-start /usr/local/bin/wikijump-wws-start +COPY ./install/local/api/health-check.sh /usr/local/bin/wikijump-health-check + +# Copy source +# Don't build until container execution (see cargo-watch) +RUN mkdir /src +COPY ./wws /src/wws +WORKDIR /src/wws + +EXPOSE 2747 +CMD ["/usr/local/bin/wikijump-wws-start"] diff --git a/install/local/wws/health-check.sh b/install/local/wws/health-check.sh new file mode 100755 index 0000000000..07ecd67ac4 --- /dev/null +++ b/install/local/wws/health-check.sh @@ -0,0 +1,2 @@ +#!/bin/sh +curl -i http://localhost/-/health-check diff --git a/install/local/wws/wws-start b/install/local/wws/wws-start new file mode 100755 index 0000000000..47ac026ec9 --- /dev/null +++ b/install/local/wws/wws-start @@ -0,0 +1,13 @@ +#!/bin/sh +set -e + +# Start watcher +# +# This will recompile and restart the deepwell daemon on any source changes +# The commandline being run for the daemon is "wws". + +exec /usr/bin/env RUST_BACKTRACE=1 \ + /usr/local/cargo/bin/cargo watch \ + --why \ + -w /src/wws \ + -x run From e8be8b58cafe45207f0eef4dff0fa612863af075 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 15:43:57 -0500 Subject: [PATCH 201/306] Add TODO for proxy IP headers. --- wws/src/handler/framerail.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/wws/src/handler/framerail.rs b/wws/src/handler/framerail.rs index ce152b2c01..e02bfc3346 100644 --- a/wws/src/handler/framerail.rs +++ b/wws/src/handler/framerail.rs @@ -29,6 +29,8 @@ use axum::{ pub async fn proxy_framerail(State(state): State<ServerState>, mut req: Request) -> Response { let path = get_path(req.uri()); info!(path = path, "Proxying request to framerail"); + + // TODO add X-Real-IP and X-Forwarded-For *req.uri_mut() = state.framerail.proxy_uri(path); state From 1b7191ea08aa2a2dfd4be5772fbd9ceba4ccdd6c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 16:08:40 -0500 Subject: [PATCH 202/306] Replace nginx container with wws. 
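The nginx configuration deleted below also carried a few legacy URL rewrites
(/_admin -> /-/admin, /login -> /-/login, /logout -> /-/logout, and so on)
alongside gzip and TLS termination. Response compression already appears to be
handled by the compression layer in the wws router, and TLS is not addressed by
this change. The legacy rewrites are not reproduced here either; if they are
still wanted, a minimal sketch of how they could be expressed as axum routes
follows (illustrative only, not part of this commit; note that axum's
Redirect::permanent answers 308 where nginx's "permanent" answered 301):

    // Illustrative sketch: legacy nginx rewrites expressed as axum routes.
    // The paths come from the nginx.conf removed in this commit; the helper
    // name is hypothetical.
    use axum::{response::Redirect, routing::any, Router};

    fn legacy_redirects<S>(router: Router<S>) -> Router<S>
    where
        S: Clone + Send + Sync + 'static,
    {
        router
            .route("/_admin", any(|| async { Redirect::permanent("/-/admin") }))
            .route("/login", any(|| async { Redirect::permanent("/-/login") }))
            .route("/logout", any(|| async { Redirect::permanent("/-/logout") }))
    }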
--- install/local/docker-compose.dev.yaml | 15 +++ install/local/docker-compose.yaml | 16 +-- install/local/nginx/Dockerfile | 38 ------- install/local/nginx/ffdhe2048.txt | 8 -- install/local/nginx/gzip.conf | 28 ------ install/local/nginx/nginx.conf | 98 ------------------- install/local/nginx/proxy.conf | 12 --- install/local/nginx/ssl.conf | 4 - install/local/nginx/web.conf | 6 -- install/local/{wws => router}/Dockerfile | 3 +- install/local/{wws => router}/health-check.sh | 0 install/local/{wws => router}/wws-start | 0 12 files changed, 26 insertions(+), 202 deletions(-) delete mode 100644 install/local/nginx/Dockerfile delete mode 100644 install/local/nginx/ffdhe2048.txt delete mode 100644 install/local/nginx/gzip.conf delete mode 100644 install/local/nginx/nginx.conf delete mode 100644 install/local/nginx/proxy.conf delete mode 100644 install/local/nginx/ssl.conf delete mode 100644 install/local/nginx/web.conf rename install/local/{wws => router}/Dockerfile (75%) rename install/local/{wws => router}/health-check.sh (100%) rename install/local/{wws => router}/wws-start (100%) diff --git a/install/local/docker-compose.dev.yaml b/install/local/docker-compose.dev.yaml index 671ba16ef6..ac714feaa3 100644 --- a/install/local/docker-compose.dev.yaml +++ b/install/local/docker-compose.dev.yaml @@ -64,3 +64,18 @@ services: source: ../../assets target: /app/src/assets read_only: true + router: + volumes: + # Rust sources + - type: bind + source: ../../wws/src + target: /src/wws/src + read_only: true + - type: bind + source: ../../wws/build.rs + target: /src/wws/build.rs + read_only: true + - type: bind + source: ../../wws/Cargo.toml + target: /src/wws/Cargo.toml + read_only: true diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index 5e7aa38e8d..c277ddfd97 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -98,18 +98,22 @@ services: timeout: 2s retries: 3 - nginx: + router: build: context: ../.. 
- dockerfile: install/local/nginx/Dockerfile + dockerfile: install/local/router/Dockerfile ports: - - "80:80" - - "443:443" + - "8080:80" links: + - api + - cache - web restart: always healthcheck: - test: ["CMD", "curl", "-If", "http://localhost/"] + test: ["CMD", "curl", "-If", "http://localhost/-/health-check"] interval: 120s - timeout: 1s + timeout: 2s retries: 3 + depends_on: + api: + condition: service_healthy diff --git a/install/local/nginx/Dockerfile b/install/local/nginx/Dockerfile deleted file mode 100644 index e6209cea42..0000000000 --- a/install/local/nginx/Dockerfile +++ /dev/null @@ -1,38 +0,0 @@ -# -# nginx reverse proxy -# - -FROM nginx:alpine - -RUN apk add --no-cache curl openssl - -# Add snake oil cert for local -RUN openssl req \ - -x509 \ - -verbose \ - -utf8 \ - -noenc \ - -sha256 \ - -days 365 \ - -newkey rsa:4096 \ - -out /etc/ssl/certs/nginx-selfsigned.crt \ - -keyout /etc/ssl/private/nginx-selfsigned.key \ - -subj "/CN=localhost" \ - -addext "subjectAltName=DNS:localhost,DNS:*.localhost" - -# Copy configuration files -COPY ./install/local/nginx/nginx.conf /etc/nginx/nginx.conf -COPY ./install/local/nginx/proxy.conf /etc/nginx/proxy.conf -COPY ./install/local/nginx/gzip.conf /etc/nginx/gzip.conf -COPY ./install/local/nginx/ffdhe2048.txt /etc/nginx/ffdhe2048.txt -COPY ./install/local/nginx/ssl.conf /etc/nginx/ssl.conf -COPY ./install/local/nginx/web.conf /etc/nginx/web.conf - -# Verify configuration -RUN nginx -t - -# Delete docker-entrypoint, this is a custom image -RUN rm -rf /docker-entrypoint.d/ - -EXPOSE 80 -EXPOSE 443 diff --git a/install/local/nginx/ffdhe2048.txt b/install/local/nginx/ffdhe2048.txt deleted file mode 100644 index 088f9673dc..0000000000 --- a/install/local/nginx/ffdhe2048.txt +++ /dev/null @@ -1,8 +0,0 @@ ------BEGIN DH PARAMETERS----- -MIIBCAKCAQEA//////////+t+FRYortKmq/cViAnPTzx2LnFg84tNpWp4TZBFGQz -+8yTnc4kmz75fS/jY2MMddj2gbICrsRhetPfHtXV/WVhJDP1H18GbtCFY2VVPe0a -87VXE15/V8k1mE8McODmi3fipona8+/och3xWKE2rec1MKzKT0g6eXq8CrGCsyT7 -YdEIqUuyyOP7uWrat2DX9GgdT0Kj3jlN9K5W7edjcrsZCwenyO4KbXCeAvzhzffi -7MA0BM0oNC9hkXL+nOmFg/+OTxIy7vKBg8P+OxtMb61zO7X8vC7CIAXFjvGDfRaD -ssbzSibBsu/6iGtCOGEoXJf//////////wIBAg== ------END DH PARAMETERS----- \ No newline at end of file diff --git a/install/local/nginx/gzip.conf b/install/local/nginx/gzip.conf deleted file mode 100644 index cb1959ad2c..0000000000 --- a/install/local/nginx/gzip.conf +++ /dev/null @@ -1,28 +0,0 @@ -# HTTP compression settings - -gzip on; -gzip_proxied any; -gzip_comp_level 6; -gzip_min_length 256; -gzip_types - application/atom+xml - application/geo+json - application/javascript - application/x-javascript - application/json - application/ld+json - application/manifest+json - application/rdf+xml - application/rss+xml - application/xhtml+xml - application/xml - application/wasm - application/octet-stream - font/eot - font/otf - font/ttf - image/svg+xml - text/css - text/javascript - text/plain - text/xml; diff --git a/install/local/nginx/nginx.conf b/install/local/nginx/nginx.conf deleted file mode 100644 index 73631ed5f7..0000000000 --- a/install/local/nginx/nginx.conf +++ /dev/null @@ -1,98 +0,0 @@ -worker_processes auto; -pid /run/nginx.pid; - -events { - worker_connections 4096; -} - -http { - include /etc/nginx/mime.types; - include /etc/nginx/proxy.conf; - include /etc/nginx/gzip.conf; # compression - - # Logging - - log_format main '$http_x_real_ip - $remote_user [$time_local] ' - '"$request" $status $body_bytes_sent "$http_referer" ' - '"$http_user_agent"'; - - log_format local 
'[$time_local] $status $request_method: $request_uri'; - - access_log /var/log/nginx/access.log main; - error_log /var/log/nginx/error.log; - - # Basic settings - - sendfile on; - tcp_nopush on; - tcp_nodelay on; - keepalive_timeout 70; - types_hash_max_size 2048; - - # SSL - - ssl_protocols TLSv1.2 TLSv1.3; - ssl_ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384; - ssl_prefer_server_ciphers off; - - include /etc/nginx/ssl.conf; - - ssl_session_timeout 1d; - ssl_session_cache shared:MozSSL:10m; - ssl_session_tickets off; - - ssl_dhparam /etc/nginx/ffdhe2048.txt; - - # HSTS - add_header Strict-Transport-Security "max-age=63072000" always; - - ssl_stapling on; - ssl_stapling_verify on; - - # Virtual Hosts - - # Redirect all HTTP -> HTTPS - server { - server_name _; - listen [::]:80; - listen 80; - - rewrite ^ https://$host$request_uri permanent; - } - - # Main handler - server { - server_name _; - listen [::]:443 ssl; - listen 443 ssl; - - # SEE ALSO: https://github.com/scpwiki/wikijump/blob/legacy-php/install/files/nginx.conf#L150 - location / { - # Redirects - rewrite ^/_admin(/.*)?$ /-/admin permanent; - rewrite ^/login$ /-/login permanent; - rewrite ^/logout$ /-/logout permanent; - rewrite ^/user:info/(.*)$ /-/user/$1 permanent; - rewrite ^/forum/start(.*)$ /forum permanent; - - # Reverse proxy - include /etc/nginx/web.conf; - } - } - - # Health check for Datadog - server { - server_name localhost; - listen 81; - - access_log off; - allow ::1; - allow 127.0.0.1; - deny all; - - location /nginx_status { - stub_status; - server_tokens on; - } - } -} diff --git a/install/local/nginx/proxy.conf b/install/local/nginx/proxy.conf deleted file mode 100644 index 648ac6c589..0000000000 --- a/install/local/nginx/proxy.conf +++ /dev/null @@ -1,12 +0,0 @@ -# General proxy settings - -proxy_redirect off; -proxy_set_header Host $host; -proxy_set_header X-Real-IP $remote_addr; -proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; -client_max_body_size 10m; -client_body_buffer_size 128k; -proxy_connect_timeout 90; -proxy_send_timeout 90; -proxy_read_timeout 90; -proxy_buffers 32 4k; diff --git a/install/local/nginx/ssl.conf b/install/local/nginx/ssl.conf deleted file mode 100644 index e30d4c9bd3..0000000000 --- a/install/local/nginx/ssl.conf +++ /dev/null @@ -1,4 +0,0 @@ -# Local TLS certificate - -ssl_certificate /etc/ssl/certs/nginx-selfsigned.crt; -ssl_certificate_key /etc/ssl/private/nginx-selfsigned.key; diff --git a/install/local/nginx/web.conf b/install/local/nginx/web.conf deleted file mode 100644 index 193f443348..0000000000 --- a/install/local/nginx/web.conf +++ /dev/null @@ -1,6 +0,0 @@ -# Location of web server - -# Use docker nameserver -# See https://stackoverflow.com/a/22259088 -resolver 127.0.0.11; -proxy_pass http://web:3000$request_uri; diff --git a/install/local/wws/Dockerfile b/install/local/router/Dockerfile similarity index 75% rename from install/local/wws/Dockerfile rename to install/local/router/Dockerfile index f101cff394..a191585747 100644 --- a/install/local/wws/Dockerfile +++ b/install/local/router/Dockerfile @@ -12,8 +12,7 @@ FROM rust:latest AS rust RUN cargo install cargo-watch sqlx-cli # Install files -COPY ./install/local/api/wws-start /usr/local/bin/wikijump-wws-start -COPY ./install/local/api/health-check.sh /usr/local/bin/wikijump-health-check +COPY 
./install/local/router/wws-start /usr/local/bin/wikijump-wws-start # Copy source # Don't build until container execution (see cargo-watch) diff --git a/install/local/wws/health-check.sh b/install/local/router/health-check.sh similarity index 100% rename from install/local/wws/health-check.sh rename to install/local/router/health-check.sh diff --git a/install/local/wws/wws-start b/install/local/router/wws-start similarity index 100% rename from install/local/wws/wws-start rename to install/local/router/wws-start From 462ab46be9f504f0b90928fafd1a6a554ba25330 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 16:40:15 -0500 Subject: [PATCH 203/306] Don't use debug logger level in deepwell.toml --- install/dev/api/deepwell.toml | 2 +- install/local/api/deepwell.toml | 2 +- install/prod/api/deepwell.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/install/dev/api/deepwell.toml b/install/dev/api/deepwell.toml index b842b4c716..6875a6b1c4 100644 --- a/install/dev/api/deepwell.toml +++ b/install/dev/api/deepwell.toml @@ -1,6 +1,6 @@ [logger] enable = true -level = "debug" +level = "info" [server] address = "[::]:2747" diff --git a/install/local/api/deepwell.toml b/install/local/api/deepwell.toml index 94d45d9dd7..c0817cf64d 100644 --- a/install/local/api/deepwell.toml +++ b/install/local/api/deepwell.toml @@ -1,6 +1,6 @@ [logger] enable = true -level = "debug" +level = "info" [server] address = "[::]:2747" diff --git a/install/prod/api/deepwell.toml b/install/prod/api/deepwell.toml index 8032e9ed06..4832bf65d0 100644 --- a/install/prod/api/deepwell.toml +++ b/install/prod/api/deepwell.toml @@ -1,6 +1,6 @@ [logger] enable = true -level = "debug" +level = "info" [server] address = "[::]:2747" From 481441b8bebc16d38226f9296275185b3a6488f5 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 17:28:09 -0500 Subject: [PATCH 204/306] Add secrets for wws. --- install/local/docker-compose.yaml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index c277ddfd97..b9be8663c1 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -108,6 +108,16 @@ services: - api - cache - web + environment: + - "DEEPWELL_URL=http://api:2747" + - "REDIS_URL=redis://cache" + - "FRAMERAIL_HOST=web:3000" + - "S3_BUCKET=deepwell-files" + - "S3_REGION_NAME=local" + - "S3_PATH_STYLE=true" + - "S3_CUSTOM_ENDPOINT=http://files:9000" + - "S3_ACCESS_KEY_ID=minio" + - "S3_SECRET_ACCESS_KEY=defaultpassword" restart: always healthcheck: test: ["CMD", "curl", "-If", "http://localhost/-/health-check"] From 13b166c71e71100e55af9df12ba5adcfbbbb5c84 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 17:32:51 -0500 Subject: [PATCH 205/306] Add explicit docker dependencies for wws. 
--- install/local/docker-compose.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index b9be8663c1..72ce6003b0 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -127,3 +127,7 @@ services: depends_on: api: condition: service_healthy + cache: + condition: service_healthy + files: + condition: service_healthy From 6501b7274df1ddaa4520efc526e1fc7983c8a537 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 17:46:09 -0500 Subject: [PATCH 206/306] Add internal headers to framerail healthcheck. --- install/local/docker-compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index 72ce6003b0..54fca0ca5d 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -93,7 +93,7 @@ services: - "DEEPWELL_HOST=api" restart: always healthcheck: - test: ["CMD", "curl", "-If", "http://localhost:3000/"] + test: ["CMD", "curl", "-If", "-H", "x-wikijump-site-slug: www", "-H", "x-wikijump-site-id: 1", "http://localhost:3000/"] interval: 120s timeout: 2s retries: 3 From 29ed6fb4ce162a9fc8dbe0af2fc8e86c3dacdbb1 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 17:48:09 -0500 Subject: [PATCH 207/306] Add -f (fail on error) to deepwell health check. --- install/common/api/health-check.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install/common/api/health-check.sh b/install/common/api/health-check.sh index c7a5e7549b..96bcd448e5 100755 --- a/install/common/api/health-check.sh +++ b/install/common/api/health-check.sh @@ -1,2 +1,2 @@ #!/bin/sh -curl -i -X POST --json '{"jsonrpc":"2.0","method":"ping","id":0}' http://localhost:2747/jsonrpc +curl -if -X POST --json '{"jsonrpc":"2.0","method":"ping","id":0}' http://localhost:2747/jsonrpc From 69cda07909f09c1812ae57f8e474531a8aaaf865 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 17:51:31 -0500 Subject: [PATCH 208/306] Add file for framerail health check. 
--- install/common/router/health-check.sh | 6 ++++++ install/local/docker-compose.yaml | 4 ++-- install/local/router/Dockerfile | 1 + install/local/web/health-check.sh | 1 + 4 files changed, 10 insertions(+), 2 deletions(-) create mode 100755 install/common/router/health-check.sh create mode 120000 install/local/web/health-check.sh diff --git a/install/common/router/health-check.sh b/install/common/router/health-check.sh new file mode 100755 index 0000000000..d6a102fef9 --- /dev/null +++ b/install/common/router/health-check.sh @@ -0,0 +1,6 @@ +#!/bin/sh +curl \ + -If \ + -H 'x-wikijump-site-slug: www' \ + -H 'x-wikijump-site-id: 1' \ + http://localhost:3000/ diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index 54fca0ca5d..c7c9d41bb3 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -69,7 +69,7 @@ services: - "S3_SECRET_ACCESS_KEY=defaultpassword" restart: always healthcheck: - test: ["CMD", "/bin/wikijump-health-check"] + test: ["CMD", "wikijump-health-check"] interval: 120s timeout: 2s retries: 3 @@ -93,7 +93,7 @@ services: - "DEEPWELL_HOST=api" restart: always healthcheck: - test: ["CMD", "curl", "-If", "-H", "x-wikijump-site-slug: www", "-H", "x-wikijump-site-id: 1", "http://localhost:3000/"] + test: ["CMD", "wikijump-health-check"] interval: 120s timeout: 2s retries: 3 diff --git a/install/local/router/Dockerfile b/install/local/router/Dockerfile index a191585747..3cc606ab26 100644 --- a/install/local/router/Dockerfile +++ b/install/local/router/Dockerfile @@ -13,6 +13,7 @@ RUN cargo install cargo-watch sqlx-cli # Install files COPY ./install/local/router/wws-start /usr/local/bin/wikijump-wws-start +COPY ./install/local/router/health-check.sh /usr/local/bin/wikijump-health-check # Copy source # Don't build until container execution (see cargo-watch) diff --git a/install/local/web/health-check.sh b/install/local/web/health-check.sh new file mode 120000 index 0000000000..63d5f38b8a --- /dev/null +++ b/install/local/web/health-check.sh @@ -0,0 +1 @@ +../../common/router/health-check.sh \ No newline at end of file From 85f14cf3471b87137f9ef9036520b1b5dda9aee9 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 18:30:14 -0500 Subject: [PATCH 209/306] Fix port forwarding. --- install/local/docker-compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index c7c9d41bb3..74fddd5b59 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -103,7 +103,7 @@ services: context: ../.. dockerfile: install/local/router/Dockerfile ports: - - "8080:80" + - "80:8080" links: - api - cache From 2f42ff71750db5fb18231add2b62b736df57d1fd Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 18:40:04 -0500 Subject: [PATCH 210/306] Add TODO for special error HTML. 
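For context: the special error fragments returned by DEEPWELL are bare
<h1>/<p> markup (see the Fluent messages added earlier in this series), so
"wrap HTML output into body" means wrapping that fragment in a complete
document before it is sent to the client. A rough sketch of what such a
wrapper could look like (hypothetical helper, not part of this commit):

    // Hypothetical: wrap a localized DEEPWELL error fragment in a minimal
    // page shell. The fragment is HTML already produced by DEEPWELL.
    fn wrap_special_error(fragment: &str) -> String {
        format!(
            "<!DOCTYPE html><html><head><meta charset=\"utf-8\">\
             <title>Wikijump</title></head><body>{fragment}</body></html>"
        )
    }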
--- wws/src/handler/mod.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 5e46e8561e..76eea46156 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -109,6 +109,7 @@ where { let locales = parse_accept_language(headers); match f(locales).await { + // TODO wrap HTML output into body Ok(html) => Html(html).into_response(), Err(error) => { error!("Unable to get special error HTML: {error}"); From 7c27b3dc7dd405e513308d15fd2a8749d1680f40 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 18:42:47 -0500 Subject: [PATCH 211/306] Run rustfmt. --- wws/src/handler/misc.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/wws/src/handler/misc.rs b/wws/src/handler/misc.rs index 727714c38b..7b47b7b821 100644 --- a/wws/src/handler/misc.rs +++ b/wws/src/handler/misc.rs @@ -41,9 +41,7 @@ pub async fn handle_teapot() -> Response { pub async fn handle_health_check(State(state): State<ServerState>) -> Response { // DEEPWELL's ping ensures both Postgres and Redis are connected match state.deepwell.ping().await { - Ok(()) => { - text_response("✅", StatusCode::OK) - } + Ok(()) => text_response("✅", StatusCode::OK), Err(error) => { error!("Unable to perform health check: {error}"); text_response("❌", StatusCode::SERVICE_UNAVAILABLE) From 67ecf4d52be0b65050f1679198e9b3e7145e22bd Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 19:01:20 -0500 Subject: [PATCH 212/306] Handle partial field setting in redis. --- wws/src/cache.rs | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index 91e7ff022b..c65754fb35 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -39,6 +39,14 @@ macro_rules! hset { }; } +macro_rules! hset_opt { + ($conn:expr, $key:expr, $field:expr, $value:expr $(,)?) => { + if let Some(value) = $value { + hset!($conn, $key, $field, value) + } + }; +} + macro_rules! hdel { ($conn:expr, $key:expr, $field:expr $(,)?) => { $conn.hdel::<_, _, ()>(&$key, $field).await? @@ -136,9 +144,9 @@ impl Cache { }; hset!(conn, key, "variant", variant); - hset!(conn, key, "id", site_id); - hset!(conn, key, "slug", slug); - hset!(conn, key, "domain", domain); + hset_opt!(conn, key, "id", site_id); + hset_opt!(conn, key, "slug", slug); + hset_opt!(conn, key, "domain", domain); Ok(()) } From 435a7e49ed2c3571fe7686c4fa832270880223d8 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 21:12:57 -0500 Subject: [PATCH 213/306] Catch miss variant. --- wws/src/cache.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/wws/src/cache.rs b/wws/src/cache.rs index c65754fb35..d136d699e8 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -129,7 +129,9 @@ impl Cache { SiteAndHost::MainSite { site_id, site_slug } => { ("site_found", Some(*site_id), Some(site_slug), Some(domain)) } - SiteAndHost::MainSiteRedirect { domain } => ("site_redirect", None, None, Some(domain)), + SiteAndHost::MainSiteRedirect { domain } => { + ("main_site_redirect", None, None, Some(domain)) + } SiteAndHost::MissingSiteSlug { site_slug } => { ("missing_site_slug", None, Some(site_slug), None) } From 976993cd8d9605fc123a4b54258cb6c55e3109dd Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 14 Feb 2025 21:14:27 -0500 Subject: [PATCH 214/306] Prefer Self to service name. 
--- deepwell/src/services/domain/service.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index 11f6cb74cd..f79e89e519 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -136,9 +136,7 @@ impl DomainService { macro_rules! found { ($site:expr) => {{ let config = ctx.config(); - let preferred_domain = - DomainService::preferred_domain(config, &$site).into_owned(); - + let preferred_domain = Self::preferred_domain(config, &$site).into_owned(); if domain == &preferred_domain { let SiteModel { site_id, From 49dde46f9aab13c381551b462a11fc58b892ea55 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 18 Feb 2025 00:21:54 -0500 Subject: [PATCH 215/306] Set allowedHosts in vite config. --- framerail/vite.config.ts | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/framerail/vite.config.ts b/framerail/vite.config.ts index 9974e4b696..7d6240fb46 100644 --- a/framerail/vite.config.ts +++ b/framerail/vite.config.ts @@ -12,7 +12,19 @@ const config: UserConfig = { server: { host: "::", port: 3000, - strictPort: true + strictPort: true, + + // This setting was added to avoid a security issue: + // https://github.com/vitejs/vite/security/advisories/GHSA-vg6x-rcgg-rjx6 + // + // Normally this should be a list but setting it to "true" disables the check. + // After discussion, this is acceptable because: + // 1. Vite is only used in development, not in deployed instances. + // 2. In the stack, wws receives requests and reverse proxies them + // to framerail. This performs a domain lookup to get site information, + // so hostile domains cannot utilize this exception since they are not + // in the site_domain table. Essentially, wws acts as "allowedHosts" for us. + allowedHosts: true }, plugins: [sveltekit()], define: { From 5b025df5dd1f66b784460bb3098fb7802043274c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sat, 22 Feb 2025 17:50:57 -0500 Subject: [PATCH 216/306] Remove missing local variable. --- framerail/src/lib/server/load/site-info.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/framerail/src/lib/server/load/site-info.ts b/framerail/src/lib/server/load/site-info.ts index 5128bdfcfd..ad63e1b9e2 100644 --- a/framerail/src/lib/server/load/site-info.ts +++ b/framerail/src/lib/server/load/site-info.ts @@ -24,5 +24,5 @@ function getHeader(headers: Headers, key: string): string { export function loadSiteInfo(headers: Headers): SiteInfo { const siteSlug = getHeader(headers, SITE_SLUG_HEADER) const siteId = parseInt(getHeader(headers, SITE_ID_HEADER)) - return { domain, siteId, siteSlug } + return { siteId, siteSlug } } From c400c7e9e0f68720ffc0be45f83432dec86a7972 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 18:41:01 -0500 Subject: [PATCH 217/306] Add client_ip_source to configuration. 
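The environment value is parsed directly into axum-client-ip's
SecureClientIpSource and later attached to the router as an extension layer,
which is where the SecureClientIp extractor reads it from. A minimal
standalone sketch of that wiring, assuming axum 0.8 and axum-client-ip 0.7 as
pinned in the lockfile below:

    // Standalone sketch of the axum-client-ip wiring used in this series.
    use axum::{routing::get, Router};
    use axum_client_ip::{SecureClientIp, SecureClientIpSource};

    // Echo the client address as determined by the configured source.
    async fn whoami(SecureClientIp(ip): SecureClientIp) -> String {
        ip.to_string()
    }

    fn router(source: SecureClientIpSource) -> Router {
        Router::new()
            .route("/whoami", get(whoami))
            // Tells SecureClientIp where to read the address from
            // (X-Real-IP, Forwarded, the socket address, etc.).
            .layer(source.into_extension())
    }

Note that the ConnectInfo source additionally requires the server to be
started with into_make_service_with_connect_info::<SocketAddr>(), which the
next patch takes care of.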
--- install/local/docker-compose.yaml | 1 + wws/.env.example | 14 ++++++++++++++ wws/Cargo.lock | 28 ++++++++++++++++++++++++++++ wws/Cargo.toml | 1 + wws/src/config/mod.rs | 9 +++++++++ wws/src/config/secrets.rs | 20 ++++++++++++++++++++ wws/src/route.rs | 1 + wws/src/state.rs | 30 ++++++++++++++++++++---------- 8 files changed, 94 insertions(+), 10 deletions(-) diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index 74fddd5b59..94e915c168 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -118,6 +118,7 @@ services: - "S3_CUSTOM_ENDPOINT=http://files:9000" - "S3_ACCESS_KEY_ID=minio" - "S3_SECRET_ACCESS_KEY=defaultpassword" + - "CLIENT_IP_SOURCE=ConnectInfo" restart: always healthcheck: test: ["CMD", "curl", "-If", "http://localhost/-/health-check"] diff --git a/wws/.env.example b/wws/.env.example index 683aeb80b5..063e239e64 100644 --- a/wws/.env.example +++ b/wws/.env.example @@ -35,4 +35,18 @@ S3_SECRET_ACCESS_KEY= # But don't include both. AWS_PROFILE_NAME=wikijump +# How client IP addresses are determined. +# See https://docs.rs/axum-client-ip/latest/axum_client_ip/enum.SecureClientIpSource.html +# +# Must have one of these values: +# - RightmostForwarded +# - RightmostXForwardedFor +# - XRealIp +# - FlyClientIp +# - TrueClientIp +# - CfConnectingIp +# - ConnectInfo +# - CloudFrontViewerAddress +CLIENT_IP_SOURCE=XRealIp + # vim: set ft=sh: diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 415a977ace..b95db9cc89 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -219,6 +219,17 @@ dependencies = [ "tracing", ] +[[package]] +name = "axum-client-ip" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff8ee1869817523c8f91c20bf17fd932707f66c2e7e0b0f811b29a227289562" +dependencies = [ + "axum", + "forwarded-header-value", + "serde", +] + [[package]] name = "axum-core" version = "0.5.0" @@ -584,6 +595,16 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "forwarded-header-value" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8835f84f38484cc86f110a805655697908257fb9a7af005234060891557198e9" +dependencies = [ + "nonempty", + "thiserror 1.0.69", +] + [[package]] name = "futures" version = "0.3.31" @@ -1326,6 +1347,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "nonempty" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9e591e719385e6ebaeb5ce5d3887f7d5676fceca6411d1925ccc95745f3d6f7" + [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -2698,6 +2725,7 @@ dependencies = [ "accept-language", "anyhow", "axum", + "axum-client-ip", "axum-extra", "built", "clap", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 0260b09521..4f533db9f7 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -16,6 +16,7 @@ edition = "2021" accept-language = "3" anyhow = "1" axum = { version = "0.8", features = ["http1", "http2", "macros", "tokio", "tower-log", "tracing"], default-features = false } +axum-client-ip = "0.7" axum-extra = { version = "0.10", features = ["attachment"] } clap = "4" color-backtrace = "0.6" diff --git a/wws/src/config/mod.rs b/wws/src/config/mod.rs index 76e3429f7d..2cee8e8db6 100644 --- a/wws/src/config/mod.rs +++ b/wws/src/config/mod.rs @@ -124,6 +124,14 @@ pub fn load_config() -> (Config, Secrets) { } }; + let client_ip_source = match get_env!("CLIENT_IP_SOURCE").parse() { + Ok(ip_source) => ip_source, + Err(_) => { + 
eprintln!("CLIENT_IP_SOURCE variable does not have a valid enum value"); + process::exit(1); + } + }; + // Build and return let config = Config { enable_trace, @@ -139,6 +147,7 @@ pub fn load_config() -> (Config, Secrets) { s3_region, s3_path_style, s3_credentials, + client_ip_source, }; (config, secrets) diff --git a/wws/src/config/secrets.rs b/wws/src/config/secrets.rs index fcf9e8ace1..c80461e5a0 100644 --- a/wws/src/config/secrets.rs +++ b/wws/src/config/secrets.rs @@ -18,6 +18,7 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ +use axum_client_ip::SecureClientIpSource; use s3::{creds::Credentials, region::Region}; #[derive(Debug, Clone)] @@ -62,4 +63,23 @@ pub struct Secrets { /// Alternatively you can have it read from the AWS credentials file. /// The profile to read from can be set in the `AWS_PROFILE_NAME` environment variable. pub s3_credentials: Credentials, + + /// Specify how client IP addresses are determined. + /// + /// In the crate `axum-client-ip`, you need to specify hoow `SecureClientIp` sources its + /// information, since it depends on the exact stack your web application is in. + /// + /// Set using environment variable `CLIENT_IP_SOURCE`, must have one of the following values: + /// (see [`SecureClientIpSource`]) + /// * `RightmostForwarded` + /// * `RightmostXForwardedFor` + /// * `XRealIp` + /// * `FlyClientIp` + /// * `TrueClientIp` + /// * `CfConnectingIp` + /// * `ConnectInfo` + /// * `CloudFrontViewerAddress` + /// + /// [`SecureClientIpSource`]: https://docs.rs/axum-client-ip/latest/axum_client_ip/enum.SecureClientIpSource.html)) + pub client_ip_source: SecureClientIpSource, } diff --git a/wws/src/route.rs b/wws/src/route.rs index 9233874dde..c5853f3597 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -126,6 +126,7 @@ pub fn build_router(state: ServerState) -> Router { .br(true) .zstd(true), ) + .layer(state.client_ip_source.clone().into_extension()) .layer(SetResponseHeaderLayer::overriding( HEADER_IS_WIKIJUMP, Some(HeaderValue::from_static("1")), diff --git a/wws/src/state.rs b/wws/src/state.rs index b8efcb848d..24301da5ce 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -27,6 +27,7 @@ use crate::{ host::SiteAndHost, }; use axum::body::Body; +use axum_client_ip::SecureClientIpSource; use hyper_util::{ client::legacy::{connect::HttpConnector, Client as HyperClient}, rt::TokioExecutor, @@ -48,23 +49,31 @@ pub struct ServerStateInner { pub framerail: Framerail, pub cache: Cache, pub s3_bucket: Box<Bucket>, + pub client_ip_source: SecureClientIpSource, } -pub async fn build_server_state(secrets: Secrets) -> Result<ServerState> { - let framerail = Framerail::new(secrets.framerail_host); - let deepwell = Deepwell::connect(&secrets.deepwell_url)?; +pub async fn build_server_state( + Secrets { + framerail_host, + deepwell_url, + redis_url, + s3_bucket, + s3_region, + s3_credentials, + s3_path_style, + client_ip_source, + }: Secrets, +) -> Result<ServerState> { + let framerail = Framerail::new(framerail_host); + let deepwell = Deepwell::connect(&deepwell_url)?; deepwell.check().await; let domains = deepwell.domains().await?; - let cache = Cache::connect(&secrets.redis_url)?; + let cache = Cache::connect(&redis_url)?; let client = HyperClient::builder(TokioExecutor::new()).build(HttpConnector::new()); let s3_bucket = { - let mut bucket = Bucket::new( - &secrets.s3_bucket, - secrets.s3_region.clone(), - secrets.s3_credentials.clone(), - )?; + let mut bucket = Bucket::new(&s3_bucket, s3_region.clone(), s3_credentials.clone())?; 
- if secrets.s3_path_style { + if s3_path_style { bucket = bucket.with_path_style(); } @@ -79,6 +88,7 @@ pub async fn build_server_state(secrets: Secrets) -> Result<ServerState> { framerail, cache, s3_bucket, + client_ip_source, })) } From 006df5cac034144e1953b355d4ec37cee3e19090 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 18:51:54 -0500 Subject: [PATCH 218/306] Fix ConnectInfo retrieval. --- wws/src/main.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/wws/src/main.rs b/wws/src/main.rs index a0dac764b6..cd4cec9a96 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -52,6 +52,7 @@ use self::trace::setup_tracing; use anyhow::Result; use std::fs::File; use std::io::Write; +use std::net::SocketAddr; use std::process; use tokio::net::TcpListener; @@ -73,7 +74,7 @@ async fn main() -> Result<()> { // Connect to services, build server state and then run let state = build_server_state(secrets).await?; - let app = build_router(state); + let app = build_router(state).into_make_service_with_connect_info::<SocketAddr>(); let listener = TcpListener::bind(config.address).await?; // Begin listening From 8a48ec2bb0e103f6350447b9c1978fe6e1b4977c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 20:21:17 -0500 Subject: [PATCH 219/306] Use SecureClientIp to add internal header. --- wws/src/handler/framerail.rs | 1 - wws/src/handler/mod.rs | 7 ++++++- wws/src/route.rs | 11 +++++++---- 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/wws/src/handler/framerail.rs b/wws/src/handler/framerail.rs index e02bfc3346..4390257e44 100644 --- a/wws/src/handler/framerail.rs +++ b/wws/src/handler/framerail.rs @@ -30,7 +30,6 @@ pub async fn proxy_framerail(State(state): State<ServerState>, mut req: Request) let path = get_path(req.uri()); info!(path = path, "Proxying request to framerail"); - // TODO add X-Real-IP and X-Forwarded-For *req.uri_mut() = state.framerail.proxy_uri(path); state diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 76eea46156..12e30ff729 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -49,7 +49,7 @@ use axum::{ response::{Html, IntoResponse, Redirect, Response}, Router, }; -use std::future::Future; +use std::{future::Future, net::IpAddr}; use tower::util::ServiceExt; pub const HEADER_SITE_ID: HeaderName = HeaderName::from_static("x-wikijump-site-id"); @@ -59,6 +59,8 @@ pub const HEADER_IS_WIKIJUMP: HeaderName = HeaderName::from_static("x-wikijump") pub const HEADER_WWS_VERSION: HeaderName = HeaderName::from_static("x-wikijump-wws-ver"); pub const HEADER_DEEPWELL_VERSION: HeaderName = HeaderName::from_static("x-wikijump-deepwell-ver"); +pub const HEADER_X_REAL_IP: HeaderName = HeaderName::from_static("x-real-ip"); + /// Helper function to get the site ID and slug from headers. fn get_site_info(headers: &HeaderMap) -> (i64, &str) { let site_id = headers @@ -127,6 +129,7 @@ where pub async fn handle_host_delegation( state: ServerState, hostname: String, + ip: IpAddr, mut request: Request<Body>, main_router: Router, files_router: Router, @@ -136,6 +139,7 @@ pub async fn handle_host_delegation( let headers = request.headers_mut(); headers.remove(HEADER_SITE_ID); headers.remove(HEADER_SITE_SLUG); + headers.remove(HEADER_X_REAL_IP); } macro_rules! 
forward_request { @@ -157,6 +161,7 @@ pub async fn handle_host_delegation( let headers = request.headers_mut(); headers.insert(HEADER_SITE_ID, header_value!(str!($site_id))); headers.insert(HEADER_SITE_SLUG, header_value!($site_slug)); + headers.insert(HEADER_X_REAL_IP, header_value!(str!(ip))); }}; } diff --git a/wws/src/route.rs b/wws/src/route.rs index c5853f3597..f9bd0d6591 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -26,6 +26,7 @@ use axum::{ routing::{any, get}, Router, }; +use axum_client_ip::SecureClientIp; use axum_extra::extract::Host; use std::sync::Arc; use tower_http::{ @@ -106,10 +107,12 @@ pub fn build_router(state: ServerState) -> Router { // Forward requests to the appropriate sub-router depending on the hostname .fallback( |State(state): State<ServerState>, - Host(hostname): Host, - request: Request<Body>| async move { - handle_host_delegation(state, hostname, request, main_router, files_router).await - } + Host(hostname): Host, + SecureClientIp(ip): SecureClientIp, + request: Request<Body>| async move { + handle_host_delegation(state, hostname, ip, request, main_router, files_router) + .await + }, ) // General routes .route("/robots.txt", get(handle_robots_txt)) // TODO From 0fa60a23ecb48d02ab8b2c6884136c54442c5459 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 20:44:26 -0500 Subject: [PATCH 220/306] Add DEEPWELL_PORT env variable to dev deployment. --- .do/app.yaml | 3 +++ framerail/src/lib/server/deepwell/index.ts | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.do/app.yaml b/.do/app.yaml index eac5365a5e..d5a73244be 100644 --- a/.do/app.yaml +++ b/.do/app.yaml @@ -31,6 +31,9 @@ services: - key: DEEPWELL_HOST value: '${deepwell.PRIVATE_DOMAIN}' scope: RUN_TIME + - key: DEEPWELL_PORT + value: '${deepwell.PRIVATE_PORT}' + scope: RUN_TIME - name: deepwell source_dir: . diff --git a/framerail/src/lib/server/deepwell/index.ts b/framerail/src/lib/server/deepwell/index.ts index 97dbe23f73..96a1224986 100644 --- a/framerail/src/lib/server/deepwell/index.ts +++ b/framerail/src/lib/server/deepwell/index.ts @@ -3,7 +3,7 @@ import { JSONRPCClient, type JSONRPCRequest } from "json-rpc-2.0" export const DEEPWELL_HOST = process.env.DEEPWELL_HOST || "localhost" -export const DEEPWELL_PORT = 2747 +export const DEEPWELL_PORT = process.env.DEEPWELL_PORT || 2747 export const DEEPWELL_URL = `http://${DEEPWELL_HOST}:${DEEPWELL_PORT}/jsonrpc` export const client = new JSONRPCClient(processRawRequest) From cd15d2b0b3e78e6645de363ce0bcb59f78dad0b9 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 20:46:16 -0500 Subject: [PATCH 221/306] Reorder env keys. Put the S3 entries together. 
--- .do/app.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.do/app.yaml b/.do/app.yaml index d5a73244be..ce5b8432ca 100644 --- a/.do/app.yaml +++ b/.do/app.yaml @@ -45,6 +45,12 @@ services: instance_size_slug: basic-xxs internal_ports: [2747] envs: + - key: DATABASE_URL + value: '${database.DATABASE_URL}' + scope: RUN_TIME + - key: REDIS_URL + value: '${cache.REDIS_URL}' + scope: RUN_TIME - key: S3_BUCKET value: wikijump-files-dev scope: RUN_TIME @@ -57,12 +63,6 @@ services: - key: S3_PATH_STYLE value: 'false' scope: RUN_TIME - - key: DATABASE_URL - value: '${database.DATABASE_URL}' - scope: RUN_TIME - - key: REDIS_URL - value: '${cache.REDIS_URL}' - scope: RUN_TIME - key: S3_ACCESS_KEY_ID type: SECRET scope: RUN_TIME From b5dbb1caa3017c6ee76c14f5a1c5ed9cc7239d14 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 20:50:23 -0500 Subject: [PATCH 222/306] Add wws image to app platform config. --- .do/app.yaml | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/.do/app.yaml b/.do/app.yaml index ce5b8432ca..a00981e4d7 100644 --- a/.do/app.yaml +++ b/.do/app.yaml @@ -35,6 +35,49 @@ services: value: '${deepwell.PRIVATE_PORT}' scope: RUN_TIME + - name: wws + source_dir: . + dockerfile_path: install/dev/router/Dockerfile + github: + repo: scpwiki/wikijump + branch: develop + deploy_on_push: true + instance_size_slug: basic-xxs + health_check: + initial_delay_seconds: 2 + envs: + - key: FRAMERAIL_HOST + value: '${framerail.PRIVATE_DOMAIN}' + - key: DEEPWELL_URL + value: 'http://${deepwell.PRIVATE_DOMAIN}:${deepwell.PRIVATE_PORT}' + scope: RUN_TIME + - key: REDIS_URL + value: '${cache.REDIS_URL}' + scope: RUN_TIME + - key: S3_BUCKET + value: wikijump-files-dev + scope: RUN_TIME + - key: S3_REGION_NAME + value: nyc3 + scope: RUN_TIME + - key: S3_CUSTOM_ENDPOINT + value: https://nyc3.digitaloceanspaces.com + scope: RUN_TIME + - key: S3_PATH_STYLE + value: 'false' + scope: RUN_TIME + - key: S3_ACCESS_KEY_ID + type: SECRET + scope: RUN_TIME + value: EV[1:o7KR/O5b7OHyXKDwyazNBHvLtp3RknSX:yimWNoxvDzML1V8dtbG+PN9qUbGfi+8lzYWzl+5ImU2fxojU] + - key: S3_SECRET_ACCESS_KEY + type: SECRET + scope: RUN_TIME + value: EV[1:HcyKErP7i3GUkjNt6GzRlrwXwgfEZRR+:r/51sfR9EqTGZOIev0F2fLrN4CqBl0AB/rpVVr1y2tWbEDnhMKBKZFuWoDJRMjOAKXhZfa/Nam29wEo=] + - key: CLIENT_IP_SOURCE + value: XRealIp + scope: RUN_TIME + - name: deepwell source_dir: . dockerfile_path: install/dev/api/Dockerfile From 074675bbe81b1492761c0e1d3016ea09de9c4b4c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 20:56:22 -0500 Subject: [PATCH 223/306] Create dev router Dockerfile. 
--- install/dev/router/Dockerfile | 37 ++++++++++++++++++++++++++++++ install/dev/router/health-check.sh | 2 ++ 2 files changed, 39 insertions(+) create mode 100644 install/dev/router/Dockerfile create mode 100755 install/dev/router/health-check.sh diff --git a/install/dev/router/Dockerfile b/install/dev/router/Dockerfile new file mode 100644 index 0000000000..4716ac0f44 --- /dev/null +++ b/install/dev/router/Dockerfile @@ -0,0 +1,37 @@ +# +# WWS build +# + +FROM rust:latest AS rust + +# Install helpers +RUN cargo install cargo-watch sqlx-cli + +# Install files +COPY ./install/local/router/wws-start /usr/local/bin/wikijump-wws-start +COPY ./install/local/router/health-check.sh /usr/local/bin/wikijump-health-check + +# Copy source +RUN mkdir /src +COPY ./wws /src/wws +WORKDIR /src/wws + +# Cache rust dependencies +RUN cargo vendor + +# Build wws +RUN cargo build --release + +# +# Final image +# + +FROM debian:latest + +# Install files +COPY --from=rust /src/wws/target/release/wws /usr/local/bin/wws +COPY ./install/dev/router/health-check.sh /usr/local/bin/wikijump-health-check + +USER daemon +EXPOSE 2747 +CMD ["/usr/local/bin/wws"] diff --git a/install/dev/router/health-check.sh b/install/dev/router/health-check.sh new file mode 100755 index 0000000000..07ecd67ac4 --- /dev/null +++ b/install/dev/router/health-check.sh @@ -0,0 +1,2 @@ +#!/bin/sh +curl -i http://localhost/-/health-check From 295bf6cdbfbb861742d3e1448886d4c924f7f992 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 20:58:29 -0500 Subject: [PATCH 224/306] Create GitHub workflow files for router. --- .../workflows/docker-build-router.dev.yaml | 20 +++++++++++++++++++ .../workflows/docker-build-router.local.yaml | 20 +++++++++++++++++++ 2 files changed, 40 insertions(+) create mode 100644 .github/workflows/docker-build-router.dev.yaml create mode 100644 .github/workflows/docker-build-router.local.yaml diff --git a/.github/workflows/docker-build-router.dev.yaml b/.github/workflows/docker-build-router.dev.yaml new file mode 100644 index 0000000000..dea772f0ff --- /dev/null +++ b/.github/workflows/docker-build-router.dev.yaml @@ -0,0 +1,20 @@ +name: '[backend] Docker build router (dev)' + +on: + pull_request: + paths: + - 'deepwell/**' + - 'install/dev/router/Dockerfile' + - '.github/workflows/docker-build-router.dev.yaml' + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Build image + run: docker build -f install/dev/router/Dockerfile . + env: + DOCKER_BUILDKIT: 1 diff --git a/.github/workflows/docker-build-router.local.yaml b/.github/workflows/docker-build-router.local.yaml new file mode 100644 index 0000000000..f1100ecdbb --- /dev/null +++ b/.github/workflows/docker-build-router.local.yaml @@ -0,0 +1,20 @@ +name: '[backend] Docker build router (local)' + +on: + pull_request: + paths: + - 'deepwell/**' + - 'install/local/router/Dockerfile' + - '.github/workflows/docker-build-router.local.yaml' + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Build image + run: docker build -f install/local/router/Dockerfile . + env: + DOCKER_BUILDKIT: 1 From 8397e7f834085e4b4666dfdbf81ad17ffc8fb4b6 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 21:00:17 -0500 Subject: [PATCH 225/306] Use symlink for health-check.sh script. 
--- install/dev/router/health-check.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) mode change 100755 => 120000 install/dev/router/health-check.sh diff --git a/install/dev/router/health-check.sh b/install/dev/router/health-check.sh deleted file mode 100755 index 07ecd67ac4..0000000000 --- a/install/dev/router/health-check.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -curl -i http://localhost/-/health-check diff --git a/install/dev/router/health-check.sh b/install/dev/router/health-check.sh new file mode 120000 index 0000000000..63d5f38b8a --- /dev/null +++ b/install/dev/router/health-check.sh @@ -0,0 +1 @@ +../../common/router/health-check.sh \ No newline at end of file From 186a099a3ddc01464d6ec0d84e710b7614ac348f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 21:03:24 -0500 Subject: [PATCH 226/306] Create prod router image and workflow. --- .../workflows/docker-build-router.prod.yaml | 20 ++++++++++ install/prod/router/Dockerfile | 37 +++++++++++++++++++ install/prod/router/health-check.sh | 1 + 3 files changed, 58 insertions(+) create mode 100644 .github/workflows/docker-build-router.prod.yaml create mode 100644 install/prod/router/Dockerfile create mode 120000 install/prod/router/health-check.sh diff --git a/.github/workflows/docker-build-router.prod.yaml b/.github/workflows/docker-build-router.prod.yaml new file mode 100644 index 0000000000..d8bc7605e7 --- /dev/null +++ b/.github/workflows/docker-build-router.prod.yaml @@ -0,0 +1,20 @@ +name: '[backend] Docker build router (prod)' + +on: + pull_request: + paths: + - 'deepwell/**' + - 'install/prod/router/Dockerfile' + - '.github/workflows/docker-build-router.prod.yaml' + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Build image + run: docker build -f install/prod/router/Dockerfile . + env: + DOCKER_BUILDKIT: 1 diff --git a/install/prod/router/Dockerfile b/install/prod/router/Dockerfile new file mode 100644 index 0000000000..0cb8c2a2e4 --- /dev/null +++ b/install/prod/router/Dockerfile @@ -0,0 +1,37 @@ +# +# WWS build +# + +FROM rust:latest AS rust + +# Install helpers +RUN cargo install cargo-watch sqlx-cli + +# Install files +COPY ./install/local/router/wws-start /usr/local/bin/wikijump-wws-start +COPY ./install/local/router/health-check.sh /usr/local/bin/wikijump-health-check + +# Copy source +RUN mkdir /src +COPY ./wws /src/wws +WORKDIR /src/wws + +# Cache rust dependencies +RUN cargo vendor + +# Build wws +RUN cargo build --release + +# +# Final image +# + +FROM debian:latest + +# Install files +COPY --from=rust /src/wws/target/release/wws /usr/local/bin/wws +COPY ./install/prod/router/health-check.sh /usr/local/bin/wikijump-health-check + +USER daemon +EXPOSE 2747 +CMD ["/usr/local/bin/wws"] diff --git a/install/prod/router/health-check.sh b/install/prod/router/health-check.sh new file mode 120000 index 0000000000..63d5f38b8a --- /dev/null +++ b/install/prod/router/health-check.sh @@ -0,0 +1 @@ +../../common/router/health-check.sh \ No newline at end of file From 20f8d6665b9cfa61e92441b68f20fe1cfafb4a7b Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 21:11:47 -0500 Subject: [PATCH 227/306] Run rustfmt for deepwell. 
--- deepwell/src/services/domain/service.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index f79e89e519..492b69ea49 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -136,7 +136,9 @@ impl DomainService { macro_rules! found { ($site:expr) => {{ let config = ctx.config(); - let preferred_domain = Self::preferred_domain(config, &$site).into_owned(); + let preferred_domain = + Self::preferred_domain(config, &$site).into_owned(); + if domain == &preferred_domain { let SiteModel { site_id, From d0f9c96f6012143da16ea120f215a72bb432683f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Feb 2025 23:40:55 -0500 Subject: [PATCH 228/306] Address deepwell clippy lint. --- deepwell/src/services/view/service.rs | 8 ++++---- deepwell/src/services/view/structs.rs | 6 +++--- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/deepwell/src/services/view/service.rs b/deepwell/src/services/view/service.rs index 0724aeb81c..c59721ad95 100644 --- a/deepwell/src/services/view/service.rs +++ b/deepwell/src/services/view/service.rs @@ -250,7 +250,7 @@ impl ViewService { PageStatus::Found { page, page_revision, - } => GetPageViewOutput::PageFound { + } => GetPageViewOutput::Found { viewer, options, page, @@ -259,21 +259,21 @@ impl ViewService { wikitext, compiled_html, }, - PageStatus::Missing => GetPageViewOutput::PageMissing { + PageStatus::Missing => GetPageViewOutput::Missing { viewer, options, redirect_page, wikitext, compiled_html, }, - PageStatus::Private => GetPageViewOutput::PagePermissions { + PageStatus::Private => GetPageViewOutput::Permissions { viewer, options, redirect_page, compiled_html, banned: false, }, - PageStatus::Banned => GetPageViewOutput::PagePermissions { + PageStatus::Banned => GetPageViewOutput::Permissions { viewer, options, redirect_page, diff --git a/deepwell/src/services/view/structs.rs b/deepwell/src/services/view/structs.rs index 9efe8ac585..07ab2b5b6e 100644 --- a/deepwell/src/services/view/structs.rs +++ b/deepwell/src/services/view/structs.rs @@ -54,7 +54,7 @@ pub struct PageRoute { #[derive(Serialize, Debug, Clone)] #[serde(rename_all = "snake_case", tag = "type", content = "data")] pub enum GetPageViewOutput { - PageFound { + Found { #[serde(flatten)] viewer: Viewer, options: PageOptions, @@ -65,7 +65,7 @@ pub enum GetPageViewOutput { compiled_html: String, }, - PageMissing { + Missing { #[serde(flatten)] viewer: Viewer, options: PageOptions, @@ -74,7 +74,7 @@ pub enum GetPageViewOutput { compiled_html: String, }, - PagePermissions { + Permissions { #[serde(flatten)] viewer: Viewer, options: PageOptions, From 7d85a84e3dd5c55bacff71445938d788cf6813b3 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 24 Feb 2025 19:37:01 -0500 Subject: [PATCH 229/306] Add optional dependency for TLS. 
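The axum-server dependency is pulled in only when the new tls Cargo feature is enabled, so default builds stay free of the extra TLS machinery. As a rough illustration of the pattern (a minimal standalone sketch, not wws code, with illustrative function names), a feature-gated optional dependency is consumed through cfg(feature = "...") guards:

    // Minimal sketch: the first function only compiles when the "tls" feature
    // (and therefore the optional dependency it enables) is turned on.
    #[cfg(feature = "tls")]
    fn tls_enabled() -> bool {
        true
    }

    #[cfg(not(feature = "tls"))]
    fn tls_enabled() -> bool {
        false
    }

    fn main() {
        // cargo run --features tls  -> "tls support: true"
        // cargo run                 -> "tls support: false"
        println!("tls support: {}", tls_enabled());
    }
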
--- wws/Cargo.lock | 228 ++++++++++++++++++++++++++++++++++++++++++++++++- wws/Cargo.toml | 5 ++ 2 files changed, 232 insertions(+), 1 deletion(-) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index b95db9cc89..88f7e4f836 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -179,6 +179,31 @@ dependencies = [ "url", ] +[[package]] +name = "aws-lc-rs" +version = "1.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd755adf9707cf671e31d944a189be3deaaeee11c8bc1d669bb8022ac90fbd0" +dependencies = [ + "aws-lc-sys", + "paste", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f9dd2e03ee80ca2822dd6ea431163d2ef259f2066a4d6ccaca6d9dcb386aa43" +dependencies = [ + "bindgen", + "cc", + "cmake", + "dunce", + "fs_extra", + "paste", +] + [[package]] name = "aws-region" version = "0.25.5" @@ -283,6 +308,30 @@ dependencies = [ "syn", ] +[[package]] +name = "axum-server" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56bac90848f6a9393ac03c63c640925c4b7c8ca21654de40d53f55964667c7d8" +dependencies = [ + "arc-swap", + "bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.5.2", + "hyper-util", + "pin-project-lite", + "rustls 0.23.20", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.1", + "tower 0.4.13", + "tower-service", +] + [[package]] name = "backtrace" version = "0.3.74" @@ -310,6 +359,29 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "bindgen" +version = "0.69.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" +dependencies = [ + "bitflags", + "cexpr", + "clang-sys", + "itertools", + "lazy_static", + "lazycell", + "log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn", + "which", +] + [[package]] name = "bitflags" version = "2.6.0" @@ -384,12 +456,32 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + [[package]] name = "clap" version = "4.5.23" @@ -417,6 +509,15 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +[[package]] +name = "cmake" +version = "0.1.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" +dependencies = [ + "cc", +] + [[package]] name = "color-backtrace" version = "0.6.1" @@ 
-564,12 +665,34 @@ version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "either" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7914353092ddf589ad78f25c5c1c21b7f80b0ff8621e7c814c3485b5306da9d" + [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +[[package]] +name = "errno" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + [[package]] name = "flate2" version = "1.0.35" @@ -605,6 +728,12 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + [[package]] name = "futures" version = "0.3.31" @@ -740,6 +869,12 @@ dependencies = [ "url", ] +[[package]] +name = "glob" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" + [[package]] name = "h2" version = "0.4.7" @@ -1113,6 +1248,15 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.14" @@ -1173,7 +1317,7 @@ dependencies = [ "http-body-util", "jsonrpsee-types", "pin-project", - "rustc-hash", + "rustc-hash 2.1.0", "serde", "serde_json", "thiserror 1.0.69", @@ -1225,6 +1369,12 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "libc" version = "0.2.169" @@ -1243,6 +1393,16 @@ dependencies = [ "pkg-config", ] +[[package]] +name = "libloading" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" +dependencies = [ + "cfg-if", + "windows-targets", +] + [[package]] name = "libz-sys" version = "1.1.20" @@ -1255,6 +1415,12 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + [[package]] name = "litemap" version = "0.7.4" @@ -1327,6 +1493,12 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + [[package]] name = "miniz_oxide" version = "0.8.2" @@ -1347,6 +1519,16 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + [[package]] name = "nonempty" version = "0.7.0" @@ -1522,6 +1704,16 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "prettyplease" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" +dependencies = [ + "proc-macro2", + "syn", +] + [[package]] name = "proc-macro2" version = "1.0.92" @@ -1748,12 +1940,31 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + [[package]] name = "rustc-hash" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" +[[package]] +name = "rustix" +version = "0.38.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.59.0", +] + [[package]] name = "rustls" version = "0.21.12" @@ -1786,6 +1997,7 @@ version = "0.23.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" dependencies = [ + "aws-lc-rs", "log", "once_cell", "ring", @@ -1887,6 +2099,7 @@ version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ + "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", @@ -2580,6 +2793,18 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + [[package]] name = "wikidot-normalize" version = "0.12.0" @@ -2727,6 +2952,7 @@ dependencies = [ "axum", "axum-client-ip", "axum-extra", + "axum-server", "built", "clap", "color-backtrace", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 4f533db9f7..fcae2267c2 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -12,12 +12,17 @@ version = "2025.2.6" authors = ["Emmie Smith <emmie.maeda@gmail.com>"] edition = "2021" +[features] +default = [] +tls = ["axum-server"] + [dependencies] accept-language = "3" anyhow = "1" axum = { version = "0.8", features = ["http1", "http2", "macros", "tokio", "tower-log", "tracing"], default-features = false } axum-client-ip = "0.7" axum-extra = { version = "0.10", features = ["attachment"] } +axum-server = { version = 
"0.7", features = ["tls-rustls"], optional = true } clap = "4" color-backtrace = "0.6" dotenvy = "0.15" From 818bb692dea3a8fc0d0362afcf68982852eec087 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 24 Feb 2025 20:01:43 -0500 Subject: [PATCH 230/306] Add code for tls feature. --- wws/.gitattributes | 1 + wws/misc/self-signed-cert.pem | 32 +++++++++++++++++++++ wws/misc/self-signed-key.pem | 52 +++++++++++++++++++++++++++++++++++ wws/src/main.rs | 35 +++++++++++++++++++++-- 4 files changed, 118 insertions(+), 2 deletions(-) create mode 100644 wws/.gitattributes create mode 100644 wws/misc/self-signed-cert.pem create mode 100644 wws/misc/self-signed-key.pem diff --git a/wws/.gitattributes b/wws/.gitattributes new file mode 100644 index 0000000000..b914593ed9 --- /dev/null +++ b/wws/.gitattributes @@ -0,0 +1 @@ +misc/*.pem binary diff --git a/wws/misc/self-signed-cert.pem b/wws/misc/self-signed-cert.pem new file mode 100644 index 0000000000..8227f327be --- /dev/null +++ b/wws/misc/self-signed-cert.pem @@ -0,0 +1,32 @@ +-----BEGIN CERTIFICATE----- +MIIFkzCCA3ugAwIBAgIUQZiKeBISKUZoglT8J8CCPpGbgTkwDQYJKoZIhvcNAQEL +BQAwWTELMAkGA1UEBhMCVVMxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoM +GEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDESMBAGA1UEAwwJbG9jYWxob3N0MB4X +DTIxMDgyOTEyMDE0NVoXDTIyMDgyOTEyMDE0NVowWTELMAkGA1UEBhMCVVMxEzAR +BgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5 +IEx0ZDESMBAGA1UEAwwJbG9jYWxob3N0MIICIjANBgkqhkiG9w0BAQEFAAOCAg8A +MIICCgKCAgEAoeDJnuh1lhcpKCt5VEBqO9JcSoz2wqD3SLj4i2qrEOvqb4X0ZZeN +5GQXQlOG2N6+9FOxTzaTTigTecYzI3hqKn1fiuvaS4EeTC7E1sVOj7tY0yVySjXM +pC/3t1n1s3B25m7eQ0G2JypZFCobGqY0kaRoO+mCTjI4bdCd769shIerCO4Z8FD5 +uj1+hBC7ZY/sqmRkGTLX1ZzkXzaeNeWGlkXKU8/V3qdveFQ/sGe+KoZpOPXb0yR7 +H8zf6NE2CFCNJDhytOkYLOsnvCJOvibJ3kbM2GfI9iCd0/QhQAOcrVhcOgI4aIxr +wP3zvF4PFUhFKEWHqK5IFq41xKyMYu2fw3bmKXg4zsQGcB0avBD7z+7ENEBvLkNI +7O20wKJp8u0RfjStNHWPmWLXPjkadVB5JHJjsktvgNZkbs9ugxhZWW2AzrrIuqwR +NOWnjHE7J3jvcHP6jE5O9LHpnlh6BMoKPsQuRu/bkrD34rNzwH7IX1To1CyDazMR +yhUiARYh43gg6hrrQdVjDFMHd51mgWHtOPzSLb0uzToglAa3FClGlCeaiacu4H2V +EfJrlCbVlftmIub9/EILZ6XpyYWMxt2mm4mCcMtXmBsHolP4lU3keK8AGNFOr3PC +B7NHLNp1RHgx8+Q3kzobJ1Lk+zEjraWPb5gyByUvZySbd/JTGgNCmZsCAwEAAaNT +MFEwHQYDVR0OBBYEFGsIv6GsbDS+dEWwWlA/3TG5Oi88MB8GA1UdIwQYMBaAFGsI +v6GsbDS+dEWwWlA/3TG5Oi88MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL +BQADggIBAHhjzP8WtkLJVfZXXUPAAekR7kaqk2hb3hIgDABBJ7xNxcktLOH7V/ng +nhbnwSH5mCkHHXx78TOhWqokHp5wru8K3de5wvAD8uz0UwNDHK5EzqtjYLzxbxAr +ht89WoXGPEZIz6MuOxVYx/HHXdgNEXUcujzfpAfvznVxvzBVqpHNgc7qO8wJd0cG +nit1XubxKoIVTEUjDfxGa2TsmBI7CZ8MLjIyztp/b3txpVl36hPC/uFLwKC780Jc +eO9saA5ISbJh7EaISRr8MKpBpJcraL+055bMjM+kzRFA18NWuuo9Y8fXnXE8e/af +k8FvclVdH/YyezaLkjW7lXjo7QoSXHhAuSzvsGmIsh+HuH+3Fs22AN3aGdmimOmp +7JiNe42mwEpJydwgGlKOysw4ht6MA6yOcQJw73QAYYwusOmNjFZtfCUqJx/JO7mn +Sb1/PW58xYSJhDxdGhoh6Rd3xPMW1T4YwpapkAC/htciK3XkwCcG1VKSmCIErkXf +vllmdahH/QkNooNAHMZl/ipYMik8pp5eRjVjCvpQTDBOI97U0+bgXydHVowP9ExE +dGcm6pP8FU1LyBZdYTdlMRC5Z0L0ltcZn7bqKcyzZB3UcWJv7Uhn3MYbmqGsUVly +a/e3kH2t5pEWRTsrNrRD94LzEYKvcNHy6PYkrgpGjh2G2VBZgNzh +-----END CERTIFICATE----- diff --git a/wws/misc/self-signed-key.pem b/wws/misc/self-signed-key.pem new file mode 100644 index 0000000000..c329a2d836 --- /dev/null +++ b/wws/misc/self-signed-key.pem @@ -0,0 +1,52 @@ +-----BEGIN PRIVATE KEY----- +MIIJQQIBADANBgkqhkiG9w0BAQEFAASCCSswggknAgEAAoICAQCh4Mme6HWWFyko +K3lUQGo70lxKjPbCoPdIuPiLaqsQ6+pvhfRll43kZBdCU4bY3r70U7FPNpNOKBN5 +xjMjeGoqfV+K69pLgR5MLsTWxU6Pu1jTJXJKNcykL/e3WfWzcHbmbt5DQbYnKlkU 
+KhsapjSRpGg76YJOMjht0J3vr2yEh6sI7hnwUPm6PX6EELtlj+yqZGQZMtfVnORf +Np415YaWRcpTz9Xep294VD+wZ74qhmk49dvTJHsfzN/o0TYIUI0kOHK06Rgs6ye8 +Ik6+JsneRszYZ8j2IJ3T9CFAA5ytWFw6AjhojGvA/fO8Xg8VSEUoRYeorkgWrjXE +rIxi7Z/DduYpeDjOxAZwHRq8EPvP7sQ0QG8uQ0js7bTAomny7RF+NK00dY+ZYtc+ +ORp1UHkkcmOyS2+A1mRuz26DGFlZbYDOusi6rBE05aeMcTsneO9wc/qMTk70seme +WHoEygo+xC5G79uSsPfis3PAfshfVOjULINrMxHKFSIBFiHjeCDqGutB1WMMUwd3 +nWaBYe04/NItvS7NOiCUBrcUKUaUJ5qJpy7gfZUR8muUJtWV+2Yi5v38QgtnpenJ +hYzG3aabiYJwy1eYGweiU/iVTeR4rwAY0U6vc8IHs0cs2nVEeDHz5DeTOhsnUuT7 +MSOtpY9vmDIHJS9nJJt38lMaA0KZmwIDAQABAoICAHzGnCLU4+4xJBRGjlsW28wI +tgLw7TPQh0uS6GHucrW0YxxbkKrOSx0E2bjSUVrRNzd1W3LHinvwADMZR0nMA2mF +AiQ+8CDLAeOPGULDC29W5Xy7nID/PyI/px25Rd5ujffI9aG6AQHnbopQelvsSREK +PR4RO9OyejSLXXHnMipluLxFa9EFWbjotaBulUQP0Ej24QFbY2rQaGfL3d+FcFxc +pzw7M4tQXGfP6Ne836Q/vtOdDziNIiq87Mq0mIWIMYL9z80K7wuQpywo9bE0jN28 +jSExvoGZWo6J2ydQoXAsb8p286wCsPwtw7Yqek3ZSxVjotGupPp2hhN3PS70IvR5 +wcR+1pGTSzUFkrLurZftR+HNU4GHVGEzmFKtQ1dyBjDdLSkBHx+N3rzvvArMLDKI +hYXc7AgCTR1SkZBBVPFlNZJyicE+x52UGLvnyS5chgqvSsOrkhDu/bK+ISTh+3jZ +8QSnjYuZLQ1q5i3914wKzjSrHbFWuoGullqCk6nvhn2EEDcAVla0ebSYBcrnzKhO +qJogZzUSTpINIKNQlZuohzbS0lrvXuYDRDkZLRaQWKgHGiat7peBazEfd0NTHpIs +2lKovGTWNU8MIvJPONFixIZ0k7Z+s7Oje+dSOoCyCUzA3BT+mmS2Yi180zxrtRBS +LPGooWR3Rfyptx+OJkehAoIBAQDQkoPWIQWdFG1G9x08H49/AjcfGtHbdjeCjNqS +6mbXLzHgQjnUnmKmuqgkSw9IA+l2OqX4dNrKqH9P6Ex9s3HRxTmYt9/0DLT8Thus +04DiusjhUDQYV8pXUBujmVkMEEI8N5RXv0IAd59kaA6kWJLtrnp6mREY2WJicIAJ +BKut0QTC+upnvV2NKYc+Ki5ElB5hqzICr+wBq35ZlxTId7F5iaZeWeljpOodZw06 +KCVIUhmGHNVR0DUqUJ8+j7gstXhXr0MVhAlRg+WhlUvyCm1UhElyyrVgiXjqeqO9 +RO2+/poPNFxylVzYgTi54ydeB378/LcrxFQ7Q3DAW6DSAefHAoIBAQDGsBc6SnXu +WGW2qPWQM1Jm9hGy7ZgB8953kvpSxE1cVkXoOOtaa2HtRurxT55s4nTAzqDV//7R +9OX+JDCMeQLm9oLzGOxaCaq5lGNTNQs+MBPP78wwQrZRhneuG5U0lEYBb+dlkHih +IejR9OK0r0btpwuLWTC/cs2dNMW0J6JwaK6J4JiJC+nJiKyt1W98Vtpz0oLJq/Re +Z/e3sVZF3RLks5WoQsiXYoQ3KFf9koBsImggGm2prrFl9KeZJOVJP0ZeDaRcLGWQ +PRt0nNKuuSRJ5HZF/0TCwUXAtpaftAsr4fhB+/KYVdVrni5FYdfqUX4KH6n9LFSG +VC1OST1JJIeNAoIBAB0H57XMTt24VCWGi9ksg2qoQkfgEcm8QKm5NUsxuTLGbOjM +DwSbLxwJ6xFyKSRa9wnvy94zVajTnzTeHpd4fKU4EHZDUbbEdgSQUqXRoqTsXr2N +zlJ9FbrleZNh6tUVBkMfcVRtWKB8BgGRwkf51CmlGYMq/wg4actN4WRf9A1zhHgn +OK1L3FOjriFm+Z2uCDSMAaACIJVy61lJACmPD3LdR/zmAuhNshB5oYuwvs+8LbVP +GhoTIvNK2X95vabrc16xFGNQR4PDGhlNkI6WCPW0nAyQToKrX9szSsszZuwowATR +wvRn+c5g3iZxia861+AaxNwgraC6GF2N42qXvU0CggEAXD+NyUahEpSARRqVSOpL +K/q7pPOjS+TKOYJILv1tXZ3Av10OCOEqilwO4RMyXyOVSZ+mFTXSPfESh7iNweq9 +ajax/eRoeDVcyuUWaJ+MJMd1q2mOyClxNNDV6ERuNgdRqYEnUoSNPWLdEf48898d +c2HHfl9evsSyqnbCBC8SwFYaE3Hv4FFjrmqCogMiy/wXWQc4KiJoRxzGascvYyiN +iRnINmMrdv4KnQFiOR03+vzOk3kxyUKOouPAnN4Ahs2WAj0bPqBuV1XH1ZCqUO0s +6BHmyAEJD9Nka2Fa9bNGLI2yEhDERe40NM8wdI5FDUng1xp0dlOKuwOCNYLTrY4E +UQKCAQByK/e9bFaNv+BS81flfTt9tinKRIFc8IAKUl39M5wmehUqey8BGfKkMTGX +1w7R7lfCxoDi5Cl64fkPLWHrvZTuWh5ApC8r6uVjEX3TNWhBCQAB2tJmF7s9N73K +ymoh3VvQUHFZ2+IrCTgkJTWqjEdhPiiU3/oBnIv9ZYWf1ORkVhoAdxoLBn2XuTRC +xIKhiQeqCcKE9yTN26rt+7DjhB5TJ0W2meC8Rxb4lZRDD50MZayZQ6Vo4O87INpD +WjR7NdZndxUeinCPNQos9hEEke1ncCIzkwzJ9kn1R3iJzZRdjDKW3oT4G6QaStf5 +HUGWsrhnzvWoCOV+9+MdApoim8FI +-----END PRIVATE KEY----- diff --git a/wws/src/main.rs b/wws/src/main.rs index cd4cec9a96..75bb0e62ff 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -50,6 +50,7 @@ use self::route::build_router; use self::state::build_server_state; use self::trace::setup_tracing; use anyhow::Result; +use axum::Router; use std::fs::File; use std::io::Write; use std::net::SocketAddr; @@ -74,8 +75,7 @@ async fn main() -> Result<()> { // Connect to services, build server state and then run let 
state = build_server_state(secrets).await?; - let app = build_router(state).into_make_service_with_connect_info::<SocketAddr>(); - let listener = TcpListener::bind(config.address).await?; + let router = build_router(state); // Begin listening info!( @@ -83,6 +83,37 @@ async fn main() -> Result<()> { "Listening to connections...", ); + serve(config.address, router).await?; + Ok(()) +} + +// Snake oil TLS +// For local +#[cfg(feature = "tls")] +async fn serve(address: SocketAddr, router: Router) -> Result<()> { + use axum_server::tls_rustls::RustlsConfig; + + let app = router.into_make_service_with_connect_info::<SocketAddr>(); + let tls_config = RustlsConfig::from_pem_file( + // Added in Docker container + "/etc/ssl/self-signed-cert.pem", + "/etc/ssl/self-signed-key.pem", + ) + .await?; + + axum_server::bind_rustls(address, tls_config) + .serve(app) + .await?; + + Ok(()) +} + +// TLS-terminated HTTP server +// For dev and prod +#[cfg(not(feature = "tls"))] +async fn serve(address: SocketAddr, router: Router) -> Result<()> { + let app = router.into_make_service_with_connect_info::<SocketAddr>(); + let listener = TcpListener::bind(address).await?; axum::serve(listener, app).await?; Ok(()) } From 086609744d946314cf2ecc113f0c89bb4acd2506 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 24 Feb 2025 20:04:40 -0500 Subject: [PATCH 231/306] Move import to avoid warning in tls feature. --- wws/src/main.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/wws/src/main.rs b/wws/src/main.rs index 75bb0e62ff..197009fc47 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -55,7 +55,6 @@ use std::fs::File; use std::io::Write; use std::net::SocketAddr; use std::process; -use tokio::net::TcpListener; #[tokio::main] async fn main() -> Result<()> { @@ -112,6 +111,8 @@ async fn serve(address: SocketAddr, router: Router) -> Result<()> { // For dev and prod #[cfg(not(feature = "tls"))] async fn serve(address: SocketAddr, router: Router) -> Result<()> { + use tokio::net::TcpListener; + let app = router.into_make_service_with_connect_info::<SocketAddr>(); let listener = TcpListener::bind(address).await?; axum::serve(listener, app).await?; From a3177d894b6d4f72a1577715ab7fec26548449c1 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 4 Mar 2025 22:00:18 -0500 Subject: [PATCH 232/306] Add TLS fields to .env --- wws/.env.example | 6 ++++++ wws/Cargo.lock | 1 + wws/Cargo.toml | 1 + wws/src/config/mod.rs | 45 +++++++++++++++++++++++++++++++++++----- wws/src/config/object.rs | 8 +++++++ 5 files changed, 56 insertions(+), 5 deletions(-) diff --git a/wws/.env.example b/wws/.env.example index 063e239e64..8dafccd0ca 100644 --- a/wws/.env.example +++ b/wws/.env.example @@ -49,4 +49,10 @@ AWS_PROFILE_NAME=wikijump # - CloudFrontViewerAddress CLIENT_IP_SOURCE=XRealIp +# What TLS certificate and secret key to use for this server. +# +# Requires the "tls" feature to be enabled in wws. 
+TLS_CERTIFICATE=misc/self-signed-cert.pem +TLS_SECRET_KEY=misc/self-signed-key.pem + # vim: set ft=sh: diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 88f7e4f836..b304ea72ad 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -2954,6 +2954,7 @@ dependencies = [ "axum-extra", "axum-server", "built", + "cfg-if", "clap", "color-backtrace", "dotenvy", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index fcae2267c2..7cf51b4778 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -23,6 +23,7 @@ axum = { version = "0.8", features = ["http1", "http2", "macros", "tokio", "towe axum-client-ip = "0.7" axum-extra = { version = "0.10", features = ["attachment"] } axum-server = { version = "0.7", features = ["tls-rustls"], optional = true } +cfg-if = "1" clap = "4" color-backtrace = "0.6" dotenvy = "0.15" diff --git a/wws/src/config/mod.rs b/wws/src/config/mod.rs index 2cee8e8db6..825bc67b5e 100644 --- a/wws/src/config/mod.rs +++ b/wws/src/config/mod.rs @@ -26,6 +26,7 @@ pub use self::object::Config; pub use self::secrets::Secrets; use self::args::Arguments; +use cfg_if::cfg_if; use dotenvy::dotenv; use ref_map::*; use s3::{creds::Credentials, region::Region}; @@ -48,6 +49,20 @@ pub fn load_config() -> (Config, Secrets) { }; } + // The OsString version of get_env!() + #[cfg(feature = "tls")] + macro_rules! get_env_os { + ($name:expr) => { + match env::var_os($name) { + Some(value) => value, + None => { + eprintln!("Unable to read environment variable {}", $name); + process::exit(1); + } + } + }; + } + // Process arguments and overrides let Arguments { enable_trace, @@ -132,12 +147,32 @@ pub fn load_config() -> (Config, Secrets) { } }; + cfg_if! { + if #[cfg(feature = "tls")] { + let tls_certificate = PathBuf::from(get_env_os!("TLS_CERTIFICATE")); + let tls_secret_key = PathBuf::from(get_env_os!("TLS_SECRET_KEY")); + } + } + // Build and return - let config = Config { - enable_trace, - pid_file, - address, - }; + + cfg_if! { + if #[cfg(feature = "tls")] { + let config = Config { + enable_trace, + pid_file, + address, + tls_certificate, + tls_secret_key, + }; + } else { + let config = Config { + enable_trace, + pid_file, + address, + }; + } + } let secrets = Secrets { deepwell_url, diff --git a/wws/src/config/object.rs b/wws/src/config/object.rs index f0d13dd967..e5169aeef0 100644 --- a/wws/src/config/object.rs +++ b/wws/src/config/object.rs @@ -32,4 +32,12 @@ pub struct Config { /// The address the server will be hosted on. pub address: SocketAddr, + + /// Specify where to get the certificate PEM file for TLS. + #[cfg(feature = "tls")] + pub tls_certificate: PathBuf, + + /// Specify where to get the secret key PEM file for TLS. + #[cfg(feature = "tls")] + pub tls_secret_key: PathBuf, } From 2e061c45d7c640ce4d1cca2a4551cb8b95ea46a3 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Tue, 4 Mar 2025 22:10:39 -0500 Subject: [PATCH 233/306] Pass in TLS paths to server builder. 
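serve() now receives the whole Config rather than just the socket address, so the certificate and key come from TLS_CERTIFICATE and TLS_SECRET_KEY instead of paths baked into the Docker image. Since RustlsConfig::from_pem_file accepts anything borrowable as a Path, the PathBuf fields can be passed by reference directly; a minimal sketch of loading them with an anyhow error context (an illustration, not part of this commit) would be:

    // Sketch only: wrap PEM loading so a bad TLS_CERTIFICATE or TLS_SECRET_KEY
    // value fails with a message naming the offending paths.
    use std::path::Path;

    use anyhow::{Context, Result};
    use axum_server::tls_rustls::RustlsConfig;

    async fn load_tls_config(cert: &Path, key: &Path) -> Result<RustlsConfig> {
        RustlsConfig::from_pem_file(cert, key)
            .await
            .with_context(|| {
                format!("loading TLS PEM files {} and {}", cert.display(), key.display())
            })
    }
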
--- wws/src/main.rs | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/wws/src/main.rs b/wws/src/main.rs index 197009fc47..fd345b75eb 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -45,7 +45,7 @@ mod route; mod state; mod trace; -use self::config::load_config; +use self::config::{load_config, Config}; use self::route::build_router; use self::state::build_server_state; use self::trace::setup_tracing; @@ -82,25 +82,25 @@ async fn main() -> Result<()> { "Listening to connections...", ); - serve(config.address, router).await?; + serve(&config, router).await?; Ok(()) } // Snake oil TLS // For local #[cfg(feature = "tls")] -async fn serve(address: SocketAddr, router: Router) -> Result<()> { +async fn serve(config: &Config, router: Router) -> Result<()> { use axum_server::tls_rustls::RustlsConfig; let app = router.into_make_service_with_connect_info::<SocketAddr>(); let tls_config = RustlsConfig::from_pem_file( // Added in Docker container - "/etc/ssl/self-signed-cert.pem", - "/etc/ssl/self-signed-key.pem", + &config.tls_certificate, + &config.tls_secret_key, ) .await?; - axum_server::bind_rustls(address, tls_config) + axum_server::bind_rustls(config.address, tls_config) .serve(app) .await?; @@ -110,11 +110,11 @@ async fn serve(address: SocketAddr, router: Router) -> Result<()> { // TLS-terminated HTTP server // For dev and prod #[cfg(not(feature = "tls"))] -async fn serve(address: SocketAddr, router: Router) -> Result<()> { +async fn serve(config: &Config, router: Router) -> Result<()> { use tokio::net::TcpListener; let app = router.into_make_service_with_connect_info::<SocketAddr>(); - let listener = TcpListener::bind(address).await?; + let listener = TcpListener::bind(config.address).await?; axum::serve(listener, app).await?; Ok(()) } From e1866d2e89eecdd8251ee62aeb1b977a184b16f2 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 5 Mar 2025 00:16:06 -0500 Subject: [PATCH 234/306] Fix runtime TLS errors. What a pain in the ass. 
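The underlying issue is that rustls 0.23 ships two crypto backends (ring and aws-lc-rs) and cannot pick a default when more than one is compiled in, which surfaces as a runtime failure rather than a build error. This commit pins the ring backend through Cargo features and moves axum-server to tls-rustls-no-provider, which also drops the aws-lc-sys, bindgen, and cmake build dependencies from Cargo.lock. A complementary option, shown here only as a sketch and not something this commit does, is to install the ring provider as the process-wide default before any TLS config is built:

    // Sketch: explicitly install the ring provider at startup (assumes the
    // rustls "ring" feature is enabled, as this commit's Cargo.toml does).
    fn install_crypto_provider() {
        if rustls::crypto::ring::default_provider()
            .install_default()
            .is_err()
        {
            // A provider was already installed elsewhere; nothing to do.
        }
    }
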
--- wws/Cargo.lock | 204 +------------------------------------------------ wws/Cargo.toml | 3 +- 2 files changed, 4 insertions(+), 203 deletions(-) diff --git a/wws/Cargo.lock b/wws/Cargo.lock index b304ea72ad..20ee2a0e44 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -179,31 +179,6 @@ dependencies = [ "url", ] -[[package]] -name = "aws-lc-rs" -version = "1.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd755adf9707cf671e31d944a189be3deaaeee11c8bc1d669bb8022ac90fbd0" -dependencies = [ - "aws-lc-sys", - "paste", - "zeroize", -] - -[[package]] -name = "aws-lc-sys" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f9dd2e03ee80ca2822dd6ea431163d2ef259f2066a4d6ccaca6d9dcb386aa43" -dependencies = [ - "bindgen", - "cc", - "cmake", - "dunce", - "fs_extra", - "paste", -] - [[package]] name = "aws-region" version = "0.25.5" @@ -359,29 +334,6 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" -[[package]] -name = "bindgen" -version = "0.69.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" -dependencies = [ - "bitflags", - "cexpr", - "clang-sys", - "itertools", - "lazy_static", - "lazycell", - "log", - "prettyplease", - "proc-macro2", - "quote", - "regex", - "rustc-hash 1.1.0", - "shlex", - "syn", - "which", -] - [[package]] name = "bitflags" version = "2.6.0" @@ -456,32 +408,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" -[[package]] -name = "cexpr" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" -dependencies = [ - "nom", -] - [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "clang-sys" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" -dependencies = [ - "glob", - "libc", - "libloading", -] - [[package]] name = "clap" version = "4.5.23" @@ -509,15 +441,6 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" -[[package]] -name = "cmake" -version = "0.1.54" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" -dependencies = [ - "cc", -] - [[package]] name = "color-backtrace" version = "0.6.1" @@ -665,34 +588,12 @@ version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" -[[package]] -name = "dunce" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" - -[[package]] -name = "either" -version = "1.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7914353092ddf589ad78f25c5c1c21b7f80b0ff8621e7c814c3485b5306da9d" - [[package]] name = "equivalent" version = 
"1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" -[[package]] -name = "errno" -version = "0.3.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" -dependencies = [ - "libc", - "windows-sys 0.59.0", -] - [[package]] name = "flate2" version = "1.0.35" @@ -728,12 +629,6 @@ dependencies = [ "thiserror 1.0.69", ] -[[package]] -name = "fs_extra" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" - [[package]] name = "futures" version = "0.3.31" @@ -869,12 +764,6 @@ dependencies = [ "url", ] -[[package]] -name = "glob" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" - [[package]] name = "h2" version = "0.4.7" @@ -1248,15 +1137,6 @@ version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" -[[package]] -name = "itertools" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.14" @@ -1317,7 +1197,7 @@ dependencies = [ "http-body-util", "jsonrpsee-types", "pin-project", - "rustc-hash 2.1.0", + "rustc-hash", "serde", "serde_json", "thiserror 1.0.69", @@ -1369,12 +1249,6 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - [[package]] name = "libc" version = "0.2.169" @@ -1393,16 +1267,6 @@ dependencies = [ "pkg-config", ] -[[package]] -name = "libloading" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" -dependencies = [ - "cfg-if", - "windows-targets", -] - [[package]] name = "libz-sys" version = "1.1.20" @@ -1415,12 +1279,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "linux-raw-sys" -version = "0.4.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" - [[package]] name = "litemap" version = "0.7.4" @@ -1493,12 +1351,6 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - [[package]] name = "miniz_oxide" version = "0.8.2" @@ -1519,16 +1371,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - [[package]] name = "nonempty" version = 
"0.7.0" @@ -1704,16 +1546,6 @@ dependencies = [ "zerocopy", ] -[[package]] -name = "prettyplease" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" -dependencies = [ - "proc-macro2", - "syn", -] - [[package]] name = "proc-macro2" version = "1.0.92" @@ -1940,31 +1772,12 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - [[package]] name = "rustc-hash" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" -[[package]] -name = "rustix" -version = "0.38.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys", - "windows-sys 0.59.0", -] - [[package]] name = "rustls" version = "0.21.12" @@ -1997,7 +1810,6 @@ version = "0.23.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5065c3f250cbd332cd894be57c40fa52387247659b14a2d6041d121547903b1b" dependencies = [ - "aws-lc-rs", "log", "once_cell", "ring", @@ -2099,7 +1911,6 @@ version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ - "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", @@ -2793,18 +2604,6 @@ dependencies = [ "rustls-pki-types", ] -[[package]] -name = "which" -version = "4.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" -dependencies = [ - "either", - "home", - "once_cell", - "rustix", -] - [[package]] name = "wikidot-normalize" version = "0.12.0" @@ -2965,6 +2764,7 @@ dependencies = [ "redis", "ref-map", "rust-s3", + "rustls 0.23.20", "serde", "str-macro", "thiserror 2.0.11", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 7cf51b4778..20bb4de3a3 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -22,7 +22,7 @@ anyhow = "1" axum = { version = "0.8", features = ["http1", "http2", "macros", "tokio", "tower-log", "tracing"], default-features = false } axum-client-ip = "0.7" axum-extra = { version = "0.10", features = ["attachment"] } -axum-server = { version = "0.7", features = ["tls-rustls"], optional = true } +axum-server = { version = "0.7", features = ["tls-rustls-no-provider"], optional = true } cfg-if = "1" clap = "4" color-backtrace = "0.6" @@ -34,6 +34,7 @@ paste = "1" redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "r2d2", "tokio-comp", "tokio-rustls-comp"], default-features = false } ref-map = "0.1" rust-s3 = { version = "0.35", features = ["with-tokio", "tokio-rustls-tls"], default-features = false } +rustls = { version = "0.23", features = ["logging", "std", "ring", "tls12"], default-features = false } # see https://github.com/programatik29/axum-server/issues/153#issuecomment-2605740256 serde = { version = "1", features = ["derive"] } str-macro = "1" thiserror = "2" From 9a6c4d5e0e61585aaadce75d5d862bf5393a4637 Mon Sep 17 00:00:00 2001 From: Emmie 
Maeda <emmie.maeda@gmail.com> Date: Wed, 5 Mar 2025 00:17:19 -0500 Subject: [PATCH 235/306] Add note about HTTP -> HTTPS. --- wws/src/main.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/wws/src/main.rs b/wws/src/main.rs index fd345b75eb..4bbae7811e 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -92,6 +92,7 @@ async fn main() -> Result<()> { async fn serve(config: &Config, router: Router) -> Result<()> { use axum_server::tls_rustls::RustlsConfig; + // NOTE: This does not include a HTTP -> HTTPS redirector let app = router.into_make_service_with_connect_info::<SocketAddr>(); let tls_config = RustlsConfig::from_pem_file( // Added in Docker container From fcce0fe42806130d6e4791698c524e3186f73e81 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 5 Mar 2025 00:25:32 -0500 Subject: [PATCH 236/306] Use TLS feature for local router deployment. --- install/local/router/wws-start | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/install/local/router/wws-start b/install/local/router/wws-start index 47ac026ec9..400921a4a5 100755 --- a/install/local/router/wws-start +++ b/install/local/router/wws-start @@ -10,4 +10,5 @@ exec /usr/bin/env RUST_BACKTRACE=1 \ /usr/local/cargo/bin/cargo watch \ --why \ -w /src/wws \ - -x run + -x run \ + --features tls From 59d9fe499a54cac0eb366b478b77336acee98d83 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sat, 22 Mar 2025 16:26:14 -0400 Subject: [PATCH 237/306] Replace quay.io with Dockerhub. Domain seems busted? --- install/local/minio/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/install/local/minio/Dockerfile b/install/local/minio/Dockerfile index b40d06c1ed..90539318dc 100644 --- a/install/local/minio/Dockerfile +++ b/install/local/minio/Dockerfile @@ -1,5 +1,5 @@ -FROM quay.io/minio/mc:latest AS mc -FROM quay.io/minio/minio:latest AS minio +FROM minio/mc:latest AS mc +FROM minio/minio:latest AS minio COPY --from=mc /usr/bin/mc /usr/local/bin/mc COPY ./docker-entrypoint.sh ./healthcheck.sh / From 88a80ae9187943cfa81211af1fc7bf22a73ea8e8 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 24 Mar 2025 01:15:26 -0400 Subject: [PATCH 238/306] Include wws artifacts in dockerignore. --- .dockerignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.dockerignore b/.dockerignore index 3be4c16e46..c582fd6409 100644 --- a/.dockerignore +++ b/.dockerignore @@ -12,6 +12,7 @@ ftml/test ftml/target ftml/pkg deepwell/target +wws/target **/__pycache__ **/dist **/node_modules From 4a2d313f19ae3a2413b79998434398acf0a19086 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Mar 2025 20:28:35 -0400 Subject: [PATCH 239/306] Install wikijump-health-check helper into web. --- install/local/web/Dockerfile | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/install/local/web/Dockerfile b/install/local/web/Dockerfile index bb0cbcac64..db3ebedb40 100644 --- a/install/local/web/Dockerfile +++ b/install/local/web/Dockerfile @@ -10,6 +10,10 @@ COPY framerail /app COPY assets /app/src/assets WORKDIR /app +# Install files +COPY ./install/local/web/health-check.sh /usr/local/bin/wikijump-health-check + +# Install node dependencies RUN pnpm install EXPOSE 3000 From 129ec09233ca99889b365cab783e5c60a7a648dd Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Mar 2025 20:37:51 -0400 Subject: [PATCH 240/306] Fix health check scripts. 
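The reorganized scripts probe what each service actually exposes: framerail answers on port 3000 with the site slug and id headers, while wws answers on /-/health-check. For context, the wws side of that contract is an ordinary axum route; a minimal sketch (assumed for illustration, not taken from the wws sources) looks like:

    // Assumed sketch of the route the curl probe hits; the handler body is illustrative.
    use axum::{routing::get, Router};

    fn with_health_check(router: Router) -> Router {
        router.route("/-/health-check", get(|| async { "OK" }))
    }
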
--- install/common/router/health-check.sh | 6 +----- install/common/web/health-check.sh | 6 ++++++ install/dev/web/health-check.sh | 1 + install/local/router/health-check.sh | 3 +-- install/local/web/health-check.sh | 2 +- install/prod/web/health-check.sh | 1 + 6 files changed, 11 insertions(+), 8 deletions(-) create mode 100755 install/common/web/health-check.sh create mode 120000 install/dev/web/health-check.sh mode change 100755 => 120000 install/local/router/health-check.sh create mode 120000 install/prod/web/health-check.sh diff --git a/install/common/router/health-check.sh b/install/common/router/health-check.sh index d6a102fef9..4c72254b9e 100755 --- a/install/common/router/health-check.sh +++ b/install/common/router/health-check.sh @@ -1,6 +1,2 @@ #!/bin/sh -curl \ - -If \ - -H 'x-wikijump-site-slug: www' \ - -H 'x-wikijump-site-id: 1' \ - http://localhost:3000/ +curl -If http://localhost/-/health-check diff --git a/install/common/web/health-check.sh b/install/common/web/health-check.sh new file mode 100755 index 0000000000..d6a102fef9 --- /dev/null +++ b/install/common/web/health-check.sh @@ -0,0 +1,6 @@ +#!/bin/sh +curl \ + -If \ + -H 'x-wikijump-site-slug: www' \ + -H 'x-wikijump-site-id: 1' \ + http://localhost:3000/ diff --git a/install/dev/web/health-check.sh b/install/dev/web/health-check.sh new file mode 120000 index 0000000000..093d52486b --- /dev/null +++ b/install/dev/web/health-check.sh @@ -0,0 +1 @@ +../../common/web/health-check.sh \ No newline at end of file diff --git a/install/local/router/health-check.sh b/install/local/router/health-check.sh deleted file mode 100755 index 07ecd67ac4..0000000000 --- a/install/local/router/health-check.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -curl -i http://localhost/-/health-check diff --git a/install/local/router/health-check.sh b/install/local/router/health-check.sh new file mode 120000 index 0000000000..63d5f38b8a --- /dev/null +++ b/install/local/router/health-check.sh @@ -0,0 +1 @@ +../../common/router/health-check.sh \ No newline at end of file diff --git a/install/local/web/health-check.sh b/install/local/web/health-check.sh index 63d5f38b8a..093d52486b 120000 --- a/install/local/web/health-check.sh +++ b/install/local/web/health-check.sh @@ -1 +1 @@ -../../common/router/health-check.sh \ No newline at end of file +../../common/web/health-check.sh \ No newline at end of file diff --git a/install/prod/web/health-check.sh b/install/prod/web/health-check.sh new file mode 120000 index 0000000000..093d52486b --- /dev/null +++ b/install/prod/web/health-check.sh @@ -0,0 +1 @@ +../../common/web/health-check.sh \ No newline at end of file From ba24e16bf8c778b4ba279f902c7f2c9033e69b98 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Sun, 23 Mar 2025 20:38:52 -0400 Subject: [PATCH 241/306] Use new health script for router image. 
--- install/local/docker-compose.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index 94e915c168..baf7971bfb 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -121,7 +121,7 @@ services: - "CLIENT_IP_SOURCE=ConnectInfo" restart: always healthcheck: - test: ["CMD", "curl", "-If", "http://localhost/-/health-check"] + test: ["CMD", "wikijump-health-check"] interval: 120s timeout: 2s retries: 3 From 5a873401e9d063ff74e6ff7eca4d916da387c152 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 24 Mar 2025 03:36:58 -0400 Subject: [PATCH 242/306] Specify host port for wws. --- install/local/docker-compose.yaml | 3 ++- install/local/router/Dockerfile | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index baf7971bfb..4bfdd0700a 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -103,12 +103,13 @@ services: context: ../.. dockerfile: install/local/router/Dockerfile ports: - - "80:8080" + - "80:8000" links: - api - cache - web environment: + - "ADDRESS=[::]:8000" - "DEEPWELL_URL=http://api:2747" - "REDIS_URL=redis://cache" - "FRAMERAIL_HOST=web:3000" diff --git a/install/local/router/Dockerfile b/install/local/router/Dockerfile index 3cc606ab26..839ed0cb7f 100644 --- a/install/local/router/Dockerfile +++ b/install/local/router/Dockerfile @@ -21,5 +21,5 @@ RUN mkdir /src COPY ./wws /src/wws WORKDIR /src/wws -EXPOSE 2747 +EXPOSE 8000 CMD ["/usr/local/bin/wikijump-wws-start"] From fdc29133aa2315c12044e6eba755b89eb96d52a5 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Mon, 24 Mar 2025 03:38:05 -0400 Subject: [PATCH 243/306] Update local container for use of snake-oil SSL. --- install/common/router/health-check.sh | 2 +- install/local/docker-compose.yaml | 2 +- install/local/router/health-check.sh | 3 ++- 3 files changed, 4 insertions(+), 3 deletions(-) mode change 120000 => 100755 install/local/router/health-check.sh diff --git a/install/common/router/health-check.sh b/install/common/router/health-check.sh index 4c72254b9e..7ed507ee85 100755 --- a/install/common/router/health-check.sh +++ b/install/common/router/health-check.sh @@ -1,2 +1,2 @@ #!/bin/sh -curl -If http://localhost/-/health-check +curl -If http://localhost:8000/-/health-check diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index 4bfdd0700a..262a64cdeb 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -103,7 +103,7 @@ services: context: ../.. 
dockerfile: install/local/router/Dockerfile ports: - - "80:8000" + - "443:8000" links: - api - cache diff --git a/install/local/router/health-check.sh b/install/local/router/health-check.sh deleted file mode 120000 index 63d5f38b8a..0000000000 --- a/install/local/router/health-check.sh +++ /dev/null @@ -1 +0,0 @@ -../../common/router/health-check.sh \ No newline at end of file diff --git a/install/local/router/health-check.sh b/install/local/router/health-check.sh new file mode 100755 index 0000000000..574c815e73 --- /dev/null +++ b/install/local/router/health-check.sh @@ -0,0 +1,2 @@ +#!/bin/sh +curl -If -k https://localhost:8000/-/health-check From 688f72f10bbf9b84208d837d3892d5e4723409c9 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 02:01:27 -0400 Subject: [PATCH 244/306] Rename api -> deepwell, web -> framerail, router -> wws. Since WWS is no longer really a router, the name isn't too relevant. While we're at it, let's use service names instead of generic names to avoid this kind of thing in the future. I implement the changes to the GitHub workflow files in separate commits, since the diffs git has chosen are quite confusing and annoying. --- .../common/{api => deepwell}/health-check.sh | 0 .../common/{web => framerail}/health-check.sh | 0 .../common/{router => wws}/health-check.sh | 0 install/dev/api/health-check.sh | 1 - install/dev/{api => deepwell}/Dockerfile | 6 ++--- install/dev/{api => deepwell}/deepwell-start | 0 install/dev/{api => deepwell}/deepwell.toml | 0 install/dev/deepwell/health-check.sh | 1 + install/dev/{web => framerail}/Dockerfile | 0 install/dev/framerail/health-check.sh | 1 + install/dev/web/health-check.sh | 1 - install/dev/{router => wws}/Dockerfile | 6 ++--- install/dev/{router => wws}/health-check.sh | 0 install/local/api/health-check.sh | 1 - install/local/{api => deepwell}/Dockerfile | 6 ++--- .../local/{api => deepwell}/deepwell-start | 0 install/local/{api => deepwell}/deepwell.toml | 0 install/local/deepwell/health-check.sh | 1 + install/local/docker-compose.dev.yaml | 9 ++++--- install/local/docker-compose.yaml | 26 +++++++++---------- install/local/{web => framerail}/Dockerfile | 2 +- install/local/framerail/health-check.sh | 1 + install/local/web/health-check.sh | 1 - install/local/{router => wws}/Dockerfile | 4 +-- install/local/{router => wws}/health-check.sh | 0 install/local/{router => wws}/wws-start | 0 install/prod/api/health-check.sh | 1 - install/prod/{api => deepwell}/Dockerfile | 4 +-- install/prod/{api => deepwell}/deepwell.toml | 0 install/prod/deepwell/health-check.sh | 1 + install/prod/{web => framerail}/Dockerfile | 0 install/prod/framerail/health-check.sh | 1 + install/prod/web/health-check.sh | 1 - install/prod/{router => wws}/Dockerfile | 6 ++--- install/prod/{router => wws}/health-check.sh | 0 35 files changed, 41 insertions(+), 40 deletions(-) rename install/common/{api => deepwell}/health-check.sh (100%) rename install/common/{web => framerail}/health-check.sh (100%) rename install/common/{router => wws}/health-check.sh (100%) delete mode 120000 install/dev/api/health-check.sh rename install/dev/{api => deepwell}/Dockerfile (82%) rename install/dev/{api => deepwell}/deepwell-start (100%) rename install/dev/{api => deepwell}/deepwell.toml (100%) create mode 120000 install/dev/deepwell/health-check.sh rename install/dev/{web => framerail}/Dockerfile (100%) create mode 120000 install/dev/framerail/health-check.sh delete mode 120000 install/dev/web/health-check.sh rename install/dev/{router 
=> wws}/Dockerfile (65%) rename install/dev/{router => wws}/health-check.sh (100%) delete mode 120000 install/local/api/health-check.sh rename install/local/{api => deepwell}/Dockerfile (72%) rename install/local/{api => deepwell}/deepwell-start (100%) rename install/local/{api => deepwell}/deepwell.toml (100%) create mode 120000 install/local/deepwell/health-check.sh rename install/local/{web => framerail}/Dockerfile (76%) create mode 120000 install/local/framerail/health-check.sh delete mode 120000 install/local/web/health-check.sh rename install/local/{router => wws}/Dockerfile (74%) rename install/local/{router => wws}/health-check.sh (100%) rename install/local/{router => wws}/wws-start (100%) delete mode 120000 install/prod/api/health-check.sh rename install/prod/{api => deepwell}/Dockerfile (83%) rename install/prod/{api => deepwell}/deepwell.toml (100%) create mode 120000 install/prod/deepwell/health-check.sh rename install/prod/{web => framerail}/Dockerfile (100%) create mode 120000 install/prod/framerail/health-check.sh delete mode 120000 install/prod/web/health-check.sh rename install/prod/{router => wws}/Dockerfile (65%) rename install/prod/{router => wws}/health-check.sh (100%) diff --git a/install/common/api/health-check.sh b/install/common/deepwell/health-check.sh similarity index 100% rename from install/common/api/health-check.sh rename to install/common/deepwell/health-check.sh diff --git a/install/common/web/health-check.sh b/install/common/framerail/health-check.sh similarity index 100% rename from install/common/web/health-check.sh rename to install/common/framerail/health-check.sh diff --git a/install/common/router/health-check.sh b/install/common/wws/health-check.sh similarity index 100% rename from install/common/router/health-check.sh rename to install/common/wws/health-check.sh diff --git a/install/dev/api/health-check.sh b/install/dev/api/health-check.sh deleted file mode 120000 index 03329b7bcb..0000000000 --- a/install/dev/api/health-check.sh +++ /dev/null @@ -1 +0,0 @@ -../../common/api/health-check.sh \ No newline at end of file diff --git a/install/dev/api/Dockerfile b/install/dev/deepwell/Dockerfile similarity index 82% rename from install/dev/api/Dockerfile rename to install/dev/deepwell/Dockerfile index 09a36e4804..2eb6e819f2 100644 --- a/install/dev/api/Dockerfile +++ b/install/dev/deepwell/Dockerfile @@ -40,9 +40,9 @@ COPY --from=rust /usr/local/cargo/bin/sqlx /usr/local/cargo/bin/sqlx COPY --from=rust /src/deepwell/target/release/deepwell /usr/local/bin/deepwell COPY --from=rust /src/deepwell/migrations /opt/database/migrations COPY --from=rust /src/deepwell/seeder /opt/database/seeder -COPY ./install/dev/api/deepwell.toml /etc/deepwell.toml -COPY ./install/dev/api/deepwell-start /usr/local/bin/wikijump-deepwell-start -COPY ./install/dev/api/health-check.sh /usr/local/bin/wikijump-health-check +COPY ./install/dev/deepwell/deepwell.toml /etc/deepwell.toml +COPY ./install/dev/deepwell/deepwell-start /usr/local/bin/wikijump-deepwell-start +COPY ./install/dev/deepwell/health-check.sh /usr/local/bin/wikijump-health-check COPY ./locales/fluent /opt/locales/fluent USER daemon diff --git a/install/dev/api/deepwell-start b/install/dev/deepwell/deepwell-start similarity index 100% rename from install/dev/api/deepwell-start rename to install/dev/deepwell/deepwell-start diff --git a/install/dev/api/deepwell.toml b/install/dev/deepwell/deepwell.toml similarity index 100% rename from install/dev/api/deepwell.toml rename to install/dev/deepwell/deepwell.toml diff 
--git a/install/dev/deepwell/health-check.sh b/install/dev/deepwell/health-check.sh new file mode 120000 index 0000000000..3bb99327b3 --- /dev/null +++ b/install/dev/deepwell/health-check.sh @@ -0,0 +1 @@ +../../common/deepwell/health-check.sh \ No newline at end of file diff --git a/install/dev/web/Dockerfile b/install/dev/framerail/Dockerfile similarity index 100% rename from install/dev/web/Dockerfile rename to install/dev/framerail/Dockerfile diff --git a/install/dev/framerail/health-check.sh b/install/dev/framerail/health-check.sh new file mode 120000 index 0000000000..76f70d3f73 --- /dev/null +++ b/install/dev/framerail/health-check.sh @@ -0,0 +1 @@ +../../common/framerail/health-check.sh \ No newline at end of file diff --git a/install/dev/web/health-check.sh b/install/dev/web/health-check.sh deleted file mode 120000 index 093d52486b..0000000000 --- a/install/dev/web/health-check.sh +++ /dev/null @@ -1 +0,0 @@ -../../common/web/health-check.sh \ No newline at end of file diff --git a/install/dev/router/Dockerfile b/install/dev/wws/Dockerfile similarity index 65% rename from install/dev/router/Dockerfile rename to install/dev/wws/Dockerfile index 4716ac0f44..b875ecb0be 100644 --- a/install/dev/router/Dockerfile +++ b/install/dev/wws/Dockerfile @@ -8,8 +8,8 @@ FROM rust:latest AS rust RUN cargo install cargo-watch sqlx-cli # Install files -COPY ./install/local/router/wws-start /usr/local/bin/wikijump-wws-start -COPY ./install/local/router/health-check.sh /usr/local/bin/wikijump-health-check +COPY ./install/local/wws/wws-start /usr/local/bin/wikijump-wws-start +COPY ./install/local/wws/health-check.sh /usr/local/bin/wikijump-health-check # Copy source RUN mkdir /src @@ -30,7 +30,7 @@ FROM debian:latest # Install files COPY --from=rust /src/wws/target/release/wws /usr/local/bin/wws -COPY ./install/dev/router/health-check.sh /usr/local/bin/wikijump-health-check +COPY ./install/dev/wws/health-check.sh /usr/local/bin/wikijump-health-check USER daemon EXPOSE 2747 diff --git a/install/dev/router/health-check.sh b/install/dev/wws/health-check.sh similarity index 100% rename from install/dev/router/health-check.sh rename to install/dev/wws/health-check.sh diff --git a/install/local/api/health-check.sh b/install/local/api/health-check.sh deleted file mode 120000 index 03329b7bcb..0000000000 --- a/install/local/api/health-check.sh +++ /dev/null @@ -1 +0,0 @@ -../../common/api/health-check.sh \ No newline at end of file diff --git a/install/local/api/Dockerfile b/install/local/deepwell/Dockerfile similarity index 72% rename from install/local/api/Dockerfile rename to install/local/deepwell/Dockerfile index 5253bc457b..a8ffef1a07 100644 --- a/install/local/api/Dockerfile +++ b/install/local/deepwell/Dockerfile @@ -16,9 +16,9 @@ RUN apt install -y libmagic-dev RUN cargo install cargo-watch sqlx-cli # Install files -COPY ./install/local/api/deepwell.toml /etc/deepwell.toml -COPY ./install/local/api/deepwell-start /usr/local/bin/wikijump-deepwell-start -COPY ./install/local/api/health-check.sh /usr/local/bin/wikijump-health-check +COPY ./install/local/deepwell/deepwell.toml /etc/deepwell.toml +COPY ./install/local/deepwell/deepwell-start /usr/local/bin/wikijump-deepwell-start +COPY ./install/local/deepwell/health-check.sh /usr/local/bin/wikijump-health-check # /opt/locales is provided via docker-compose.dev.yaml diff --git a/install/local/api/deepwell-start b/install/local/deepwell/deepwell-start similarity index 100% rename from install/local/api/deepwell-start rename to 
install/local/deepwell/deepwell-start diff --git a/install/local/api/deepwell.toml b/install/local/deepwell/deepwell.toml similarity index 100% rename from install/local/api/deepwell.toml rename to install/local/deepwell/deepwell.toml diff --git a/install/local/deepwell/health-check.sh b/install/local/deepwell/health-check.sh new file mode 120000 index 0000000000..3bb99327b3 --- /dev/null +++ b/install/local/deepwell/health-check.sh @@ -0,0 +1 @@ +../../common/deepwell/health-check.sh \ No newline at end of file diff --git a/install/local/docker-compose.dev.yaml b/install/local/docker-compose.dev.yaml index ac714feaa3..a11693b1da 100644 --- a/install/local/docker-compose.dev.yaml +++ b/install/local/docker-compose.dev.yaml @@ -1,5 +1,5 @@ services: - api: + deepwell: volumes: # Rust sources - type: bind @@ -25,14 +25,15 @@ services: read_only: true # Configuration data - type: bind - source: ../../install/local/api/deepwell.toml + source: ../../install/local/deepwell/deepwell.toml target: /etc/deepwell.toml # Translation data - type: bind source: ../../locales target: /opt/locales read_only: true - web: + + framerail: volumes: # Typescript/Svelte files - type: bind @@ -64,7 +65,7 @@ services: source: ../../assets target: /app/src/assets read_only: true - router: + wws: volumes: # Rust sources - type: bind diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index 262a64cdeb..63d86be183 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -48,10 +48,10 @@ services: ports: - "6379:6379" - api: + deepwell: build: context: ../.. - dockerfile: install/local/api/Dockerfile + dockerfile: install/local/deepwell/Dockerfile ports: - "2747:2747" links: @@ -81,16 +81,16 @@ services: files: condition: service_healthy - web: + framerail: build: context: ../.. - dockerfile: install/local/web/Dockerfile + dockerfile: install/local/framerail/Dockerfile ports: - "3000:3000" links: - - api + - deepwell environment: - - "DEEPWELL_HOST=api" + - "DEEPWELL_HOST=deepwell" restart: always healthcheck: test: ["CMD", "wikijump-health-check"] @@ -98,21 +98,21 @@ services: timeout: 2s retries: 3 - router: + wws: build: context: ../.. 
- dockerfile: install/local/router/Dockerfile + dockerfile: install/local/wws/Dockerfile ports: - "443:8000" links: - - api + - deepwell - cache - - web + - framerail environment: - "ADDRESS=[::]:8000" - - "DEEPWELL_URL=http://api:2747" + - "DEEPWELL_URL=http://deepwell:2747" - "REDIS_URL=redis://cache" - - "FRAMERAIL_HOST=web:3000" + - "FRAMERAIL_HOST=framerail:3000" - "S3_BUCKET=deepwell-files" - "S3_REGION_NAME=local" - "S3_PATH_STYLE=true" @@ -127,7 +127,7 @@ services: timeout: 2s retries: 3 depends_on: - api: + deepwell: condition: service_healthy cache: condition: service_healthy diff --git a/install/local/web/Dockerfile b/install/local/framerail/Dockerfile similarity index 76% rename from install/local/web/Dockerfile rename to install/local/framerail/Dockerfile index db3ebedb40..ccfbe90f99 100644 --- a/install/local/web/Dockerfile +++ b/install/local/framerail/Dockerfile @@ -11,7 +11,7 @@ COPY assets /app/src/assets WORKDIR /app # Install files -COPY ./install/local/web/health-check.sh /usr/local/bin/wikijump-health-check +COPY ./install/local/framerail/health-check.sh /usr/local/bin/wikijump-health-check # Install node dependencies RUN pnpm install diff --git a/install/local/framerail/health-check.sh b/install/local/framerail/health-check.sh new file mode 120000 index 0000000000..76f70d3f73 --- /dev/null +++ b/install/local/framerail/health-check.sh @@ -0,0 +1 @@ +../../common/framerail/health-check.sh \ No newline at end of file diff --git a/install/local/web/health-check.sh b/install/local/web/health-check.sh deleted file mode 120000 index 093d52486b..0000000000 --- a/install/local/web/health-check.sh +++ /dev/null @@ -1 +0,0 @@ -../../common/web/health-check.sh \ No newline at end of file diff --git a/install/local/router/Dockerfile b/install/local/wws/Dockerfile similarity index 74% rename from install/local/router/Dockerfile rename to install/local/wws/Dockerfile index 839ed0cb7f..bbf97cac42 100644 --- a/install/local/router/Dockerfile +++ b/install/local/wws/Dockerfile @@ -12,8 +12,8 @@ FROM rust:latest AS rust RUN cargo install cargo-watch sqlx-cli # Install files -COPY ./install/local/router/wws-start /usr/local/bin/wikijump-wws-start -COPY ./install/local/router/health-check.sh /usr/local/bin/wikijump-health-check +COPY ./install/local/wws/wws-start /usr/local/bin/wikijump-wws-start +COPY ./install/local/wws/health-check.sh /usr/local/bin/wikijump-health-check # Copy source # Don't build until container execution (see cargo-watch) diff --git a/install/local/router/health-check.sh b/install/local/wws/health-check.sh similarity index 100% rename from install/local/router/health-check.sh rename to install/local/wws/health-check.sh diff --git a/install/local/router/wws-start b/install/local/wws/wws-start similarity index 100% rename from install/local/router/wws-start rename to install/local/wws/wws-start diff --git a/install/prod/api/health-check.sh b/install/prod/api/health-check.sh deleted file mode 120000 index 03329b7bcb..0000000000 --- a/install/prod/api/health-check.sh +++ /dev/null @@ -1 +0,0 @@ -../../common/api/health-check.sh \ No newline at end of file diff --git a/install/prod/api/Dockerfile b/install/prod/deepwell/Dockerfile similarity index 83% rename from install/prod/api/Dockerfile rename to install/prod/deepwell/Dockerfile index def16949f4..da4651aa57 100644 --- a/install/prod/api/Dockerfile +++ b/install/prod/deepwell/Dockerfile @@ -30,8 +30,8 @@ ENV LOCALIZATION_PATH="/opt/locales" RUN apt update RUN apt install -y curl libmagic1 libmagic-mgc COPY 
--from=rust /src/deepwell/target/release/deepwell /usr/local/bin/deepwell -COPY ./install/prod/api/health-check.sh /bin/wikijump-health-check -COPY ./install/prod/api/deepwell.toml /etc/deepwell.toml +COPY ./install/prod/deepwell/health-check.sh /bin/wikijump-health-check +COPY ./install/prod/deepwell/deepwell.toml /etc/deepwell.toml COPY ./locales/fluent /opt/locales/fluent USER daemon diff --git a/install/prod/api/deepwell.toml b/install/prod/deepwell/deepwell.toml similarity index 100% rename from install/prod/api/deepwell.toml rename to install/prod/deepwell/deepwell.toml diff --git a/install/prod/deepwell/health-check.sh b/install/prod/deepwell/health-check.sh new file mode 120000 index 0000000000..3bb99327b3 --- /dev/null +++ b/install/prod/deepwell/health-check.sh @@ -0,0 +1 @@ +../../common/deepwell/health-check.sh \ No newline at end of file diff --git a/install/prod/web/Dockerfile b/install/prod/framerail/Dockerfile similarity index 100% rename from install/prod/web/Dockerfile rename to install/prod/framerail/Dockerfile diff --git a/install/prod/framerail/health-check.sh b/install/prod/framerail/health-check.sh new file mode 120000 index 0000000000..76f70d3f73 --- /dev/null +++ b/install/prod/framerail/health-check.sh @@ -0,0 +1 @@ +../../common/framerail/health-check.sh \ No newline at end of file diff --git a/install/prod/web/health-check.sh b/install/prod/web/health-check.sh deleted file mode 120000 index 093d52486b..0000000000 --- a/install/prod/web/health-check.sh +++ /dev/null @@ -1 +0,0 @@ -../../common/web/health-check.sh \ No newline at end of file diff --git a/install/prod/router/Dockerfile b/install/prod/wws/Dockerfile similarity index 65% rename from install/prod/router/Dockerfile rename to install/prod/wws/Dockerfile index 0cb8c2a2e4..009e9a8dbd 100644 --- a/install/prod/router/Dockerfile +++ b/install/prod/wws/Dockerfile @@ -8,8 +8,8 @@ FROM rust:latest AS rust RUN cargo install cargo-watch sqlx-cli # Install files -COPY ./install/local/router/wws-start /usr/local/bin/wikijump-wws-start -COPY ./install/local/router/health-check.sh /usr/local/bin/wikijump-health-check +COPY ./install/local/wws/wws-start /usr/local/bin/wikijump-wws-start +COPY ./install/local/wws/health-check.sh /usr/local/bin/wikijump-health-check # Copy source RUN mkdir /src @@ -30,7 +30,7 @@ FROM debian:latest # Install files COPY --from=rust /src/wws/target/release/wws /usr/local/bin/wws -COPY ./install/prod/router/health-check.sh /usr/local/bin/wikijump-health-check +COPY ./install/prod/wws/health-check.sh /usr/local/bin/wikijump-health-check USER daemon EXPOSE 2747 diff --git a/install/prod/router/health-check.sh b/install/prod/wws/health-check.sh similarity index 100% rename from install/prod/router/health-check.sh rename to install/prod/wws/health-check.sh From 81743bac456e69d1a1d466b9696d318bbeb4051b Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 03:27:17 -0400 Subject: [PATCH 245/306] Update GitHub workflow for local deepwell. 
--- ...ld-api.local.yaml => docker-build-deepwell.local.yaml} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename .github/workflows/{docker-build-api.local.yaml => docker-build-deepwell.local.yaml} (51%) diff --git a/.github/workflows/docker-build-api.local.yaml b/.github/workflows/docker-build-deepwell.local.yaml similarity index 51% rename from .github/workflows/docker-build-api.local.yaml rename to .github/workflows/docker-build-deepwell.local.yaml index 5afb5472ef..074cac3107 100644 --- a/.github/workflows/docker-build-api.local.yaml +++ b/.github/workflows/docker-build-deepwell.local.yaml @@ -1,11 +1,11 @@ -name: '[backend] Docker build API (local)' +name: '[backend] Docker build DEEPWELL (local)' on: pull_request: paths: - 'deepwell/**' - - 'install/local/api/Dockerfile' - - '.github/workflows/docker-build-api.local.yaml' + - 'install/local/deepwell/Dockerfile' + - '.github/workflows/docker-build-deepwell.local.yaml' jobs: build: @@ -15,6 +15,6 @@ jobs: uses: actions/checkout@v2 - name: Build image - run: docker build -f install/local/api/Dockerfile . + run: docker build -f install/local/deepwell/Dockerfile . env: DOCKER_BUILDKIT: 1 From 0bd18d5acf3d864519acc5cc0fff3101f5e10701 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 03:27:55 -0400 Subject: [PATCH 246/306] Update GitHub workflow for local framerail. --- ...ild-web.local.yaml => docker-build-framerail.local.yaml} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename .github/workflows/{docker-build-web.local.yaml => docker-build-framerail.local.yaml} (62%) diff --git a/.github/workflows/docker-build-web.local.yaml b/.github/workflows/docker-build-framerail.local.yaml similarity index 62% rename from .github/workflows/docker-build-web.local.yaml rename to .github/workflows/docker-build-framerail.local.yaml index 898e3314bc..2e1a07e07e 100644 --- a/.github/workflows/docker-build-web.local.yaml +++ b/.github/workflows/docker-build-framerail.local.yaml @@ -4,8 +4,8 @@ on: pull_request: paths: - 'framerail/**' - - 'install/local/web/Dockerfile' - - '.github/workflows/docker-build-web.local.yaml' + - 'install/local/framerail/Dockerfile' + - '.github/workflows/docker-build-framerail.local.yaml' jobs: build: @@ -15,6 +15,6 @@ jobs: uses: actions/checkout@v2 - name: Build image - run: docker build -f install/local/web/Dockerfile . + run: docker build -f install/local/framerail/Dockerfile . env: DOCKER_BUILDKIT: 1 From 8c816c3eaf87a7439b94ee0833a78cb067c4728f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 03:28:19 -0400 Subject: [PATCH 247/306] Update GitHub workflow for local wws. 
--- ...uild-router.local.yaml => docker-build-wws.local.yaml} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename .github/workflows/{docker-build-router.local.yaml => docker-build-wws.local.yaml} (52%) diff --git a/.github/workflows/docker-build-router.local.yaml b/.github/workflows/docker-build-wws.local.yaml similarity index 52% rename from .github/workflows/docker-build-router.local.yaml rename to .github/workflows/docker-build-wws.local.yaml index f1100ecdbb..c61149a1a7 100644 --- a/.github/workflows/docker-build-router.local.yaml +++ b/.github/workflows/docker-build-wws.local.yaml @@ -1,11 +1,11 @@ -name: '[backend] Docker build router (local)' +name: '[backend] Docker build WWS (local)' on: pull_request: paths: - 'deepwell/**' - - 'install/local/router/Dockerfile' - - '.github/workflows/docker-build-router.local.yaml' + - 'install/local/wws/Dockerfile' + - '.github/workflows/docker-build-wws.local.yaml' jobs: build: @@ -15,6 +15,6 @@ jobs: uses: actions/checkout@v2 - name: Build image - run: docker build -f install/local/router/Dockerfile . + run: docker build -f install/local/wws/Dockerfile . env: DOCKER_BUILDKIT: 1 From 8a3e5eac6c07f7085e012467c78a5fd356f5277b Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 03:30:44 -0400 Subject: [PATCH 248/306] Update GitHub workflow for dev deepwell. --- ...-build-api.dev.yaml => docker-build-deepwell.dev.yaml} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename .github/workflows/{docker-build-api.dev.yaml => docker-build-deepwell.dev.yaml} (52%) diff --git a/.github/workflows/docker-build-api.dev.yaml b/.github/workflows/docker-build-deepwell.dev.yaml similarity index 52% rename from .github/workflows/docker-build-api.dev.yaml rename to .github/workflows/docker-build-deepwell.dev.yaml index 5c8e268ebd..4503c09d30 100644 --- a/.github/workflows/docker-build-api.dev.yaml +++ b/.github/workflows/docker-build-deepwell.dev.yaml @@ -1,11 +1,11 @@ -name: '[backend] Docker build API (dev)' +name: '[backend] Docker build DEEPWELL (dev)' on: pull_request: paths: - 'deepwell/**' - - 'install/dev/api/Dockerfile' - - '.github/workflows/docker-build-api.dev.yaml' + - 'install/dev/deepwell/Dockerfile' + - '.github/workflows/docker-build-deepwell.dev.yaml' jobs: build: @@ -15,6 +15,6 @@ jobs: uses: actions/checkout@v2 - name: Build image - run: docker build -f install/dev/api/Dockerfile . + run: docker build -f install/dev/deepwell/Dockerfile . env: DOCKER_BUILDKIT: 1 From dd249d5e7d72e9d3048dd571fc4edc0ad92d7c6c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 03:31:35 -0400 Subject: [PATCH 249/306] Update GitHub workflow for dev framerail.
--- ...r-build-web.dev.yaml => docker-build-framerail.dev.yaml} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename .github/workflows/{docker-build-web.dev.yaml => docker-build-framerail.dev.yaml} (62%) diff --git a/.github/workflows/docker-build-web.dev.yaml b/.github/workflows/docker-build-framerail.dev.yaml similarity index 62% rename from .github/workflows/docker-build-web.dev.yaml rename to .github/workflows/docker-build-framerail.dev.yaml index 02e10ac9e3..376f8d4756 100644 --- a/.github/workflows/docker-build-web.dev.yaml +++ b/.github/workflows/docker-build-framerail.dev.yaml @@ -4,8 +4,8 @@ on: pull_request: paths: - 'framerail/**' - - 'install/dev/web/Dockerfile' - - '.github/workflows/docker-build-web.dev.yaml' + - 'install/dev/framerail/Dockerfile' + - '.github/workflows/docker-build-framerail.dev.yaml' jobs: build: @@ -15,6 +15,6 @@ jobs: uses: actions/checkout@v2 - name: Build image - run: docker build -f install/dev/web/Dockerfile . + run: docker build -f install/dev/framerail/Dockerfile . env: DOCKER_BUILDKIT: 1 From 29aa7c7191988b57fc4232912833c0a85c6af56b Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 03:32:30 -0400 Subject: [PATCH 250/306] Update GitHub workflow for dev wws. --- ...er-build-router.dev.yaml => docker-build-wws.dev.yaml} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename .github/workflows/{docker-build-router.dev.yaml => docker-build-wws.dev.yaml} (53%) diff --git a/.github/workflows/docker-build-router.dev.yaml b/.github/workflows/docker-build-wws.dev.yaml similarity index 53% rename from .github/workflows/docker-build-router.dev.yaml rename to .github/workflows/docker-build-wws.dev.yaml index dea772f0ff..c0101febc5 100644 --- a/.github/workflows/docker-build-router.dev.yaml +++ b/.github/workflows/docker-build-wws.dev.yaml @@ -1,11 +1,11 @@ -name: '[backend] Docker build router (dev)' +name: '[backend] Docker build WWS (dev)' on: pull_request: paths: - 'deepwell/**' - - 'install/dev/router/Dockerfile' - - '.github/workflows/docker-build-router.dev.yaml' + - 'install/dev/wws/Dockerfile' + - '.github/workflows/docker-build-wws.dev.yaml' jobs: build: @@ -15,6 +15,6 @@ jobs: uses: actions/checkout@v2 - name: Build image - run: docker build -f install/dev/router/Dockerfile . + run: docker build -f install/dev/wws/Dockerfile . env: DOCKER_BUILDKIT: 1 From 5b5bf774773bb1336a80bb5d62d87d3129e93fed Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 03:33:07 -0400 Subject: [PATCH 251/306] Update GitHub workflow for prod deepwell. 
--- ...uild-api.prod.yaml => docker-build-deepwell.prod.yaml} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename .github/workflows/{docker-build-api.prod.yaml => docker-build-deepwell.prod.yaml} (52%) diff --git a/.github/workflows/docker-build-api.prod.yaml b/.github/workflows/docker-build-deepwell.prod.yaml similarity index 52% rename from .github/workflows/docker-build-api.prod.yaml rename to .github/workflows/docker-build-deepwell.prod.yaml index b9a7334522..c2d9fd0534 100644 --- a/.github/workflows/docker-build-api.prod.yaml +++ b/.github/workflows/docker-build-deepwell.prod.yaml @@ -1,11 +1,11 @@ -name: '[backend] Docker build API (prod)' +name: '[backend] Docker build DEEPWELL (prod)' on: pull_request: paths: - 'deepwell/**' - - 'install/prod/api/Dockerfile' - - '.github/workflows/docker-build-api.prod.yaml' + - 'install/prod/deepwell/Dockerfile' + - '.github/workflows/docker-build-deepwell.prod.yaml' jobs: build: @@ -15,6 +15,6 @@ jobs: uses: actions/checkout@v2 - name: Build image - run: docker build -f install/prod/api/Dockerfile . + run: docker build -f install/prod/deepwell/Dockerfile . env: DOCKER_BUILDKIT: 1 From 57eb0f51caf1fd1e2a35917f931d001e4014f35f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 03:34:33 -0400 Subject: [PATCH 252/306] Update GitHub workflow for prod framerail. --- ...build-web.prod.yaml => docker-build-framerail.prod.yaml} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename .github/workflows/{docker-build-web.prod.yaml => docker-build-framerail.prod.yaml} (62%) diff --git a/.github/workflows/docker-build-web.prod.yaml b/.github/workflows/docker-build-framerail.prod.yaml similarity index 62% rename from .github/workflows/docker-build-web.prod.yaml rename to .github/workflows/docker-build-framerail.prod.yaml index 3c8c3b6c11..8ea5e16bcd 100644 --- a/.github/workflows/docker-build-web.prod.yaml +++ b/.github/workflows/docker-build-framerail.prod.yaml @@ -4,8 +4,8 @@ on: pull_request: paths: - 'framerail/**' - - 'install/prod/web/Dockerfile' - - '.github/workflows/docker-build-web.prod.yaml' + - 'install/prod/framerail/Dockerfile' + - '.github/workflows/docker-build-framerail.prod.yaml' jobs: build: @@ -15,6 +15,6 @@ jobs: uses: actions/checkout@v2 - name: Build image - run: docker build -f install/prod/web/Dockerfile . + run: docker build -f install/prod/framerail/Dockerfile . env: DOCKER_BUILDKIT: 1 From 92f1d222fcede281041c6437d41d0dc6c12f6038 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 03:34:50 -0400 Subject: [PATCH 253/306] Update GitHub workflow for prod wws. 
--- ...-build-router.prod.yaml => docker-build-wws.prod.yaml} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename .github/workflows/{docker-build-router.prod.yaml => docker-build-wws.prod.yaml} (53%) diff --git a/.github/workflows/docker-build-router.prod.yaml b/.github/workflows/docker-build-wws.prod.yaml similarity index 53% rename from .github/workflows/docker-build-router.prod.yaml rename to .github/workflows/docker-build-wws.prod.yaml index d8bc7605e7..9d984feb6a 100644 --- a/.github/workflows/docker-build-router.prod.yaml +++ b/.github/workflows/docker-build-wws.prod.yaml @@ -1,11 +1,11 @@ -name: '[backend] Docker build router (prod)' +name: '[backend] Docker build WWS (prod)' on: pull_request: paths: - 'deepwell/**' - - 'install/prod/router/Dockerfile' - - '.github/workflows/docker-build-router.prod.yaml' + - 'install/prod/wws/Dockerfile' + - '.github/workflows/docker-build-wws.prod.yaml' jobs: build: @@ -15,6 +15,6 @@ jobs: uses: actions/checkout@v2 - name: Build image - run: docker build -f install/prod/router/Dockerfile . + run: docker build -f install/prod/wws/Dockerfile . env: DOCKER_BUILDKIT: 1 From 662ebed8b5a11cb7907260283db994c28d261f4b Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 02:07:05 -0400 Subject: [PATCH 254/306] Change default port for wws. --- install/local/docker-compose.yaml | 4 ++-- wws/.env.example | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index 63d86be183..c4542bf003 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -103,13 +103,13 @@ services: context: ../.. dockerfile: install/local/wws/Dockerfile ports: - - "443:8000" + - "7000:7000" links: - deepwell - cache - framerail environment: - - "ADDRESS=[::]:8000" + - "ADDRESS=[::]:7000" - "DEEPWELL_URL=http://deepwell:2747" - "REDIS_URL=redis://cache" - "FRAMERAIL_HOST=framerail:3000" diff --git a/wws/.env.example b/wws/.env.example index 8dafccd0ca..36e1e3daa3 100644 --- a/wws/.env.example +++ b/wws/.env.example @@ -3,6 +3,9 @@ # # If you're using docker-compose, these are already set in the container as appropriate. +# The location to host this server at. +ADDRESS=[::]:7000 + # DEEPWELL URL DEEPWELL_URL=http://localhost:2747 From cd8385304b17af380bc378c02e8bcb3421481653 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 02:19:28 -0400 Subject: [PATCH 255/306] Add initial Caddyfile. 
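
Every site block in this Caddyfile resolves the incoming hostname to a site and injects
X-Wikijump-Site-Id and X-Wikijump-Site-Slug request headers before reverse-proxying to
framerail (port 3000) or wws (port 7000). As a rough illustration only, not code added by
this commit (the struct and function names below are made up), a service behind the proxy
can read those headers back out like this:

    use std::collections::HashMap;

    /// Site identity as injected by the proxy.
    #[derive(Debug, PartialEq)]
    struct SiteInfo {
        site_id: i64,
        site_slug: String,
    }

    /// HTTP header names are case-insensitive, so match them accordingly.
    fn site_info(headers: &HashMap<String, String>) -> Option<SiteInfo> {
        let get = |name: &str| {
            headers
                .iter()
                .find(|(k, _)| k.eq_ignore_ascii_case(name))
                .map(|(_, v)| v.as_str())
        };

        Some(SiteInfo {
            site_id: get("X-Wikijump-Site-Id")?.parse().ok()?,
            site_slug: get("X-Wikijump-Site-Slug")?.to_string(),
        })
    }

    fn main() {
        let mut headers = HashMap::new();
        headers.insert("x-wikijump-site-id".to_string(), "4".to_string());
        headers.insert("x-wikijump-site-slug".to_string(), "scp-wiki".to_string());

        let info = site_info(&headers).expect("headers set by the proxy");
        assert_eq!(info.site_id, 4);
        assert_eq!(info.site_slug, "scp-wiki");
    }
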
--- install/local/caddy/Caddyfile | 109 +++++++++++++++++++++++++++++++++ install/local/caddy/Dockerfile | 3 + 2 files changed, 112 insertions(+) create mode 100644 install/local/caddy/Caddyfile create mode 100644 install/local/caddy/Dockerfile diff --git a/install/local/caddy/Caddyfile b/install/local/caddy/Caddyfile new file mode 100644 index 0000000000..dbb9afd28d --- /dev/null +++ b/install/local/caddy/Caddyfile @@ -0,0 +1,109 @@ +# Global options +{ + debug + + http_port 8000 + https_port 8443 + + metrics { + per_host + } + + skip_install_trust +} + +# +# MAIN +# + +(serve_main) { + reverse_proxy http://localhost:3000 +} + +# Preferred domain +scpwiki.localhost { + request_header X-Wikijump-Site-Id 4 + request_header X-Wikijump-Site-Slug scp-wiki + + import serve_main +} + +test.wikijump.localhost { + request_header X-Wikijump-Site-Id 2 + request_header X-Wikijump-Site-Slug test + + import serve_main +} + +# Alternate domain (canonical) +scp-wiki.wikijump.localhost { + redir https://scpwiki.localhost{uri} +} + +# Alternate domain (alias) +scpwiki.wikijump.localhost { + redir https://scpwiki.localhost{uri} +} + +# Alternate domain (custom) +testdomain.localhost { + redir https://test.wikijump.localhost{uri} +} + +# Other sites +wikijump.localhost { + request_header X-Wikijump-Site-Id 1 + request_header X-Wikijump-Site-Slug www + + import serve_main +} + +template-en.wikijump.localhost { + request_header X-Wikijump-Site-Id 3 + request_header X-Wikijump-Site-Slug template-en + + import serve_main +} + +# +# FILES +# + +(serve_files) { + reverse_proxy http://localhost:7000 +} + +*.wjfiles.localhost { + request_header X-Wikijump-Site-Slug {labels.2} + + @www host www.wjfiles.localhost + request_header @www X-Wikijump-Site-Id 1 + + @test host test.wjfiles.localhost + request_header @test X-Wikijump-Site-Id 2 + + @template-en host test.wjfiles.localhost + request_header @template-en X-Wikijump-Site-Id 3 + + @scp-wiki host scp-wiki.wjfiles.localhost + request_header @scp-wiki X-Wikijump-Site-Id 4 + + import serve_files +} + +# +# OTHER +# + +# Remove www +www.wikijump.localhost, +www.scpwiki.localhost { + redir {labels.1}.{labels.0}{uri} +} + +# Fallback route +http://, +https://, +localhost { + respond "fallback" +} diff --git a/install/local/caddy/Dockerfile b/install/local/caddy/Dockerfile new file mode 100644 index 0000000000..aace7a6f93 --- /dev/null +++ b/install/local/caddy/Dockerfile @@ -0,0 +1,3 @@ +FROM caddy:alpine + +COPY ./install/local/caddy/Caddyfile /etc/caddy/Caddyfile From fa14ba8d9b84b31c20284bccda9766090e64718f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 02:20:35 -0400 Subject: [PATCH 256/306] Add newlines in docker-compose.dev.yaml --- install/local/docker-compose.dev.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/install/local/docker-compose.dev.yaml b/install/local/docker-compose.dev.yaml index a11693b1da..99010bfd45 100644 --- a/install/local/docker-compose.dev.yaml +++ b/install/local/docker-compose.dev.yaml @@ -65,6 +65,7 @@ services: source: ../../assets target: /app/src/assets read_only: true + wws: volumes: # Rust sources From d6a2df1f472ef2139cdf9ef3f7aef8ab947103cf Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 03:46:21 -0400 Subject: [PATCH 257/306] Enhance initial Caddyfile. 
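
The interesting part of this change is the @files matcher: it enumerates every route that
should be served from the wjfiles domain and redirects it there, so the main domain never
serves user files directly. A minimal sketch of the same rule, illustrative only and not
code in this commit:

    /// True if the request path belongs on the files domain.
    /// Loosely mirrors the @files matcher added to the Caddyfile.
    fn is_files_route(path: &str) -> bool {
        const PREFIXES: &[&str] = &[
            "/local--files/", "/local--code/", "/local--html/",
            "/-/files/", "/-/file/", "/-/download/", "/-/code/", "/-/html/",
        ];
        const PER_PAGE: &[&str] = &["/code/", "/html/", "/file/", "/download/"];

        if PREFIXES.iter().any(|prefix| path.starts_with(prefix)) {
            return true;
        }

        // "/{slug}/code/...", "/{slug}/file/...", and similar routes.
        if let Some(rest) = path.strip_prefix('/') {
            if let Some(index) = rest.find('/') {
                let (slug, tail) = rest.split_at(index);
                return !slug.is_empty()
                    && PER_PAGE.iter().any(|segment| tail.starts_with(segment));
            }
        }

        false
    }

    fn main() {
        assert!(is_files_route("/scp-173/code/1"));
        assert!(is_files_route("/local--files/some-page/image.png"));
        assert!(!is_files_route("/scp-173"));
    }
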
--- install/local/caddy/Caddyfile | 67 +++++++++++++++++++++++++++-------- 1 file changed, 53 insertions(+), 14 deletions(-) diff --git a/install/local/caddy/Caddyfile b/install/local/caddy/Caddyfile index dbb9afd28d..3029dd16d3 100644 --- a/install/local/caddy/Caddyfile +++ b/install/local/caddy/Caddyfile @@ -17,20 +17,48 @@ # (serve_main) { + # Routes that exist on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + + } + redir @files https://{vars.site_slug}.wjfiles.localhost{uri} + reverse_proxy http://localhost:3000 } # Preferred domain scpwiki.localhost { - request_header X-Wikijump-Site-Id 4 - request_header X-Wikijump-Site-Slug scp-wiki + vars { + site_id 4 + site_slug scp-wiki + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} import serve_main } test.wikijump.localhost { - request_header X-Wikijump-Site-Id 2 - request_header X-Wikijump-Site-Slug test + vars { + site_id 2 + site_slug test + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} import serve_main } @@ -52,15 +80,25 @@ testdomain.localhost { # Other sites wikijump.localhost { - request_header X-Wikijump-Site-Id 1 - request_header X-Wikijump-Site-Slug www + vars { + site_id 1 + site_slug www + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} import serve_main } template-en.wikijump.localhost { - request_header X-Wikijump-Site-Id 3 - request_header X-Wikijump-Site-Slug template-en + vars { + site_id 3 + site_slug template-en + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} import serve_main } @@ -74,19 +112,20 @@ template-en.wikijump.localhost { } *.wjfiles.localhost { - request_header X-Wikijump-Site-Slug {labels.2} - @www host www.wjfiles.localhost - request_header @www X-Wikijump-Site-Id 1 + vars @www site_id 1 @test host test.wjfiles.localhost - request_header @test X-Wikijump-Site-Id 2 + vars @test site_id 2 @template-en host test.wjfiles.localhost - request_header @template-en X-Wikijump-Site-Id 3 + vars @template-en site_id 3 @scp-wiki host scp-wiki.wjfiles.localhost - request_header @scp-wiki X-Wikijump-Site-Id 4 + vars @scp-wiki site_id 4 + + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} import serve_files } From 88e9871e36ab8cf0dc685ce60d0e1f8552f16bb9 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 03:53:15 -0400 Subject: [PATCH 258/306] Run caddy fmt --overwrite. 
--- install/local/caddy/Caddyfile | 141 +++++++++++++++++----------------- 1 file changed, 70 insertions(+), 71 deletions(-) diff --git a/install/local/caddy/Caddyfile b/install/local/caddy/Caddyfile index 3029dd16d3..986e1f4f0b 100644 --- a/install/local/caddy/Caddyfile +++ b/install/local/caddy/Caddyfile @@ -1,15 +1,15 @@ # Global options { - debug + debug - http_port 8000 - https_port 8443 + http_port 8000 + https_port 8443 - metrics { - per_host - } + metrics { + per_host + } - skip_install_trust + skip_install_trust } # @@ -17,90 +17,89 @@ # (serve_main) { - # Routes that exist on the files server - @files { - path /*/code/* - path /*/html/* - path /*/file/* - path /*/download/* - path /local--files/* - path /local--code/* - path /local--html/* - path /-/files/* - path /-/file/* - path /-/download/* - path /-/code/* - path /-/html/* - - } - redir @files https://{vars.site_slug}.wjfiles.localhost{uri} - - reverse_proxy http://localhost:3000 + # Routes that exist on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.localhost{uri} + + reverse_proxy http://localhost:3000 } # Preferred domain scpwiki.localhost { - vars { - site_id 4 - site_slug scp-wiki - } + vars { + site_id 4 + site_slug scp-wiki + } - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} - import serve_main + import serve_main } test.wikijump.localhost { - vars { - site_id 2 - site_slug test - } + vars { + site_id 2 + site_slug test + } - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} - import serve_main + import serve_main } # Alternate domain (canonical) scp-wiki.wikijump.localhost { - redir https://scpwiki.localhost{uri} + redir https://scpwiki.localhost{uri} } # Alternate domain (alias) scpwiki.wikijump.localhost { - redir https://scpwiki.localhost{uri} + redir https://scpwiki.localhost{uri} } # Alternate domain (custom) testdomain.localhost { - redir https://test.wikijump.localhost{uri} + redir https://test.wikijump.localhost{uri} } # Other sites wikijump.localhost { - vars { - site_id 1 - site_slug www - } + vars { + site_id 1 + site_slug www + } - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} - import serve_main + import serve_main } template-en.wikijump.localhost { - vars { - site_id 3 - site_slug template-en - } + vars { + site_id 3 + site_slug template-en + } - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} - import serve_main + import serve_main } # @@ -108,26 +107,26 @@ template-en.wikijump.localhost { # (serve_files) { - reverse_proxy http://localhost:7000 + reverse_proxy http://localhost:7000 } *.wjfiles.localhost { - @www host www.wjfiles.localhost - vars @www site_id 1 + @www host www.wjfiles.localhost + vars @www 
site_id 1 - @test host test.wjfiles.localhost - vars @test site_id 2 + @test host test.wjfiles.localhost + vars @test site_id 2 - @template-en host test.wjfiles.localhost - vars @template-en site_id 3 + @template-en host test.wjfiles.localhost + vars @template-en site_id 3 - @scp-wiki host scp-wiki.wjfiles.localhost - vars @scp-wiki site_id 4 + @scp-wiki host scp-wiki.wjfiles.localhost + vars @scp-wiki site_id 4 - request_header X-Wikijump-Site-Slug {labels.2} - request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} - import serve_files + import serve_files } # @@ -137,12 +136,12 @@ template-en.wikijump.localhost { # Remove www www.wikijump.localhost, www.scpwiki.localhost { - redir {labels.1}.{labels.0}{uri} + redir {labels.1}.{labels.0}{uri} } # Fallback route http://, https://, localhost { - respond "fallback" + respond "fallback" } From 7a7192f5f7bd16463e9f0134a17a78737cd141ec Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 04:08:25 -0400 Subject: [PATCH 259/306] Add handler for missing sites. --- install/local/caddy/Caddyfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/install/local/caddy/Caddyfile b/install/local/caddy/Caddyfile index 986e1f4f0b..01028c36dd 100644 --- a/install/local/caddy/Caddyfile +++ b/install/local/caddy/Caddyfile @@ -143,5 +143,6 @@ www.scpwiki.localhost { http://, https://, localhost { - respond "fallback" + rewrite * /-/special-error + reverse_proxy http://localhost:3000 } From 4265c02014c71adb66770f59cce2ad15bfe0059d Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 22:24:23 -0400 Subject: [PATCH 260/306] Update Caddyfile template. --- install/local/caddy/Caddyfile | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/install/local/caddy/Caddyfile b/install/local/caddy/Caddyfile index 01028c36dd..3ecb0d9fda 100644 --- a/install/local/caddy/Caddyfile +++ b/install/local/caddy/Caddyfile @@ -1,13 +1,13 @@ # Global options { - debug + metrics { + per_host + } http_port 8000 https_port 8443 - metrics { - per_host - } + debug skip_install_trust } @@ -21,7 +21,7 @@ @files { path /*/code/* path /*/html/* - path /*/file/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes path /*/download/* path /local--files/* path /local--code/* @@ -143,6 +143,7 @@ www.scpwiki.localhost { http://, https://, localhost { - rewrite * /-/special-error + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site reverse_proxy http://localhost:3000 } From f56ee8446d9bfe32733a65f698371c6a5a0889ac Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 22:24:38 -0400 Subject: [PATCH 261/306] Add initial Caddyfile generation implementation. 
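
The endpoint assembles the Caddyfile as a plain string: a fixed preamble, then optional
global directives (http_port, https_port, debug, skip_install_trust) appended only when the
caller asked for them. A condensed sketch of that pattern, written here with std::fmt::Write
instead of the str!/str_writeln! helpers the real code uses:

    use std::fmt::Write;

    struct CaddyfileOptions {
        debug: bool,
        local: bool,
        http_port: Option<i64>,
        https_port: Option<i64>,
    }

    /// Render only the global options block, as an example of the
    /// conditional-append style used by the full generator.
    fn global_options(options: &CaddyfileOptions) -> String {
        let mut out = String::from("{\n\tmetrics {\n\t\tper_host\n\t}\n");

        if let Some(port) = options.http_port {
            // Writing to a String cannot fail, so unwrap is fine.
            writeln!(out, "\thttp_port {port}").unwrap();
        }
        if let Some(port) = options.https_port {
            writeln!(out, "\thttps_port {port}").unwrap();
        }
        if options.debug {
            out.push_str("\tdebug\n");
        }
        if options.local {
            out.push_str("\tskip_install_trust\n");
        }

        out.push_str("}\n");
        out
    }

    fn main() {
        print!(
            "{}",
            global_options(&CaddyfileOptions {
                debug: true,
                local: true,
                http_port: Some(8000),
                https_port: Some(8443),
            }),
        );
    }
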
--- deepwell/src/api.rs | 7 +- deepwell/src/config/object.rs | 1 - deepwell/src/endpoints/mod.rs | 1 + deepwell/src/endpoints/routing.rs | 157 ++++++++++++++++++++++++++++++ 4 files changed, 163 insertions(+), 3 deletions(-) create mode 100644 deepwell/src/endpoints/routing.rs diff --git a/deepwell/src/api.rs b/deepwell/src/api.rs index 4abe3edbf8..75d54111fc 100644 --- a/deepwell/src/api.rs +++ b/deepwell/src/api.rs @@ -30,8 +30,8 @@ use crate::config::{Config, Secrets}; use crate::endpoints::{ auth::*, blob::*, category::*, domain::*, email::*, file::*, file_revision::*, info::*, link::*, locale::*, message::*, misc::*, page::*, page_revision::*, - parent::*, site::*, site_member::*, special_error::*, text::*, user::*, user_bot::*, - view::*, vote::*, + parent::*, routing::*, site::*, site_member::*, special_error::*, text::*, user::*, + user_bot::*, view::*, vote::*, }; use crate::locales::Localizations; use crate::services::blob::MimeAnalyzer; @@ -190,6 +190,9 @@ async fn build_module(app_state: ServerState) -> anyhow::Result<RpcModule<Server register!("locale", locale_info); register!("translate", translate_strings); + // Web routing + register!("caddyfile", generate_caddyfile); + // Web server register!("page_view", page_view); register!("user_view", user_view); diff --git a/deepwell/src/config/object.rs b/deepwell/src/config/object.rs index a8939089d7..417b81e6d0 100644 --- a/deepwell/src/config/object.rs +++ b/deepwell/src/config/object.rs @@ -65,7 +65,6 @@ pub struct Config { /// The files domain to serve user-generated content from. /// /// Always starts with a `.` - #[allow(dead_code)] // TEMP pub files_domain: String, /// The files domain, but without a leading `.` diff --git a/deepwell/src/endpoints/mod.rs b/deepwell/src/endpoints/mod.rs index 0fe77ee569..3e70ba1e2a 100644 --- a/deepwell/src/endpoints/mod.rs +++ b/deepwell/src/endpoints/mod.rs @@ -57,6 +57,7 @@ pub mod misc; pub mod page; pub mod page_revision; pub mod parent; +pub mod routing; pub mod site; pub mod site_member; pub mod special_error; diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs new file mode 100644 index 0000000000..d0ec610690 --- /dev/null +++ b/deepwell/src/endpoints/routing.rs @@ -0,0 +1,157 @@ +/* + * endpoints/routing.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ */ + +use super::prelude::*; + +#[derive(Deserialize, Debug)] +struct CaddyfileOptions { + #[serde(default)] + debug: bool, + + #[serde(default)] + local: bool, + + #[serde(default)] + http_port: Option<i64>, + + #[serde(default)] + https_port: Option<i64>, + + // Infra information + framerail_host: String, +} + +pub async fn generate_caddyfile( + ctx: &ServiceContext<'_>, + params: Params<'static>, +) -> Result<String> { + // TODO split into pure function + info!("Generating Caddyfile for current sites"); + + let CaddyfileOptions { + debug, + local, + http_port, + https_port, + framerail_host, + } = params.parse()?; + + let config = ctx.config(); + let main_domain = &config.main_domain; + let files_domain = &config.files_domain; + + let mut caddyfile = str!( + "\ +# Globals option +{ + metrics { + per_host + } +" + ); + + if let Some(port) = http_port { + str_writeln!(&mut caddyfile, "\thttp_port {port}"); + } + + if let Some(port) = https_port { + str_writeln!(&mut caddyfile, "\thttps_port {port}"); + } + + if debug { + str_writeln!(&mut caddyfile, "\tdebug"); + } + + if local { + str_writeln!(&mut caddyfile, "\tskip_install_trust"); + } + + str_writeln!( + &mut caddyfile, + "\ +}} + +# +# MAIN +# + +(serve_main) {{ + # Redirect, route is on the files server + @files {{ + path /*/code/* + path /*/html/* + path /*/file/* # for the /{{slug}}/file/{{filename}} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + }} + redir @files https://{{vars.site_slug}}{files_domain}{{uri}} + + reverse_proxy http://localhost:3000 +}} +" + ); + + // TODO generate main site sections + + str_writeln!( + &mut caddyfile, + " +# +# FILES +# + +(serve_files) {{ + reverse_proxy http://{framerail_host} +}} + +*{files_domain} {{ +" + ); + + // TODO generate *.wjfiles.com interior + + str_writeln!( + &mut caddyfile, + " +# +# FALLBACK +# + +{} {{ + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://localhost:3000 +}}", + if local { + "http://,\nhttps://,\nlocalhost" + } else { + "http://,\nhttps://" + } + ); + + Ok(caddyfile) +} From 7eec5e4206dbc14da35eabcb0d2198c6cae1e6d5 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 23:00:07 -0400 Subject: [PATCH 262/306] Add CHECK constraint for www preferred domain. --- deepwell/migrations/20220906103252_deepwell.sql | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/deepwell/migrations/20220906103252_deepwell.sql b/deepwell/migrations/20220906103252_deepwell.sql index f4629e15b6..c29230070f 100644 --- a/deepwell/migrations/20220906103252_deepwell.sql +++ b/deepwell/migrations/20220906103252_deepwell.sql @@ -92,6 +92,13 @@ CREATE TABLE site ( custom_domain TEXT, -- Dependency cycle, add foreign key constraint after layout TEXT, -- Default page layout for the site + -- Special condition + -- The preferred site for the special 'www' site (the main page) must always be the + -- canonical domain. That is, if the main domain is "wikijump.com", then the + -- preferred site is "wikijump.com" (since the "www" is elided as a special case). 
+ CHECK (slug != 'www' OR custom_domain IS NULL), + + -- Enforce site slug uniqueness UNIQUE (slug, deleted_at) ); From 9539745a1db0dffc50451e9d11d0ce3112503070 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 23:00:20 -0400 Subject: [PATCH 263/306] Add comment explaining the custom_domain column. --- deepwell/migrations/20220906103252_deepwell.sql | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/deepwell/migrations/20220906103252_deepwell.sql b/deepwell/migrations/20220906103252_deepwell.sql index c29230070f..e0f6a61a82 100644 --- a/deepwell/migrations/20220906103252_deepwell.sql +++ b/deepwell/migrations/20220906103252_deepwell.sql @@ -89,7 +89,22 @@ CREATE TABLE site ( description TEXT NOT NULL, locale TEXT NOT NULL, default_page TEXT NOT NULL DEFAULT 'start', - custom_domain TEXT, -- Dependency cycle, add foreign key constraint after + + -- Dependency cycle, add foreign key constraint after. + -- + -- This field describes what the preferred domain is for this site. + -- + -- Say we have a site with the slug 'foo' and the main domain is 'wikijump.dev'. + -- Therefore, the canonical domain for this site is 'foo.wikijump.dev'. + -- + -- What is the preferred domain? It depends on the value of this column. + -- * NULL - This means the canonical domain is also the preferred domain. + -- * 'example.com' - This means that the custom domain 'example.com' is preferred. + -- + -- Observe that a site may have many custom domains, and this is unrelated to what + -- its preferred domain is. Of course, if the custom_domain column is not NULL, + -- then it must be one of these site domains, it cannot belong to another site. + custom_domain TEXT, layout TEXT, -- Default page layout for the site -- Special condition From 6ac9171fa180638fde82c6198499368fe9433ce5 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 23:12:37 -0400 Subject: [PATCH 264/306] Substitute both reverse proxy hosts. --- deepwell/src/endpoints/routing.rs | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index d0ec610690..ef1fa4f990 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -36,6 +36,7 @@ struct CaddyfileOptions { // Infra information framerail_host: String, + wws_host: String, } pub async fn generate_caddyfile( @@ -51,6 +52,7 @@ pub async fn generate_caddyfile( http_port, https_port, framerail_host, + wws_host, } = params.parse()?; let config = ctx.config(); @@ -110,7 +112,7 @@ pub async fn generate_caddyfile( }} redir @files https://{{vars.site_slug}}{files_domain}{{uri}} - reverse_proxy http://localhost:3000 + reverse_proxy http://{framerail_host} }} " ); @@ -125,7 +127,7 @@ pub async fn generate_caddyfile( # (serve_files) {{ - reverse_proxy http://{framerail_host} + reverse_proxy http://{wws_host} }} *{files_domain} {{ @@ -144,7 +146,7 @@ pub async fn generate_caddyfile( {} {{ request_header X-Wikijump-Special-Error 1 rewrite * /-/special-error/missing-site - reverse_proxy http://localhost:3000 + reverse_proxy http://{framerail_host} }}", if local { "http://,\nhttps://,\nlocalhost" From 1ba3a9b371bc01c65a2b8f1fbaa7d2cd51bc7da8 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Wed, 26 Mar 2025 23:57:17 -0400 Subject: [PATCH 265/306] Separate Caddyfile generation to pure function. 
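
The point of the split is testability: caddyfile_endpoint() does the async work (parsing
params, querying sites, aliases, and custom domains), then hands plain data to
generate_caddyfile(), which only formats strings. The pattern in miniature, with placeholder
types rather than the actual deepwell ones:

    /// Plain data gathered by the async endpoint.
    struct SiteRow {
        site_id: i64,
        slug: String,
    }

    /// Pure function: no database handle, no async, easy to unit test.
    fn render(sites: &[SiteRow]) -> String {
        let mut out = String::new();
        for site in sites {
            out.push_str(&format!("# site {} ({})\n", site.site_id, site.slug));
        }
        out
    }

    fn main() {
        let sites = vec![SiteRow { site_id: 1, slug: "www".to_string() }];
        print!("{}", render(&sites));
    }

    #[test]
    fn renders_each_site() {
        let sites = vec![SiteRow { site_id: 4, slug: "scp-wiki".to_string() }];
        assert!(render(&sites).contains("site 4 (scp-wiki)"));
    }
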
--- deepwell/src/api.rs | 2 +- deepwell/src/endpoints/routing.rs | 78 +++++++++++++++++++++++++------ 2 files changed, 65 insertions(+), 15 deletions(-) diff --git a/deepwell/src/api.rs b/deepwell/src/api.rs index 75d54111fc..adcba83f3c 100644 --- a/deepwell/src/api.rs +++ b/deepwell/src/api.rs @@ -191,7 +191,7 @@ async fn build_module(app_state: ServerState) -> anyhow::Result<RpcModule<Server register!("translate", translate_strings); // Web routing - register!("caddyfile", generate_caddyfile); + register!("caddyfile", caddyfile_endpoint); // Web server register!("page_view", page_view); diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index ef1fa4f990..5f48b1314b 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -19,43 +19,93 @@ */ use super::prelude::*; +use crate::config::Config; +use crate::models::alias::Model as AliasModel; +use crate::models::sea_orm_active_enums::AliasType; +use crate::models::site::{self, Entity as Site}; +use crate::models::site_domain::Model as SiteDomainModel; +use sea_orm::{EntityTrait, QuerySelect}; +use std::collections::HashMap; #[derive(Deserialize, Debug)] -struct CaddyfileOptions { +pub struct CaddyfileOptions { #[serde(default)] - debug: bool, + pub debug: bool, #[serde(default)] - local: bool, + pub local: bool, #[serde(default)] - http_port: Option<i64>, + pub http_port: Option<i64>, #[serde(default)] - https_port: Option<i64>, + pub https_port: Option<i64>, // Infra information - framerail_host: String, - wws_host: String, + pub framerail_host: String, + pub wws_host: String, } -pub async fn generate_caddyfile( +#[derive(Debug)] +pub struct SiteDomainData { + sites: Vec<(i64, String, Option<String>)>, + domains: HashMap<i64, (Vec<AliasModel>, Vec<SiteDomainModel>)>, +} + +pub async fn caddyfile_endpoint( ctx: &ServiceContext<'_>, params: Params<'static>, ) -> Result<String> { - // TODO split into pure function - info!("Generating Caddyfile for current sites"); + let options: CaddyfileOptions = params.parse()?; + let config = ctx.config(); + + // Gather necessary site data + let txn = ctx.transaction(); + + let sites: Vec<(i64, String, Option<String>)> = Site::find() + .select_only() + .column(site::Column::SiteId) + .column(site::Column::Slug) + .column(site::Column::CustomDomain) + .into_tuple() + .all(txn) + .await?; + + let domains = { + let mut extras = HashMap::with_capacity(sites.len()); + + for &(site_id, _, _) in &sites { + let site_aliases = + AliasService::get_all(ctx, AliasType::Site, site_id).await?; - let CaddyfileOptions { + let site_domains = DomainService::list_custom(ctx, site_id).await?; + extras.insert(site_id, (site_aliases, site_domains)); + } + + extras + }; + + Ok(generate_caddyfile( + config, + options, + &SiteDomainData { sites, domains }, + )) +} + +pub fn generate_caddyfile( + config: &Config, + CaddyfileOptions { debug, local, http_port, https_port, framerail_host, wws_host, - } = params.parse()?; + }: CaddyfileOptions, + SiteDomainData { sites, domains }: &SiteDomainData, +) -> String { + info!("Generating Caddyfile for {} sites", sites.len()); - let config = ctx.config(); let main_domain = &config.main_domain; let files_domain = &config.files_domain; @@ -155,5 +205,5 @@ pub async fn generate_caddyfile( } ); - Ok(caddyfile) + caddyfile } From b5ccd13637a03e31a3a7934d2edbaf95b01e93ed Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Thu, 27 Mar 2025 00:12:03 -0400 Subject: [PATCH 266/306] Implement wjfiles section of the 
Caddyfile. See the added comments for an explanation of how my system properly extracts the right part of the hostname to get the site slug. --- deepwell/src/endpoints/routing.rs | 34 ++++++++++++++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index 5f48b1314b..3b3cdcf463 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -184,7 +184,39 @@ pub fn generate_caddyfile( " ); - // TODO generate *.wjfiles.com interior + for (site_id, site_slug, _) in sites { + str_writeln!(&mut caddyfile, "@{site_slug} host {site_slug}{files_domain}"); + str_writeln!(&mut caddyfile, "vars @{site_slug} site_id {site_id}"); + } + + str_writeln!( + &mut caddyfile, + " + request_header X-Wikijump-Site-Slug {{labels.{}}} + request_header X-Wikijump-Site-Id {{vars.site_id}} + + import serve_files +}}", + // What part of the domain to split + // + // So if the files domain (with dot) is ".wjfiles.com", there are 2 periods. + // Any site slugs would be before that first dot, such as in "foo.wjfiles.com", + // which would be index 2 using Caddy's domain addressing system: + // + // 0 - "com" + // 1 - "wjfiles" + // 2 - "foo" <-- what we want + // + // An additional example, say the files domain is ".host.wikijump.example.com", + // then there are 4 dots in the files domain, and thus the zero-based index is 4: + // + // 0 - "com" + // 1 - "example" + // 2 - "wikijump" + // 3 - "host" + // 4 - "foo" <-- what we want + files_domain.chars().filter(|&c| c == '.').count(), + ); str_writeln!( &mut caddyfile, From 26d7613456ece7e8e6d25889dc0d9a21be7351dd Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Thu, 27 Mar 2025 00:15:11 -0400 Subject: [PATCH 267/306] Add test stub to add before PR finalization. --- deepwell/src/endpoints/routing.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index 3b3cdcf463..28a823e595 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -239,3 +239,9 @@ pub fn generate_caddyfile( caddyfile } + +#[test] +fn test_caddyfile_gen() { + // TODO + todo!() +} From a3a27d20ab0e54b76078c464cc0845acda0914fc Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Thu, 27 Mar 2025 00:28:42 -0400 Subject: [PATCH 268/306] Rename column custom_domain -> preferred_domain. --- deepwell/migrations/20220906103252_deepwell.sql | 15 ++++++++++----- deepwell/src/endpoints/routing.rs | 2 +- deepwell/src/models/site.rs | 6 +++--- deepwell/src/services/domain/service.rs | 2 +- 4 files changed, 15 insertions(+), 10 deletions(-) diff --git a/deepwell/migrations/20220906103252_deepwell.sql b/deepwell/migrations/20220906103252_deepwell.sql index e0f6a61a82..f8f635ba5b 100644 --- a/deepwell/migrations/20220906103252_deepwell.sql +++ b/deepwell/migrations/20220906103252_deepwell.sql @@ -93,6 +93,8 @@ CREATE TABLE site ( -- Dependency cycle, add foreign key constraint after. -- -- This field describes what the preferred domain is for this site. + -- All sites have one preferred domain, and so a value of NULL is + -- also meaningful here. -- -- Say we have a site with the slug 'foo' and the main domain is 'wikijump.dev'. -- Therefore, the canonical domain for this site is 'foo.wikijump.dev'. @@ -101,17 +103,20 @@ CREATE TABLE site ( -- * NULL - This means the canonical domain is also the preferred domain. 
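
To restate the semantics the new name is meant to capture: NULL means "use the canonical
domain", which is {slug}.{main domain} for normal sites and the bare main domain for the
special 'www' site, while a non-NULL value must be one of the site's own custom domains.
A small sketch of the resolution rule, simplified from the DomainService logic; the example
domains below are hypothetical:

    const DEFAULT_SITE_SLUG: &str = "www";

    /// Which domain a site should canonically be reached at.
    /// `main_domain` is the instance's apex domain, e.g. "wikijump.com".
    fn preferred_domain(slug: &str, preferred: Option<&str>, main_domain: &str) -> String {
        match preferred {
            // An explicit preferred (custom) domain wins.
            Some(domain) => domain.to_string(),
            // The special 'www' site lives on the apex domain itself.
            None if slug == DEFAULT_SITE_SLUG => main_domain.to_string(),
            // Everyone else falls back to their canonical domain.
            None => format!("{slug}.{main_domain}"),
        }
    }

    fn main() {
        assert_eq!(preferred_domain("www", None, "wikijump.com"), "wikijump.com");
        assert_eq!(
            preferred_domain("scp-wiki", None, "wikijump.com"),
            "scp-wiki.wikijump.com",
        );
        assert_eq!(
            preferred_domain("scp-wiki", Some("scpwiki.com"), "wikijump.com"),
            "scpwiki.com",
        );
    }
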
-- * 'example.com' - This means that the custom domain 'example.com' is preferred. -- + -- This value should NEVER have a main domain component. It must match a corresponding + -- row in the site_domain (custom domains) table. + -- -- Observe that a site may have many custom domains, and this is unrelated to what - -- its preferred domain is. Of course, if the custom_domain column is not NULL, + -- its preferred domain is. Of course, if the preferred_domain column is not NULL, -- then it must be one of these site domains, it cannot belong to another site. - custom_domain TEXT, + preferred_domain TEXT, layout TEXT, -- Default page layout for the site -- Special condition -- The preferred site for the special 'www' site (the main page) must always be the -- canonical domain. That is, if the main domain is "wikijump.com", then the -- preferred site is "wikijump.com" (since the "www" is elided as a special case). - CHECK (slug != 'www' OR custom_domain IS NULL), + CHECK (slug != 'www' OR preferred_domain IS NULL), -- Enforce site slug uniqueness UNIQUE (slug, deleted_at) @@ -126,8 +131,8 @@ CREATE TABLE site_domain ( ); ALTER TABLE site - ADD CONSTRAINT site_custom_domain_fk - FOREIGN KEY (custom_domain) REFERENCES site_domain(domain); + ADD CONSTRAINT site_preferred_domain_fk + FOREIGN KEY (preferred_domain) REFERENCES site_domain(domain); -- -- Aliases diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index 28a823e595..7752a9a7ff 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -66,7 +66,7 @@ pub async fn caddyfile_endpoint( .select_only() .column(site::Column::SiteId) .column(site::Column::Slug) - .column(site::Column::CustomDomain) + .column(site::Column::PreferredDomain) .into_tuple() .all(txn) .await?; diff --git a/deepwell/src/models/site.rs b/deepwell/src/models/site.rs index 1f9844cb8e..97194d8250 100644 --- a/deepwell/src/models/site.rs +++ b/deepwell/src/models/site.rs @@ -28,7 +28,7 @@ pub struct Model { #[sea_orm(column_type = "Text")] pub default_page: String, #[sea_orm(column_type = "Text", nullable)] - pub custom_domain: Option<String>, + pub preferred_domain: Option<String>, #[sea_orm(column_type = "Text", nullable)] pub layout: Option<String>, } @@ -55,12 +55,12 @@ pub enum Relation { SiteDomain, #[sea_orm( belongs_to = "super::site_domain::Entity", - from = "Column::CustomDomain", + from = "Column::PreferredDomain", to = "super::site_domain::Column::Domain", on_update = "NoAction", on_delete = "NoAction" )] - SiteDomainCustomDomain, + SiteDomainPreferredDomain, } impl Related<super::file::Entity> for Entity { diff --git a/deepwell/src/services/domain/service.rs b/deepwell/src/services/domain/service.rs index 492b69ea49..18e4f12bd1 100644 --- a/deepwell/src/services/domain/service.rs +++ b/deepwell/src/services/domain/service.rs @@ -227,7 +227,7 @@ impl DomainService { site.slug, site.site_id, ); - match &site.custom_domain { + match &site.preferred_domain { Some(domain) => cow!(domain), None if site.slug == DEFAULT_SITE_SLUG => Self::www_domain(config), None => Cow::Owned(Self::get_canonical(config, &site.slug)), From bf5ca6a8fcdb022ecb842eff779cdd470a8355c7 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 02:32:38 -0400 Subject: [PATCH 269/306] Implement site bodies in Caddyfile. 
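
Each site can be reachable under several hostnames: its canonical domain, any custom
domains, and the canonical domains of its alias slugs. Only the preferred domain gets a real
server block (site headers plus reverse proxy); every other hostname becomes a redirect to
it. A simplified sketch of that decision, kept separate from the string formatting (the enum
here is illustrative, not a type in this commit):

    /// What kind of Caddy block a hostname should get.
    #[derive(Debug, PartialEq)]
    enum DomainEntry {
        /// Preferred domain: set site headers and reverse proxy.
        Serve { site_id: i64, site_slug: String },
        /// Anything else: redirect to the preferred domain.
        Redirect { target: String },
    }

    fn classify(domain: &str, preferred: &str, site_id: i64, site_slug: &str) -> DomainEntry {
        if domain == preferred {
            DomainEntry::Serve {
                site_id,
                site_slug: site_slug.to_string(),
            }
        } else {
            DomainEntry::Redirect {
                target: format!("https://{preferred}"),
            }
        }
    }

    fn main() {
        let entry = classify("scp-wiki.wikijump.com", "scpwiki.com", 4, "scp-wiki");
        assert_eq!(
            entry,
            DomainEntry::Redirect { target: "https://scpwiki.com".to_string() },
        );
    }
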
--- deepwell/src/endpoints/routing.rs | 60 ++++++++++++++++++++++++++++- deepwell/src/services/domain/mod.rs | 3 +- 2 files changed, 60 insertions(+), 3 deletions(-) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index 7752a9a7ff..77977872a1 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -24,7 +24,9 @@ use crate::models::alias::Model as AliasModel; use crate::models::sea_orm_active_enums::AliasType; use crate::models::site::{self, Entity as Site}; use crate::models::site_domain::Model as SiteDomainModel; +use crate::services::domain::DEFAULT_SITE_SLUG; use sea_orm::{EntityTrait, QuerySelect}; +use std::borrow::Cow; use std::collections::HashMap; #[derive(Deserialize, Debug)] @@ -106,7 +108,7 @@ pub fn generate_caddyfile( ) -> String { info!("Generating Caddyfile for {} sites", sites.len()); - let main_domain = &config.main_domain; + let main_domain_no_dot = &config.main_domain_no_dot; let files_domain = &config.files_domain; let mut caddyfile = str!( @@ -167,7 +169,61 @@ pub fn generate_caddyfile( " ); - // TODO generate main site sections + for (site_id, site_slug, preferred_domain) in sites { + let (aliases, domains) = &domains[site_id]; + + // Get canonical and preferred domains, for later generation + let canonical_domain = if site_slug == DEFAULT_SITE_SLUG { + Cow::Borrowed(main_domain_no_dot) + } else { + Cow::Owned(DomainService::get_canonical(config, site_slug)) + }; + + let preferred_domain: &str = preferred_domain.as_ref().unwrap_or(&canonical_domain); + + // Closure to generate a domain entry + let mut generate_entry = |domain: &str| { + if domain == preferred_domain { + str_writeln!( + &mut caddyfile, + " +{domain} {{ + vars {{ + site_id {site_id} + site_slug {site_slug} + }} + + request_header X-Wikijump-Site-Id {{vars.site_id}} + request_header X-Wikijump-Site-Slug {{vars.site_slug}} + + import serve_main +}} +"); + } else { + str_writeln!( + &mut caddyfile, + " +{domain} {{ + redir https://{preferred_domain}{{uri}} +}} +"); + } + }; + + // Canonical domain + generate_entry(&canonical_domain); + + // Custom domains + for model in domains { + generate_entry(&model.domain); + } + + // Aliases (all redirects) + for alias in aliases { + let domain = DomainService::get_canonical(config, &alias.slug); + generate_entry(&domain); + } + } str_writeln!( &mut caddyfile, diff --git a/deepwell/src/services/domain/mod.rs b/deepwell/src/services/domain/mod.rs index e65fc20ffc..640300c36c 100644 --- a/deepwell/src/services/domain/mod.rs +++ b/deepwell/src/services/domain/mod.rs @@ -22,10 +22,11 @@ mod prelude { pub use super::super::prelude::*; pub use super::structs::*; + pub use super::service::DEFAULT_SITE_SLUG; } mod service; mod structs; -pub use self::service::DomainService; +pub use self::service::{DomainService, DEFAULT_SITE_SLUG}; pub use self::structs::*; From b206b7fb247410daf30f1027b7abffdebbe166e4 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 02:42:35 -0400 Subject: [PATCH 270/306] Add www redirects. 
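
Since no site is ever served from a www-prefixed hostname, every generated domain also gets
a www.{domain} block that simply redirects to the preferred domain; this is what makes
www.wikijump.com land on wikijump.com. The emitted text, reproduced as a tiny standalone
sketch (the real code writes it inside the generate_entry closure; the example domains are
hypothetical):

    /// Render the extra "www." redirect block emitted for each domain.
    fn www_redirect_block(domain: &str, preferred_domain: &str) -> String {
        format!("www.{domain} {{\n\tredir https://{preferred_domain}{{uri}}\n}}\n")
    }

    fn main() {
        // Prints a www.scp-wiki.wikijump.com block that redirects
        // to https://scpwiki.com{uri}.
        print!("{}", www_redirect_block("scp-wiki.wikijump.com", "scpwiki.com"));
    }
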
--- deepwell/src/endpoints/routing.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index 77977872a1..d90b46b592 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -184,6 +184,8 @@ pub fn generate_caddyfile( // Closure to generate a domain entry let mut generate_entry = |domain: &str| { if domain == preferred_domain { + // Main content, for a preferred domain. + // This is where the request is actually reverse proxied through. str_writeln!( &mut caddyfile, " @@ -200,6 +202,7 @@ pub fn generate_caddyfile( }} "); } else { + // Generate a redirect to the preferred domain. str_writeln!( &mut caddyfile, " @@ -208,6 +211,17 @@ pub fn generate_caddyfile( }} "); } + + // Generate a redirect for the corresponding "www" domain. + // This shouldn't be used so we can redirect for all of them. + // This also naturally captures www.wikijump.com -> wikijump.com. + str_writeln!( + &mut caddyfile, + " +www.{domain} {{ + redir https://{preferred_domain}{{uri}} +}} +"); }; // Canonical domain From 829b78965401c2232056f038d683accee1b0f4a3 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 02:44:39 -0400 Subject: [PATCH 271/306] Run rustfmt. --- deepwell/src/endpoints/routing.rs | 19 +++++++++++++------ deepwell/src/services/domain/mod.rs | 2 +- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index d90b46b592..89d32b0b94 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -179,7 +179,8 @@ pub fn generate_caddyfile( Cow::Owned(DomainService::get_canonical(config, site_slug)) }; - let preferred_domain: &str = preferred_domain.as_ref().unwrap_or(&canonical_domain); + let preferred_domain: &str = + preferred_domain.as_ref().unwrap_or(&canonical_domain); // Closure to generate a domain entry let mut generate_entry = |domain: &str| { @@ -200,7 +201,8 @@ pub fn generate_caddyfile( import serve_main }} -"); +" + ); } else { // Generate a redirect to the preferred domain. str_writeln!( @@ -209,7 +211,8 @@ pub fn generate_caddyfile( {domain} {{ redir https://{preferred_domain}{{uri}} }} -"); +" + ); } // Generate a redirect for the corresponding "www" domain. 
@@ -221,7 +224,8 @@ pub fn generate_caddyfile( www.{domain} {{ redir https://{preferred_domain}{{uri}} }} -"); +" + ); }; // Canonical domain @@ -235,7 +239,7 @@ www.{domain} {{ // Aliases (all redirects) for alias in aliases { let domain = DomainService::get_canonical(config, &alias.slug); - generate_entry(&domain); + generate_entry(&domain); } } @@ -255,7 +259,10 @@ www.{domain} {{ ); for (site_id, site_slug, _) in sites { - str_writeln!(&mut caddyfile, "@{site_slug} host {site_slug}{files_domain}"); + str_writeln!( + &mut caddyfile, + "@{site_slug} host {site_slug}{files_domain}", + ); str_writeln!(&mut caddyfile, "vars @{site_slug} site_id {site_id}"); } diff --git a/deepwell/src/services/domain/mod.rs b/deepwell/src/services/domain/mod.rs index 640300c36c..3be7da7952 100644 --- a/deepwell/src/services/domain/mod.rs +++ b/deepwell/src/services/domain/mod.rs @@ -21,8 +21,8 @@ #[allow(unused_imports)] mod prelude { pub use super::super::prelude::*; - pub use super::structs::*; pub use super::service::DEFAULT_SITE_SLUG; + pub use super::structs::*; } mod service; From ca7e74e6005c0773cc85bd9e1ad0f2a9510df1a3 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 02:45:45 -0400 Subject: [PATCH 272/306] Combine two formatted writes into one. --- deepwell/src/endpoints/routing.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index 89d32b0b94..ea837ad392 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -261,9 +261,11 @@ www.{domain} {{ for (site_id, site_slug, _) in sites { str_writeln!( &mut caddyfile, - "@{site_slug} host {site_slug}{files_domain}", + " + @{site_slug} host {site_slug}{files_domain} + vars @{site_slug} site_id {site_id} +" ); - str_writeln!(&mut caddyfile, "vars @{site_slug} site_id {site_id}"); } str_writeln!( From 1bebb7fdfaa62df3fd48e17d1b58b708f01b5d78 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 02:57:36 -0400 Subject: [PATCH 273/306] Modify case of site info headers in framerail. This isn't necessary since Headers.get() is case-insensitive, but we may as well match what we generate in our Caddyfile. --- framerail/src/lib/server/load/site-info.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/framerail/src/lib/server/load/site-info.ts b/framerail/src/lib/server/load/site-info.ts index ad63e1b9e2..638983a0c3 100644 --- a/framerail/src/lib/server/load/site-info.ts +++ b/framerail/src/lib/server/load/site-info.ts @@ -4,8 +4,8 @@ // If the headers are first set by the client, those values // get erased. -const SITE_ID_HEADER = "x-wikijump-site-id" -const SITE_SLUG_HEADER = "x-wikijump-site-slug" +const SITE_ID_HEADER = "X-Wikijump-Site-Id" +const SITE_SLUG_HEADER = "X-Wikijump-Site-Slug" export interface SiteInfo { siteId: number From 637aebb7c90f86e1ea431aa65a5d9fa2cd1625e9 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 03:30:53 -0400 Subject: [PATCH 274/306] Merge in www redirect for cleaner output. 
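Rather than emitting a separate www block for every domain, the redirect
case now lists the bare and "www" forms together as one site address. For
a hypothetical non-preferred canonical domain this looks roughly like:

scp-wiki.wikijump.com,
www.scp-wiki.wikijump.com {
    redir https://scpwiki.com{uri}
}

The preferred domain keeps its own www redirect block next to its main
content block.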
--- deepwell/src/endpoints/routing.rs | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index ea837ad392..d4c8573393 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -183,6 +183,12 @@ pub fn generate_caddyfile( preferred_domain.as_ref().unwrap_or(&canonical_domain); // Closure to generate a domain entry + // + // Then, generate a redirect for the corresponding "www" subdomain. + // This shouldn't be used so we should just have it point away to + // the right location. + // + // This also naturally has the benefit of capturing www.wikijump.com -> wikijump.com. let mut generate_entry = |domain: &str| { if domain == preferred_domain { // Main content, for a preferred domain. @@ -201,6 +207,10 @@ pub fn generate_caddyfile( import serve_main }} + +www.{domain} {{ + redir https://{preferred_domain}{{uri}} +}} " ); } else { @@ -208,24 +218,13 @@ pub fn generate_caddyfile( str_writeln!( &mut caddyfile, " -{domain} {{ +{domain}, +www.{domain} {{ redir https://{preferred_domain}{{uri}} }} " ); } - - // Generate a redirect for the corresponding "www" domain. - // This shouldn't be used so we can redirect for all of them. - // This also naturally captures www.wikijump.com -> wikijump.com. - str_writeln!( - &mut caddyfile, - " -www.{domain} {{ - redir https://{preferred_domain}{{uri}} -}} -" - ); }; // Canonical domain From 74e680a8ab1371110f9888b5874efd66c4807aeb Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 03:49:34 -0400 Subject: [PATCH 275/306] Improve spacing in Caddyfile generation. --- deepwell/src/endpoints/routing.rs | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index d4c8573393..d08d7464dd 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -137,7 +137,7 @@ pub fn generate_caddyfile( str_writeln!(&mut caddyfile, "\tskip_install_trust"); } - str_writeln!( + str_write!( &mut caddyfile, "\ }} @@ -193,7 +193,7 @@ pub fn generate_caddyfile( if domain == preferred_domain { // Main content, for a preferred domain. // This is where the request is actually reverse proxied through. - str_writeln!( + str_write!( &mut caddyfile, " {domain} {{ @@ -215,7 +215,7 @@ www.{domain} {{ ); } else { // Generate a redirect to the preferred domain. - str_writeln!( + str_write!( &mut caddyfile, " {domain}, @@ -242,7 +242,7 @@ www.{domain} {{ } } - str_writeln!( + str_write!( &mut caddyfile, " # @@ -253,12 +253,11 @@ www.{domain} {{ reverse_proxy http://{wws_host} }} -*{files_domain} {{ -" +*{files_domain} {{" ); for (site_id, site_slug, _) in sites { - str_writeln!( + str_write!( &mut caddyfile, " @{site_slug} host {site_slug}{files_domain} @@ -267,14 +266,15 @@ www.{domain} {{ ); } - str_writeln!( + str_write!( &mut caddyfile, " request_header X-Wikijump-Site-Slug {{labels.{}}} request_header X-Wikijump-Site-Id {{vars.site_id}} import serve_files -}}", +}} +", // What part of the domain to split // // So if the files domain (with dot) is ".wjfiles.com", there are 2 periods. 
@@ -296,7 +296,7 @@ www.{domain} {{ files_domain.chars().filter(|&c| c == '.').count(), ); - str_writeln!( + str_write!( &mut caddyfile, " # From b525a12a10238d3f6ed96814ea9fa86d7f5683fa Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 03:50:26 -0400 Subject: [PATCH 276/306] Add comment in serve_main. --- deepwell/src/endpoints/routing.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index d08d7464dd..ae2a225446 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -164,6 +164,8 @@ pub fn generate_caddyfile( }} redir @files https://{{vars.site_slug}}{files_domain}{{uri}} + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point reverse_proxy http://{framerail_host} }} " From 7eb0c686795df7fe10037a9bf20cef285ddb4bb4 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 03:52:03 -0400 Subject: [PATCH 277/306] Fix typo in comment. --- deepwell/src/endpoints/routing.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index ae2a225446..673d0aa6e4 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -113,7 +113,7 @@ pub fn generate_caddyfile( let mut caddyfile = str!( "\ -# Globals option +# Global options { metrics { per_host From bfedb5257069f7a58a60c44daa880e039fd59eca Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 03:52:28 -0400 Subject: [PATCH 278/306] Update stock Caddyfile in local. --- install/local/caddy/Caddyfile | 83 +++++++++++++++++++---------------- 1 file changed, 46 insertions(+), 37 deletions(-) diff --git a/install/local/caddy/Caddyfile b/install/local/caddy/Caddyfile index 3ecb0d9fda..2aa5aefe46 100644 --- a/install/local/caddy/Caddyfile +++ b/install/local/caddy/Caddyfile @@ -3,12 +3,7 @@ metrics { per_host } - - http_port 8000 - https_port 8443 - debug - skip_install_trust } @@ -17,7 +12,7 @@ # (serve_main) { - # Routes that exist on the files server + # Redirect, route is on the files server @files { path /*/code/* path /*/html/* @@ -34,14 +29,15 @@ } redir @files https://{vars.site_slug}.wjfiles.localhost{uri} - reverse_proxy http://localhost:3000 + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://framerail:3000 } -# Preferred domain -scpwiki.localhost { +wikijump.localhost { vars { - site_id 4 - site_slug scp-wiki + site_id 1 + site_slug www } request_header X-Wikijump-Site-Id {vars.site_id} @@ -50,6 +46,10 @@ scpwiki.localhost { import serve_main } +www.wikijump.localhost { + redir https://wikijump.localhost{uri} +} + test.wikijump.localhost { vars { site_id 2 @@ -62,26 +62,24 @@ test.wikijump.localhost { import serve_main } -# Alternate domain (canonical) -scp-wiki.wikijump.localhost { - redir https://scpwiki.localhost{uri} +www.test.wikijump.localhost { + redir https://test.wikijump.localhost{uri} } -# Alternate domain (alias) -scpwiki.wikijump.localhost { - redir https://scpwiki.localhost{uri} +wikijump-qdpi9.ondigitalocean.app, +www.wikijump-qdpi9.ondigitalocean.app { + redir https://test.wikijump.localhost{uri} } -# Alternate domain (custom) -testdomain.localhost { +check.wikijump.localhost, +www.check.wikijump.localhost { redir 
https://test.wikijump.localhost{uri} } -# Other sites -wikijump.localhost { +template-en.wikijump.localhost { vars { - site_id 1 - site_slug www + site_id 3 + site_slug template-en } request_header X-Wikijump-Site-Id {vars.site_id} @@ -90,10 +88,24 @@ wikijump.localhost { import serve_main } -template-en.wikijump.localhost { +www.template-en.wikijump.localhost { + redir https://template-en.wikijump.localhost{uri} +} + +scp-wiki.wikijump.localhost, +www.scp-wiki.wikijump.localhost { + redir https://scpwiki.localhost{uri} +} + +scpwiki.dev, +www.scpwiki.dev { + redir https://scpwiki.localhost{uri} +} + +scpwiki.localhost { vars { - site_id 3 - site_slug template-en + site_id 4 + site_slug scp-wiki } request_header X-Wikijump-Site-Id {vars.site_id} @@ -102,12 +114,16 @@ template-en.wikijump.localhost { import serve_main } +www.scpwiki.localhost { + redir https://scpwiki.localhost{uri} +} + # # FILES # (serve_files) { - reverse_proxy http://localhost:7000 + reverse_proxy http://wws:7000 } *.wjfiles.localhost { @@ -117,7 +133,7 @@ template-en.wikijump.localhost { @test host test.wjfiles.localhost vars @test site_id 2 - @template-en host test.wjfiles.localhost + @template-en host template-en.wjfiles.localhost vars @template-en site_id 3 @scp-wiki host scp-wiki.wjfiles.localhost @@ -130,20 +146,13 @@ template-en.wikijump.localhost { } # -# OTHER +# FALLBACK # -# Remove www -www.wikijump.localhost, -www.scpwiki.localhost { - redir {labels.1}.{labels.0}{uri} -} - -# Fallback route http://, https://, localhost { request_header X-Wikijump-Special-Error 1 rewrite * /-/special-error/missing-site - reverse_proxy http://localhost:3000 + reverse_proxy http://framerail:3000 } From ee326db711902f21e9adaa052cf922d4fe179b30 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 04:19:43 -0400 Subject: [PATCH 279/306] Move Caddyfile generation logic to a new service. This code had been getting complicated for a while, so it made less and less sense to keep it in an endpoint file. This also gives a clear place for all of our Caddyfile generation tests. 
--- deepwell/src/api.rs | 2 +- deepwell/src/endpoints/mod.rs | 12 +- deepwell/src/endpoints/routing.rs | 303 +------------------------ deepwell/src/services/caddy/mod.rs | 34 +++ deepwell/src/services/caddy/service.rs | 300 ++++++++++++++++++++++++ deepwell/src/services/caddy/structs.rs | 48 ++++ deepwell/src/services/caddy/test.rs | 25 ++ deepwell/src/services/mod.rs | 2 + 8 files changed, 419 insertions(+), 307 deletions(-) create mode 100644 deepwell/src/services/caddy/mod.rs create mode 100644 deepwell/src/services/caddy/service.rs create mode 100644 deepwell/src/services/caddy/structs.rs create mode 100644 deepwell/src/services/caddy/test.rs diff --git a/deepwell/src/api.rs b/deepwell/src/api.rs index adcba83f3c..75d54111fc 100644 --- a/deepwell/src/api.rs +++ b/deepwell/src/api.rs @@ -191,7 +191,7 @@ async fn build_module(app_state: ServerState) -> anyhow::Result<RpcModule<Server register!("translate", translate_strings); // Web routing - register!("caddyfile", caddyfile_endpoint); + register!("caddyfile", generate_caddyfile); // Web server register!("page_view", page_view); diff --git a/deepwell/src/endpoints/mod.rs b/deepwell/src/endpoints/mod.rs index 3e70ba1e2a..14851e15d6 100644 --- a/deepwell/src/endpoints/mod.rs +++ b/deepwell/src/endpoints/mod.rs @@ -31,12 +31,12 @@ mod prelude { pub use crate::api::ServerState; pub use crate::services::{ - AliasService, BlobService, CategoryService, DomainService, Error as ServiceError, - FileRevisionService, FileService, LinkService, MessageReportService, - MessageService, MfaService, PageRevisionService, PageService, ParentService, - RelationService, RenderService, Result, ScoreService, ServiceContext, - SessionService, SettingsService, SiteService, StdResult, TextService, - UserService, ViewService, VoteService, + AliasService, BlobService, CaddyService, CategoryService, DomainService, + Error as ServiceError, FileRevisionService, FileService, LinkService, + MessageReportService, MessageService, MfaService, PageRevisionService, + PageService, ParentService, RelationService, RenderService, Result, ScoreService, + ServiceContext, SessionService, SettingsService, SiteService, StdResult, + TextService, UserService, ViewService, VoteService, }; pub use jsonrpsee::types::params::Params; pub use std::convert::TryFrom; diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index 673d0aa6e4..a178bda04b 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -19,309 +19,12 @@ */ use super::prelude::*; -use crate::config::Config; -use crate::models::alias::Model as AliasModel; -use crate::models::sea_orm_active_enums::AliasType; -use crate::models::site::{self, Entity as Site}; -use crate::models::site_domain::Model as SiteDomainModel; -use crate::services::domain::DEFAULT_SITE_SLUG; -use sea_orm::{EntityTrait, QuerySelect}; -use std::borrow::Cow; -use std::collections::HashMap; +use crate::services::caddy::CaddyfileOptions; -#[derive(Deserialize, Debug)] -pub struct CaddyfileOptions { - #[serde(default)] - pub debug: bool, - - #[serde(default)] - pub local: bool, - - #[serde(default)] - pub http_port: Option<i64>, - - #[serde(default)] - pub https_port: Option<i64>, - - // Infra information - pub framerail_host: String, - pub wws_host: String, -} - -#[derive(Debug)] -pub struct SiteDomainData { - sites: Vec<(i64, String, Option<String>)>, - domains: HashMap<i64, (Vec<AliasModel>, Vec<SiteDomainModel>)>, -} - -pub async fn caddyfile_endpoint( +pub async fn generate_caddyfile( ctx: 
&ServiceContext<'_>, params: Params<'static>, ) -> Result<String> { let options: CaddyfileOptions = params.parse()?; - let config = ctx.config(); - - // Gather necessary site data - let txn = ctx.transaction(); - - let sites: Vec<(i64, String, Option<String>)> = Site::find() - .select_only() - .column(site::Column::SiteId) - .column(site::Column::Slug) - .column(site::Column::PreferredDomain) - .into_tuple() - .all(txn) - .await?; - - let domains = { - let mut extras = HashMap::with_capacity(sites.len()); - - for &(site_id, _, _) in &sites { - let site_aliases = - AliasService::get_all(ctx, AliasType::Site, site_id).await?; - - let site_domains = DomainService::list_custom(ctx, site_id).await?; - extras.insert(site_id, (site_aliases, site_domains)); - } - - extras - }; - - Ok(generate_caddyfile( - config, - options, - &SiteDomainData { sites, domains }, - )) -} - -pub fn generate_caddyfile( - config: &Config, - CaddyfileOptions { - debug, - local, - http_port, - https_port, - framerail_host, - wws_host, - }: CaddyfileOptions, - SiteDomainData { sites, domains }: &SiteDomainData, -) -> String { - info!("Generating Caddyfile for {} sites", sites.len()); - - let main_domain_no_dot = &config.main_domain_no_dot; - let files_domain = &config.files_domain; - - let mut caddyfile = str!( - "\ -# Global options -{ - metrics { - per_host - } -" - ); - - if let Some(port) = http_port { - str_writeln!(&mut caddyfile, "\thttp_port {port}"); - } - - if let Some(port) = https_port { - str_writeln!(&mut caddyfile, "\thttps_port {port}"); - } - - if debug { - str_writeln!(&mut caddyfile, "\tdebug"); - } - - if local { - str_writeln!(&mut caddyfile, "\tskip_install_trust"); - } - - str_write!( - &mut caddyfile, - "\ -}} - -# -# MAIN -# - -(serve_main) {{ - # Redirect, route is on the files server - @files {{ - path /*/code/* - path /*/html/* - path /*/file/* # for the /{{slug}}/file/{{filename}} convenience routes - path /*/download/* - path /local--files/* - path /local--code/* - path /local--html/* - path /-/files/* - path /-/file/* - path /-/download/* - path /-/code/* - path /-/html/* - }} - redir @files https://{{vars.site_slug}}{files_domain}{{uri}} - - # Finally, proxy to framerail to get the actual HTML - # Note, the x-wikijump-site-* headers have already been set at this point - reverse_proxy http://{framerail_host} -}} -" - ); - - for (site_id, site_slug, preferred_domain) in sites { - let (aliases, domains) = &domains[site_id]; - - // Get canonical and preferred domains, for later generation - let canonical_domain = if site_slug == DEFAULT_SITE_SLUG { - Cow::Borrowed(main_domain_no_dot) - } else { - Cow::Owned(DomainService::get_canonical(config, site_slug)) - }; - - let preferred_domain: &str = - preferred_domain.as_ref().unwrap_or(&canonical_domain); - - // Closure to generate a domain entry - // - // Then, generate a redirect for the corresponding "www" subdomain. - // This shouldn't be used so we should just have it point away to - // the right location. - // - // This also naturally has the benefit of capturing www.wikijump.com -> wikijump.com. - let mut generate_entry = |domain: &str| { - if domain == preferred_domain { - // Main content, for a preferred domain. - // This is where the request is actually reverse proxied through. 
- str_write!( - &mut caddyfile, - " -{domain} {{ - vars {{ - site_id {site_id} - site_slug {site_slug} - }} - - request_header X-Wikijump-Site-Id {{vars.site_id}} - request_header X-Wikijump-Site-Slug {{vars.site_slug}} - - import serve_main -}} - -www.{domain} {{ - redir https://{preferred_domain}{{uri}} -}} -" - ); - } else { - // Generate a redirect to the preferred domain. - str_write!( - &mut caddyfile, - " -{domain}, -www.{domain} {{ - redir https://{preferred_domain}{{uri}} -}} -" - ); - } - }; - - // Canonical domain - generate_entry(&canonical_domain); - - // Custom domains - for model in domains { - generate_entry(&model.domain); - } - - // Aliases (all redirects) - for alias in aliases { - let domain = DomainService::get_canonical(config, &alias.slug); - generate_entry(&domain); - } - } - - str_write!( - &mut caddyfile, - " -# -# FILES -# - -(serve_files) {{ - reverse_proxy http://{wws_host} -}} - -*{files_domain} {{" - ); - - for (site_id, site_slug, _) in sites { - str_write!( - &mut caddyfile, - " - @{site_slug} host {site_slug}{files_domain} - vars @{site_slug} site_id {site_id} -" - ); - } - - str_write!( - &mut caddyfile, - " - request_header X-Wikijump-Site-Slug {{labels.{}}} - request_header X-Wikijump-Site-Id {{vars.site_id}} - - import serve_files -}} -", - // What part of the domain to split - // - // So if the files domain (with dot) is ".wjfiles.com", there are 2 periods. - // Any site slugs would be before that first dot, such as in "foo.wjfiles.com", - // which would be index 2 using Caddy's domain addressing system: - // - // 0 - "com" - // 1 - "wjfiles" - // 2 - "foo" <-- what we want - // - // An additional example, say the files domain is ".host.wikijump.example.com", - // then there are 4 dots in the files domain, and thus the zero-based index is 4: - // - // 0 - "com" - // 1 - "example" - // 2 - "wikijump" - // 3 - "host" - // 4 - "foo" <-- what we want - files_domain.chars().filter(|&c| c == '.').count(), - ); - - str_write!( - &mut caddyfile, - " -# -# FALLBACK -# - -{} {{ - request_header X-Wikijump-Special-Error 1 - rewrite * /-/special-error/missing-site - reverse_proxy http://{framerail_host} -}}", - if local { - "http://,\nhttps://,\nlocalhost" - } else { - "http://,\nhttps://" - } - ); - - caddyfile -} - -#[test] -fn test_caddyfile_gen() { - // TODO - todo!() + CaddyService::generate(ctx, &options).await } diff --git a/deepwell/src/services/caddy/mod.rs b/deepwell/src/services/caddy/mod.rs new file mode 100644 index 0000000000..6612790eb9 --- /dev/null +++ b/deepwell/src/services/caddy/mod.rs @@ -0,0 +1,34 @@ +/* + * services/caddy/mod.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. 
+ */ + +#[allow(unused_imports)] +mod prelude { + pub use super::super::prelude::*; + pub use super::structs::*; +} + +mod service; +mod structs; + +#[cfg(test)] +mod test; + +pub use self::service::CaddyService; +pub use self::structs::*; diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs new file mode 100644 index 0000000000..92bf0b0fd0 --- /dev/null +++ b/deepwell/src/services/caddy/service.rs @@ -0,0 +1,300 @@ +/* + * services/caddy/service.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +//! Service to handle the [Caddy webserver](https://caddyserver.com/docs/). +//! +//! This is primarily concerned with generating the `Caddyfile` that +//! powers the server, which is where host → site mapping is performed. + +use super::prelude::*; +use crate::models::sea_orm_active_enums::AliasType; +use crate::models::site::{self, Entity as Site}; +use crate::services::{AliasService, DomainService}; +use crate::services::domain::DEFAULT_SITE_SLUG; +use sea_orm::{EntityTrait, QuerySelect}; +use std::borrow::Cow; +use std::collections::HashMap; + +#[derive(Debug)] +pub struct CaddyService; + +impl CaddyService { + pub async fn generate(ctx: &ServiceContext<'_>, options: &CaddyfileOptions) -> Result<String> { + let config = ctx.config(); + + // Gather necessary site data + let txn = ctx.transaction(); + + let sites: Vec<(i64, String, Option<String>)> = Site::find() + .select_only() + .column(site::Column::SiteId) + .column(site::Column::Slug) + .column(site::Column::PreferredDomain) + .into_tuple() + .all(txn) + .await?; + + let domains = { + let mut extras = HashMap::with_capacity(sites.len()); + + for &(site_id, _, _) in &sites { + let site_aliases = + AliasService::get_all(ctx, AliasType::Site, site_id).await?; + + let site_domains = DomainService::list_custom(ctx, site_id).await?; + extras.insert(site_id, (site_aliases, site_domains)); + } + + extras + }; + + Ok(Self::generate_custom( + config, + options, + &SiteDomainData { sites, domains }, + )) + } + + pub fn generate_custom( + config: &Config, + CaddyfileOptions { + debug, + local, + http_port, + https_port, + framerail_host, + wws_host, + }: &CaddyfileOptions, + SiteDomainData { sites, domains }: &SiteDomainData, + ) -> String { + info!("Generating Caddyfile for {} sites", sites.len()); + + let main_domain_no_dot = &config.main_domain_no_dot; + let files_domain = &config.files_domain; + + let mut caddyfile = str!( + "\ +# Global options +{ + metrics { + per_host + } +" + ); + + if let Some(port) = *http_port { + str_writeln!(&mut caddyfile, "\thttp_port {port}"); + } + + if let Some(port) = *https_port { + str_writeln!(&mut caddyfile, "\thttps_port {port}"); + } + + if *debug { + str_writeln!(&mut caddyfile, "\tdebug"); + } + + if *local { + str_writeln!(&mut 
caddyfile, "\tskip_install_trust"); + } + + str_write!( + &mut caddyfile, + "\ +}} + +# +# MAIN +# + +(serve_main) {{ + # Redirect, route is on the files server + @files {{ + path /*/code/* + path /*/html/* + path /*/file/* # for the /{{slug}}/file/{{filename}} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + }} + redir @files https://{{vars.site_slug}}{files_domain}{{uri}} + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://{framerail_host} +}} +" + ); + + for (site_id, site_slug, preferred_domain) in sites { + let (aliases, domains) = &domains[site_id]; + + // Get canonical and preferred domains, for later generation + let canonical_domain = if site_slug == DEFAULT_SITE_SLUG { + Cow::Borrowed(main_domain_no_dot) + } else { + Cow::Owned(DomainService::get_canonical(config, site_slug)) + }; + + let preferred_domain: &str = + preferred_domain.as_ref().unwrap_or(&canonical_domain); + + // Closure to generate a domain entry + // + // Then, generate a redirect for the corresponding "www" subdomain. + // This shouldn't be used so we should just have it point away to + // the right location. + // + // This also naturally has the benefit of capturing www.wikijump.com -> wikijump.com. + let mut generate_entry = |domain: &str| { + if domain == preferred_domain { + // Main content, for a preferred domain. + // This is where the request is actually reverse proxied through. + str_write!( + &mut caddyfile, + " +{domain} {{ + vars {{ + site_id {site_id} + site_slug {site_slug} + }} + + request_header X-Wikijump-Site-Id {{vars.site_id}} + request_header X-Wikijump-Site-Slug {{vars.site_slug}} + + import serve_main +}} + +www.{domain} {{ + redir https://{preferred_domain}{{uri}} +}} +" + ); + } else { + // Generate a redirect to the preferred domain. + str_write!( + &mut caddyfile, + " +{domain}, +www.{domain} {{ + redir https://{preferred_domain}{{uri}} +}} +" + ); + } + }; + + // Canonical domain + generate_entry(&canonical_domain); + + // Custom domains + for model in domains { + generate_entry(&model.domain); + } + + // Aliases (all redirects) + for alias in aliases { + let domain = DomainService::get_canonical(config, &alias.slug); + generate_entry(&domain); + } + } + + str_write!( + &mut caddyfile, + " +# +# FILES +# + +(serve_files) {{ + reverse_proxy http://{wws_host} +}} + +*{files_domain} {{" + ); + + for (site_id, site_slug, _) in sites { + str_write!( + &mut caddyfile, + " + @{site_slug} host {site_slug}{files_domain} + vars @{site_slug} site_id {site_id} +" + ); + } + + str_write!( + &mut caddyfile, + " + request_header X-Wikijump-Site-Slug {{labels.{}}} + request_header X-Wikijump-Site-Id {{vars.site_id}} + + import serve_files +}} +", + // What part of the domain to split + // + // So if the files domain (with dot) is ".wjfiles.com", there are 2 periods. 
+ // Any site slugs would be before that first dot, such as in "foo.wjfiles.com", + // which would be index 2 using Caddy's domain addressing system: + // + // 0 - "com" + // 1 - "wjfiles" + // 2 - "foo" <-- what we want + // + // An additional example, say the files domain is ".host.wikijump.example.com", + // then there are 4 dots in the files domain, and thus the zero-based index is 4: + // + // 0 - "com" + // 1 - "example" + // 2 - "wikijump" + // 3 - "host" + // 4 - "foo" <-- what we want + files_domain.chars().filter(|&c| c == '.').count(), + ); + + str_write!( + &mut caddyfile, + " +# +# FALLBACK +# + +{} {{ + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://{framerail_host} +}}", + if *local { + "http://,\nhttps://,\nlocalhost" + } else { + "http://,\nhttps://" + } + ); + + caddyfile + } +} diff --git a/deepwell/src/services/caddy/structs.rs b/deepwell/src/services/caddy/structs.rs new file mode 100644 index 0000000000..c05721a285 --- /dev/null +++ b/deepwell/src/services/caddy/structs.rs @@ -0,0 +1,48 @@ +/* + * services/caddy/structs.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <http://www.gnu.org/licenses/>. + */ + +use crate::models::alias::Model as AliasModel; +use crate::models::site_domain::Model as SiteDomainModel; +use std::collections::HashMap; + +#[derive(Deserialize, Debug)] +pub struct CaddyfileOptions { + #[serde(default)] + pub debug: bool, + + #[serde(default)] + pub local: bool, + + #[serde(default)] + pub http_port: Option<i64>, + + #[serde(default)] + pub https_port: Option<i64>, + + // Infra information + pub framerail_host: String, + pub wws_host: String, +} + +#[derive(Debug)] +pub struct SiteDomainData { + pub sites: Vec<(i64, String, Option<String>)>, + pub domains: HashMap<i64, (Vec<AliasModel>, Vec<SiteDomainModel>)>, +} diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs new file mode 100644 index 0000000000..66c8c53bf0 --- /dev/null +++ b/deepwell/src/services/caddy/test.rs @@ -0,0 +1,25 @@ +/* + * services/caddy/test.rs + * + * DEEPWELL - Wikijump API provider and database manager + * Copyright (C) 2019-2025 Wikijump Team + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. 
If not, see <http://www.gnu.org/licenses/>. + */ + +#[test] +fn test_caddyfile_gen() { + // TODO + todo!() +} diff --git a/deepwell/src/services/mod.rs b/deepwell/src/services/mod.rs index 897b839031..40f7898ff4 100644 --- a/deepwell/src/services/mod.rs +++ b/deepwell/src/services/mod.rs @@ -60,6 +60,7 @@ pub mod alias; // TODO create audit pub mod authentication; pub mod blob; +pub mod caddy; pub mod category; pub mod domain; pub mod email; @@ -95,6 +96,7 @@ pub mod vote; pub use self::alias::AliasService; pub use self::authentication::AuthenticationService; pub use self::blob::BlobService; +pub use self::caddy::CaddyService; pub use self::category::CategoryService; pub use self::context::ServiceContext; pub use self::domain::DomainService; From c32595d485fea9ca85fe7e4fed5b7081aa414a8f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 04:33:55 -0400 Subject: [PATCH 280/306] Reduce input data, don't require models. Makes it easier to construct inputs for testing. --- deepwell/src/services/caddy/service.rs | 47 ++++++++++++++++++-------- deepwell/src/services/caddy/structs.rs | 14 +++++--- 2 files changed, 42 insertions(+), 19 deletions(-) diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs index 92bf0b0fd0..f016218aaa 100644 --- a/deepwell/src/services/caddy/service.rs +++ b/deepwell/src/services/caddy/service.rs @@ -24,8 +24,10 @@ //! powers the server, which is where host → site mapping is performed. use super::prelude::*; +use crate::models::alias::Model as AliasModel; use crate::models::sea_orm_active_enums::AliasType; use crate::models::site::{self, Entity as Site}; +use crate::models::site_domain::Model as SiteDomainModel; use crate::services::{AliasService, DomainService}; use crate::services::domain::DEFAULT_SITE_SLUG; use sea_orm::{EntityTrait, QuerySelect}; @@ -52,23 +54,37 @@ impl CaddyService { .await?; let domains = { - let mut extras = HashMap::with_capacity(sites.len()); + let mut domains = HashMap::with_capacity(sites.len()); for &(site_id, _, _) in &sites { - let site_aliases = - AliasService::get_all(ctx, AliasType::Site, site_id).await?; - - let site_domains = DomainService::list_custom(ctx, site_id).await?; - extras.insert(site_id, (site_aliases, site_domains)); + let aliases = AliasService::get_all(ctx, AliasType::Site, site_id) + .await? + .into_iter() + .map(|AliasModel { slug, .. }| slug) + .collect(); + + let custom_domains = DomainService::list_custom(ctx, site_id) + .await? + .into_iter() + .map(|SiteDomainModel { domain, .. 
}| domain) + .collect(); + + domains.insert( + site_id, + SiteDomainData { + aliases, + custom_domains, + }, + ); } - extras + domains }; Ok(Self::generate_custom( config, options, - &SiteDomainData { sites, domains }, + &SiteData { sites, domains }, )) } @@ -82,7 +98,7 @@ impl CaddyService { framerail_host, wws_host, }: &CaddyfileOptions, - SiteDomainData { sites, domains }: &SiteDomainData, + SiteData { sites, domains }: &SiteData, ) -> String { info!("Generating Caddyfile for {} sites", sites.len()); @@ -150,7 +166,10 @@ impl CaddyService { ); for (site_id, site_slug, preferred_domain) in sites { - let (aliases, domains) = &domains[site_id]; + let SiteDomainData { + aliases, + custom_domains, + } = &domains[site_id]; // Get canonical and preferred domains, for later generation let canonical_domain = if site_slug == DEFAULT_SITE_SLUG { @@ -211,13 +230,13 @@ www.{domain} {{ generate_entry(&canonical_domain); // Custom domains - for model in domains { - generate_entry(&model.domain); + for domain in custom_domains { + generate_entry(&domain); } // Aliases (all redirects) - for alias in aliases { - let domain = DomainService::get_canonical(config, &alias.slug); + for alias_slug in aliases { + let domain = DomainService::get_canonical(config, &alias_slug); generate_entry(&domain); } } diff --git a/deepwell/src/services/caddy/structs.rs b/deepwell/src/services/caddy/structs.rs index c05721a285..e6a5bd8562 100644 --- a/deepwell/src/services/caddy/structs.rs +++ b/deepwell/src/services/caddy/structs.rs @@ -18,8 +18,6 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ -use crate::models::alias::Model as AliasModel; -use crate::models::site_domain::Model as SiteDomainModel; use std::collections::HashMap; #[derive(Deserialize, Debug)] @@ -41,8 +39,14 @@ pub struct CaddyfileOptions { pub wws_host: String, } -#[derive(Debug)] -pub struct SiteDomainData { +#[derive(Deserialize, Debug)] +pub struct SiteData { pub sites: Vec<(i64, String, Option<String>)>, - pub domains: HashMap<i64, (Vec<AliasModel>, Vec<SiteDomainModel>)>, + pub domains: HashMap<i64, SiteDomainData>, +} + +#[derive(Deserialize, Debug)] +pub struct SiteDomainData { + pub aliases: Vec<String>, + pub custom_domains: Vec<String>, } From 6d72bc74e87c061d0810ed60d908bd5f443bb9ae Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 04:35:17 -0400 Subject: [PATCH 281/306] Address clippy lints. --- deepwell/src/services/caddy/service.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs index f016218aaa..c52480eef6 100644 --- a/deepwell/src/services/caddy/service.rs +++ b/deepwell/src/services/caddy/service.rs @@ -231,12 +231,12 @@ www.{domain} {{ // Custom domains for domain in custom_domains { - generate_entry(&domain); + generate_entry(domain); } // Aliases (all redirects) for alias_slug in aliases { - let domain = DomainService::get_canonical(config, &alias_slug); + let domain = DomainService::get_canonical(config, alias_slug); generate_entry(&domain); } } From 21e53ccdbfe10b2d90aa3cc9b79f7ac88842a532 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 04:37:52 -0400 Subject: [PATCH 282/306] Use direct query to remove need for full model. 
--- deepwell/src/services/caddy/service.rs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs index c52480eef6..d01ce07c97 100644 --- a/deepwell/src/services/caddy/service.rs +++ b/deepwell/src/services/caddy/service.rs @@ -27,7 +27,7 @@ use super::prelude::*; use crate::models::alias::Model as AliasModel; use crate::models::sea_orm_active_enums::AliasType; use crate::models::site::{self, Entity as Site}; -use crate::models::site_domain::Model as SiteDomainModel; +use crate::models::site_domain::{self, Entity as SiteDomain}; use crate::services::{AliasService, DomainService}; use crate::services::domain::DEFAULT_SITE_SLUG; use sea_orm::{EntityTrait, QuerySelect}; @@ -63,11 +63,13 @@ impl CaddyService { .map(|AliasModel { slug, .. }| slug) .collect(); - let custom_domains = DomainService::list_custom(ctx, site_id) - .await? - .into_iter() - .map(|SiteDomainModel { domain, .. }| domain) - .collect(); + let custom_domains = SiteDomain::find() + .select_only() + .column(site_domain::Column::Domain) + .filter(site_domain::Column::SiteId.eq(site_id)) + .into_tuple() + .all(txn) + .await?; domains.insert( site_id, From d47d19316845e054f0c4f1e2174bd7dc0ed4af99 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 04:54:56 -0400 Subject: [PATCH 283/306] Run rustfmt on CaddyService file. --- deepwell/src/services/caddy/service.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs index d01ce07c97..0cd5a2989a 100644 --- a/deepwell/src/services/caddy/service.rs +++ b/deepwell/src/services/caddy/service.rs @@ -28,8 +28,8 @@ use crate::models::alias::Model as AliasModel; use crate::models::sea_orm_active_enums::AliasType; use crate::models::site::{self, Entity as Site}; use crate::models::site_domain::{self, Entity as SiteDomain}; -use crate::services::{AliasService, DomainService}; use crate::services::domain::DEFAULT_SITE_SLUG; +use crate::services::{AliasService, DomainService}; use sea_orm::{EntityTrait, QuerySelect}; use std::borrow::Cow; use std::collections::HashMap; @@ -38,7 +38,10 @@ use std::collections::HashMap; pub struct CaddyService; impl CaddyService { - pub async fn generate(ctx: &ServiceContext<'_>, options: &CaddyfileOptions) -> Result<String> { + pub async fn generate( + ctx: &ServiceContext<'_>, + options: &CaddyfileOptions, + ) -> Result<String> { let config = ctx.config(); // Gather necessary site data From 57820cb9fa47c1b11306356f0fe4c8f830b0afa1 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 04:55:05 -0400 Subject: [PATCH 284/306] Add helper method to generate local Config struct. --- deepwell/src/services/caddy/test.rs | 73 +++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index 66c8c53bf0..595107de8d 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -18,6 +18,79 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ +use super::prelude::*; +use crate::config::Config; + +/// Produce a `Config` struct for use in `CaddyService` testing. 
+fn build_config() -> Config { + use femme::LevelFilter; + use ftml::layout::Layout; + use std::num::NonZeroU16; + use std::path::PathBuf; + use std::time::Duration as StdDuration; + use time::Duration as TimeDuration; + + const MAIN_DOMAIN: &str = "wikijump.test"; + const FILES_DOMAIN: &str = "wjfiles.test"; + + Config { + main_domain_no_dot: str!(MAIN_DOMAIN), + main_domain: format!(".{MAIN_DOMAIN}"), + files_domain_no_dot: str!(FILES_DOMAIN), + files_domain: format!(".{FILES_DOMAIN}"), + + // Unused fields + raw_toml: String::new(), + raw_toml_path: PathBuf::new(), + logger: false, + logger_level: LevelFilter::Off, + address: "[::]:2747".parse().unwrap(), + pid_file: None, + watch_files: false, + run_seeder: false, + seeder_path: PathBuf::new(), + localization_path: PathBuf::new(), + authentication_fail_delay: StdDuration::from_secs(0), + session_token_prefix: String::new(), + session_token_length: 0, + normal_session_duration: TimeDuration::seconds(0), + restricted_session_duration: TimeDuration::seconds(0), + recovery_code_count: 0, + recovery_code_length: 0, + totp_time_step: 0, + totp_time_skew: 0, + job_workers: NonZeroU16::new(1).unwrap(), + job_max_attempts: 0, + job_work_delay: StdDuration::from_secs(0), + job_min_poll_delay: StdDuration::from_secs(0), + job_max_poll_delay: StdDuration::from_secs(0), + job_prune_session: StdDuration::from_secs(0), + job_prune_text: StdDuration::from_secs(0), + job_name_change_refill: StdDuration::from_secs(0), + job_lift_expired_punishments: StdDuration::from_secs(0), + render_timeout: StdDuration::from_secs(0), + rerender_skip: Vec::new(), + message_layout: Layout::Wikijump, + default_page_layout: Layout::Wikijump, + special_page_prefix: String::new(), + special_page_template: String::new(), + special_page_missing: String::new(), + special_page_private: String::new(), + special_page_banned: String::new(), + default_name_changes: 0, + maximum_name_changes: 0, + refill_name_change: None, + minimum_name_bytes: 0, + presigned_path_length: 0, + presigned_expiry_secs: 0, + maximum_blob_size: 0, + maximum_avatar_size: 0, + maximum_message_subject_bytes: 0, + maximum_message_body_bytes: 0, + maximum_message_recipients: 0, + } +} + #[test] fn test_caddyfile_gen() { // TODO From f4a96efcd1c6ab2a5d5ef76de119b5e4a2f920a2 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 05:05:53 -0400 Subject: [PATCH 285/306] Add Default to SiteDomainData. --- deepwell/src/services/caddy/structs.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deepwell/src/services/caddy/structs.rs b/deepwell/src/services/caddy/structs.rs index e6a5bd8562..705e84ce59 100644 --- a/deepwell/src/services/caddy/structs.rs +++ b/deepwell/src/services/caddy/structs.rs @@ -45,7 +45,7 @@ pub struct SiteData { pub domains: HashMap<i64, SiteDomainData>, } -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, Default)] pub struct SiteDomainData { pub aliases: Vec<String>, pub custom_domains: Vec<String>, From 6c63ab865e9b87ff19abba25cae1204271eec819 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 05:06:14 -0400 Subject: [PATCH 286/306] Add Clone to caddy service structs. 
--- deepwell/src/services/caddy/structs.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/deepwell/src/services/caddy/structs.rs b/deepwell/src/services/caddy/structs.rs index 705e84ce59..656ccb6ba2 100644 --- a/deepwell/src/services/caddy/structs.rs +++ b/deepwell/src/services/caddy/structs.rs @@ -20,7 +20,7 @@ use std::collections::HashMap; -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct CaddyfileOptions { #[serde(default)] pub debug: bool, @@ -39,13 +39,13 @@ pub struct CaddyfileOptions { pub wws_host: String, } -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct SiteData { pub sites: Vec<(i64, String, Option<String>)>, pub domains: HashMap<i64, SiteDomainData>, } -#[derive(Deserialize, Debug, Default)] +#[derive(Deserialize, Debug, Default, Clone)] pub struct SiteDomainData { pub aliases: Vec<String>, pub custom_domains: Vec<String>, From 232c933fcb56aa57073b69a34354c7294eee39ae Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 05:07:22 -0400 Subject: [PATCH 287/306] Use Cow for CaddyfileOptions. We don't necessarily need owned strings for hosts. --- deepwell/src/endpoints/routing.rs | 2 +- deepwell/src/services/caddy/service.rs | 4 ++-- deepwell/src/services/caddy/structs.rs | 7 ++++--- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/deepwell/src/endpoints/routing.rs b/deepwell/src/endpoints/routing.rs index a178bda04b..74df4f193b 100644 --- a/deepwell/src/endpoints/routing.rs +++ b/deepwell/src/endpoints/routing.rs @@ -25,6 +25,6 @@ pub async fn generate_caddyfile( ctx: &ServiceContext<'_>, params: Params<'static>, ) -> Result<String> { - let options: CaddyfileOptions = params.parse()?; + let options: CaddyfileOptions<'static> = params.parse()?; CaddyService::generate(ctx, &options).await } diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs index 0cd5a2989a..674730bf3f 100644 --- a/deepwell/src/services/caddy/service.rs +++ b/deepwell/src/services/caddy/service.rs @@ -40,7 +40,7 @@ pub struct CaddyService; impl CaddyService { pub async fn generate( ctx: &ServiceContext<'_>, - options: &CaddyfileOptions, + options: &CaddyfileOptions<'_>, ) -> Result<String> { let config = ctx.config(); @@ -102,7 +102,7 @@ impl CaddyService { https_port, framerail_host, wws_host, - }: &CaddyfileOptions, + }: &CaddyfileOptions<'_>, SiteData { sites, domains }: &SiteData, ) -> String { info!("Generating Caddyfile for {} sites", sites.len()); diff --git a/deepwell/src/services/caddy/structs.rs b/deepwell/src/services/caddy/structs.rs index 656ccb6ba2..5a51ac545d 100644 --- a/deepwell/src/services/caddy/structs.rs +++ b/deepwell/src/services/caddy/structs.rs @@ -18,10 +18,11 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ +use std::borrow::Cow; use std::collections::HashMap; #[derive(Deserialize, Debug, Clone)] -pub struct CaddyfileOptions { +pub struct CaddyfileOptions<'a> { #[serde(default)] pub debug: bool, @@ -35,8 +36,8 @@ pub struct CaddyfileOptions { pub https_port: Option<i64>, // Infra information - pub framerail_host: String, - pub wws_host: String, + pub framerail_host: Cow<'a, str>, + pub wws_host: Cow<'a, str>, } #[derive(Deserialize, Debug, Clone)] From fc73333762de34251379d43839331345ce79a3da Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 05:25:14 -0400 Subject: [PATCH 288/306] Add initial code setup for Caddyfile generation unit tests. 
--- deepwell/Cargo.lock | 1 + deepwell/Cargo.toml | 3 + deepwell/src/services/caddy/test.rs | 168 +++++++++++++++++++++++++++- 3 files changed, 168 insertions(+), 4 deletions(-) diff --git a/deepwell/Cargo.lock b/deepwell/Cargo.lock index d9dd7ba519..52f6de8909 100644 --- a/deepwell/Cargo.lock +++ b/deepwell/Cargo.lock @@ -799,6 +799,7 @@ dependencies = [ "intl-memoizer", "jsonrpsee", "log", + "maplit", "notify", "once_cell", "paste", diff --git a/deepwell/Cargo.toml b/deepwell/Cargo.toml index 3f0f52c7ca..7fc462e529 100644 --- a/deepwell/Cargo.toml +++ b/deepwell/Cargo.toml @@ -71,6 +71,9 @@ unicase = "2" wikidot-normalize = "0.12" wikidot-path = "0.6" +[dev-dependencies] +maplit = "1" + [build-dependencies] built = { version = "0.7", features = ["git2"] } diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index 595107de8d..74c3f5b8c8 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -18,10 +18,13 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ +//! Unit testing for our generated `Caddyfile`s. + use super::prelude::*; use crate::config::Config; +use crate::services::CaddyService; +use maplit::hashmap; -/// Produce a `Config` struct for use in `CaddyService` testing. fn build_config() -> Config { use femme::LevelFilter; use ftml::layout::Layout; @@ -91,8 +94,165 @@ fn build_config() -> Config { } } +fn build_site_data() -> (SiteData, SiteData) { + let basic = SiteData { + sites: vec![ + (1, str!("foo"), None), + (2, str!("bar"), Some(str!("example.com"))), + ], + domains: hashmap! { + 1 => SiteDomainData::default(), + 2 => SiteDomainData { + aliases: vec![], + custom_domains: vec![str!("example.com")], + }, + }, + }; + + let full = SiteData { + sites: vec![ + (1, str!("www"), None), + (2, str!("empty"), None), + (3, str!("test"), None), + ( + 4, + str!("wanderers-library"), + Some(str!("wandererslibrary.com")), + ), + (5, str!("scp-wiki"), Some(str!("scpwiki.com"))), + ], + domains: hashmap! { + 1 => SiteDomainData::default(), + 2 => SiteDomainData::default(), + 3 => SiteDomainData { + aliases: vec![str!("check")], + custom_domains: vec![str!("example.com"), str!("example.net")], + }, + 4 => SiteDomainData { + aliases: vec![], + custom_domains: vec![str!("wandererslibrary.com")], + }, + 5 => SiteDomainData { + aliases: vec![str!("scpwiki")], + custom_domains: vec![str!("scpwiki.com"), str!("scp-wiki.net"), str!("scp.foundation"), str!("foundation.scp")], + }, + }, + }; + + (basic, full) +} + +const CADDYFILE_BASIC_PROD: &str = " +"; + +const CADDYFILE_BASIC_LOCAL: &str = " +"; + +const CADDYFILE_BASIC_LOCAL_DEV: &str = " +"; + +const CADDYFILE_BASIC_DIFFERENT_PROXIES: &str = " +"; + +const CADDYFILE_FULL_PROD: &str = " +"; + +const CADDYFILE_FULL_LOCAL: &str = " +"; + #[test] -fn test_caddyfile_gen() { - // TODO - todo!() +fn generate_caddyfiles() { + const FRAMERAIL_HOST: &str = "framerail:3000"; + const WWS_HOST: &str = "wws:7000"; + + let config = build_config(); + let (sites_basic, sites_full) = build_site_data(); + + macro_rules! check { + ($expected:expr, $sites:expr, $options:expr $(,)?) 
=> {{ + let actual = CaddyService::generate_custom(&config, &$options, &$sites); + assert_eq!( + actual, $expected, + "Actual generated Caddyfile does not match", + ); + }}; + } + + check!( + CADDYFILE_BASIC_PROD, + sites_basic, + CaddyfileOptions { + debug: false, + local: false, + http_port: None, + https_port: None, + framerail_host: cow!(FRAMERAIL_HOST), + wws_host: cow!(WWS_HOST), + }, + ); + + check!( + CADDYFILE_BASIC_LOCAL, + sites_basic, + CaddyfileOptions { + debug: false, + local: true, + http_port: None, + https_port: None, + framerail_host: cow!(FRAMERAIL_HOST), + wws_host: cow!(WWS_HOST), + }, + ); + + check!( + CADDYFILE_BASIC_LOCAL_DEV, + sites_basic, + CaddyfileOptions { + debug: true, + local: true, + http_port: Some(8000), + https_port: Some(8443), + framerail_host: cow!(FRAMERAIL_HOST), + wws_host: cow!(WWS_HOST), + }, + ); + + check!( + CADDYFILE_BASIC_DIFFERENT_PROXIES, + sites_basic, + CaddyfileOptions { + debug: false, + local: false, + http_port: None, + https_port: None, + framerail_host: cow!("web_proxy_host"), + wws_host: cow!("wws_proxy_host"), + }, + ); + + check!( + CADDYFILE_FULL_PROD, + sites_full, + CaddyfileOptions { + debug: false, + local: false, + http_port: None, + https_port: None, + framerail_host: cow!(FRAMERAIL_HOST), + wws_host: cow!(WWS_HOST), + }, + ); + + check!( + CADDYFILE_FULL_LOCAL, + sites_basic, + CaddyfileOptions { + debug: true, + local: true, + http_port: None, + https_port: None, + framerail_host: cow!(FRAMERAIL_HOST), + wws_host: cow!(WWS_HOST), + }, + ); } From 3ab95f787baa0e7e1b11c9387138b00b6e7ef256 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 05:32:59 -0400 Subject: [PATCH 289/306] Improve Caddyfile test output on failure. --- deepwell/src/services/caddy/test.rs | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index 74c3f5b8c8..fb53183d59 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -171,10 +171,22 @@ fn generate_caddyfiles() { macro_rules! check { ($expected:expr, $sites:expr, $options:expr $(,)?) => {{ let actual = CaddyService::generate_custom(&config, &$options, &$sites); - assert_eq!( - actual, $expected, - "Actual generated Caddyfile does not match", - ); + let expected = $expected; + + // We do this check ourselves instead of using assert_eq! for a cleaner error message. + if actual != expected { + eprintln!("Unit test failure!"); + eprintln!(); + eprintln!("ACTUAL generated Caddyfile:\n{actual:?}\n[BEGIN]\n{actual}\n[END]"); + eprintln!(); + eprintln!("EXPECTED generated Caddyfile:\n{expected:?}\n[BEGIN]\n{expected}\n[END]"); + eprintln!(); + eprintln!("UNIT TEST INFO:"); + eprintln!("* Expected output: {}", stringify!($expected)); + eprintln!("* Site data: {}", stringify!($sites)); + eprintln!("* Options: {:#?}", $options); + panic!("Generated Caddy file did not match!"); + } }}; } From 065d402423d1153a218ffd1a165d44eca3d26388 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 05:41:34 -0400 Subject: [PATCH 290/306] Add first unit test data. 
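The expected output added here covers the "basic" fixture: sites foo and
bar under the wikijump.test / wjfiles.test config, with bar preferring
example.com. Note that the site-slug index in the wildcard files block is
derived from the number of periods in the files domain, so for
".wjfiles.test" (two periods) the generated matcher is:

    request_header X-Wikijump-Site-Slug {labels.2}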
--- deepwell/src/services/caddy/test.rs | 104 +++++++++++++++++++++++++++- 1 file changed, 103 insertions(+), 1 deletion(-) diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index fb53183d59..bd4ca06abc 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -142,7 +142,109 @@ fn build_site_data() -> (SiteData, SiteData) { (basic, full) } -const CADDYFILE_BASIC_PROD: &str = " +const CADDYFILE_BASIC_PROD: &str = "\ +# Global options +{ + metrics { + per_host + } +} + +# +# MAIN +# + +(serve_main) { + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.test{uri} + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://framerail:3000 +} + +foo.wikijump.test { + vars { + site_id 1 + site_slug foo + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.foo.wikijump.test { + redir https://foo.wikijump.test{uri} +} + +bar.wikijump.test, +www.bar.wikijump.test { + redir https://example.com{uri} +} + +example.com { + vars { + site_id 2 + site_slug bar + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.example.com { + redir https://example.com{uri} +} + +# +# FILES +# + +(serve_files) { + reverse_proxy http://wws:7000 +} + +*.wjfiles.test { + @foo host foo.wjfiles.test + vars @foo site_id 1 + + @bar host bar.wjfiles.test + vars @bar site_id 2 + + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files +} + +# +# FALLBACK +# + +http://, +https:// { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://framerail:3000 +} "; const CADDYFILE_BASIC_LOCAL: &str = " From 0da6eb911f1156bc80c44796881f89364483f027 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 05:47:48 -0400 Subject: [PATCH 291/306] Allow varying the main and files domains in tests. This will enable us to use local Caddyfiles in manual tests, since it won't be trying to use "real" TLS certs, and verifies our site slug extraction indexing logic. 
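For context, a rough sketch of the indexing assumption being exercised here
(illustrative only, not code from this patch; slug_label_index is a made-up
name): Caddy numbers host labels from the right, so the {labels.N} placeholder
that extracts the site slug should use an N equal to the number of
dot-separated labels in the files domain itself.

    fn slug_label_index(files_domain_no_dot: &str) -> usize {
        // "wjfiles.test" has two labels, so the slug lands at {labels.2};
        // under the same assumption, "host.wjfiles.example.org" puts it at {labels.4}.
        files_domain_no_dot.split('.').count()
    }

    #[test]
    fn slug_label_indexing() {
        assert_eq!(slug_label_index("wjfiles.test"), 2);
        assert_eq!(slug_label_index("host.wjfiles.example.org"), 4);
    }
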
--- deepwell/src/services/caddy/test.rs | 47 +++++++++++++++++++++++------ 1 file changed, 37 insertions(+), 10 deletions(-) diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index bd4ca06abc..82d3c9e43e 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -25,7 +25,7 @@ use crate::config::Config; use crate::services::CaddyService; use maplit::hashmap; -fn build_config() -> Config { +fn build_config(main_domain: &str, files_domain: &str) -> Config { use femme::LevelFilter; use ftml::layout::Layout; use std::num::NonZeroU16; @@ -33,14 +33,14 @@ fn build_config() -> Config { use std::time::Duration as StdDuration; use time::Duration as TimeDuration; - const MAIN_DOMAIN: &str = "wikijump.test"; - const FILES_DOMAIN: &str = "wjfiles.test"; + assert!(!main_domain.starts_with('.')); + assert!(!files_domain.starts_with('.')); Config { - main_domain_no_dot: str!(MAIN_DOMAIN), - main_domain: format!(".{MAIN_DOMAIN}"), - files_domain_no_dot: str!(FILES_DOMAIN), - files_domain: format!(".{FILES_DOMAIN}"), + main_domain_no_dot: str!(main_domain), + main_domain: format!(".{main_domain}"), + files_domain_no_dot: str!(files_domain), + files_domain: format!(".{files_domain}"), // Unused fields raw_toml: String::new(), @@ -262,17 +262,22 @@ const CADDYFILE_FULL_PROD: &str = " const CADDYFILE_FULL_LOCAL: &str = " "; +const CADDYFILE_LONG_DOMAIN: &str = " +"; + #[test] fn generate_caddyfiles() { const FRAMERAIL_HOST: &str = "framerail:3000"; const WWS_HOST: &str = "wws:7000"; - let config = build_config(); + let config_basic = build_config("wikijump.test", "wjfiles.test"); + let config_local = build_config("wikijump.localhost", "wjfiles.localhost"); + let config_long = build_config("site.wikijump.com", "host.wjfiles.example.org"); let (sites_basic, sites_full) = build_site_data(); macro_rules! check { - ($expected:expr, $sites:expr, $options:expr $(,)?) => {{ - let actual = CaddyService::generate_custom(&config, &$options, &$sites); + ($expected:expr, $config:expr, $sites:expr, $options:expr $(,)?) => {{ + let actual = CaddyService::generate_custom(&$config, &$options, &$sites); let expected = $expected; // We do this check ourselves instead of using assert_eq! for a cleaner error message. 
@@ -285,6 +290,8 @@ fn generate_caddyfiles() { eprintln!(); eprintln!("UNIT TEST INFO:"); eprintln!("* Expected output: {}", stringify!($expected)); + eprintln!("* Main domain: {}", $config.main_domain_no_dot); + eprintln!("* Files domain: {}", $config.files_domain_no_dot); eprintln!("* Site data: {}", stringify!($sites)); eprintln!("* Options: {:#?}", $options); panic!("Generated Caddy file did not match!"); @@ -294,6 +301,7 @@ fn generate_caddyfiles() { check!( CADDYFILE_BASIC_PROD, + config_basic, sites_basic, CaddyfileOptions { debug: false, @@ -307,6 +315,7 @@ fn generate_caddyfiles() { check!( CADDYFILE_BASIC_LOCAL, + config_local, sites_basic, CaddyfileOptions { debug: false, @@ -320,6 +329,7 @@ fn generate_caddyfiles() { check!( CADDYFILE_BASIC_LOCAL_DEV, + config_local, sites_basic, CaddyfileOptions { debug: true, @@ -333,6 +343,7 @@ fn generate_caddyfiles() { check!( CADDYFILE_BASIC_DIFFERENT_PROXIES, + config_basic, sites_basic, CaddyfileOptions { debug: false, @@ -346,6 +357,7 @@ fn generate_caddyfiles() { check!( CADDYFILE_FULL_PROD, + config_basic, sites_full, CaddyfileOptions { debug: false, @@ -359,6 +371,7 @@ fn generate_caddyfiles() { check!( CADDYFILE_FULL_LOCAL, + config_local, sites_basic, CaddyfileOptions { debug: true, @@ -369,4 +382,18 @@ fn generate_caddyfiles() { wws_host: cow!(WWS_HOST), }, ); + + check!( + CADDYFILE_LONG_DOMAIN, + config_long, + sites_basic, + CaddyfileOptions { + debug: false, + local: false, + http_port: None, + https_port: None, + framerail_host: cow!(FRAMERAIL_HOST), + wws_host: cow!(WWS_HOST), + }, + ); } From e035e3ebdbf5d537d3b4751937e3d81e7483321c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 05:55:41 -0400 Subject: [PATCH 292/306] Trim newlines off end of Caddyfile. --- deepwell/src/services/caddy/test.rs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index 82d3c9e43e..f6981d2c0e 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -277,9 +277,15 @@ fn generate_caddyfiles() { macro_rules! check { ($expected:expr, $config:expr, $sites:expr, $options:expr $(,)?) => {{ - let actual = CaddyService::generate_custom(&$config, &$options, &$sites); + let mut actual = CaddyService::generate_custom(&$config, &$options, &$sites); let expected = $expected; + // Strip off trailing newlines, not something we care about, + // and precisely managing them is a waste of time. + while actual.ends_with('\n') { + actual.pop(); + } + // We do this check ourselves instead of using assert_eq! for a cleaner error message. if actual != expected { eprintln!("Unit test failure!"); From 4608ab5904f250914948312eb8550b0122b8b87c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 05:57:32 -0400 Subject: [PATCH 293/306] Print which output this is at top and bottom. 
--- deepwell/src/services/caddy/test.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index f6981d2c0e..510f36716b 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -290,9 +290,9 @@ fn generate_caddyfiles() { if actual != expected { eprintln!("Unit test failure!"); eprintln!(); - eprintln!("ACTUAL generated Caddyfile:\n{actual:?}\n[BEGIN]\n{actual}\n[END]"); + eprintln!("Actual generated Caddyfile:\n{actual:?}\n[BEGIN ACTUAL]\n{actual}\n[END ACTUAL]"); eprintln!(); - eprintln!("EXPECTED generated Caddyfile:\n{expected:?}\n[BEGIN]\n{expected}\n[END]"); + eprintln!("Expected generated Caddyfile:\n{expected:?}\n[BEGIN EXPECTED]\n{expected}\n[END EXPECTED]"); eprintln!(); eprintln!("UNIT TEST INFO:"); eprintln!("* Expected output: {}", stringify!($expected)); From b886643d151bb39b78ef89c8d9b251334e45685e Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 06:02:57 -0400 Subject: [PATCH 294/306] Add error to catch stray end newlines in test cases. --- deepwell/src/services/caddy/test.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index 510f36716b..801f4fbb32 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -286,6 +286,15 @@ fn generate_caddyfiles() { actual.pop(); } + // Meanwhile, if the 'expected' string ends with newline(s), + // it's never going to match the above. + // Such constant strings should be fixed. + assert!( + !expected.ends_with('\n'), + "Expected test string {} ends in a newline! Fix the test case.", + stringify!($expected), + ); + // We do this check ourselves instead of using assert_eq! for a cleaner error message. if actual != expected { eprintln!("Unit test failure!"); From 3db4feab144f8e63e5d31be3ad4270a7c7c3e85e Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 06:16:42 -0400 Subject: [PATCH 295/306] Use pretty-assertions to show diffs for generated Caddyfiles. 
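The pretty_assertions crate ships a drop-in assert_eq! macro that, on failure,
prints a colored per-line diff of the two values instead of dumping both
strings whole, which is much easier to scan for generated Caddyfiles that run
well over a hundred lines. Roughly (illustration only, not code from this
repo):

    use pretty_assertions::assert_eq;

    #[test]
    fn shows_a_line_diff_on_failure() {
        let expected = "line one\nline two\n";
        let actual = "line one\nline 2\n";
        // On mismatch, the failure output highlights only the changed lines.
        assert_eq!(expected, actual);
    }
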
--- deepwell/Cargo.lock | 17 +++++++++++++ deepwell/Cargo.toml | 1 + deepwell/src/services/caddy/test.rs | 37 ++++++++++++++++------------- 3 files changed, 39 insertions(+), 16 deletions(-) diff --git a/deepwell/Cargo.lock b/deepwell/Cargo.lock index 52f6de8909..ad44b10f0e 100644 --- a/deepwell/Cargo.lock +++ b/deepwell/Cargo.lock @@ -803,6 +803,7 @@ dependencies = [ "notify", "once_cell", "paste", + "pretty_assertions", "rand 0.8.5", "redis", "ref-map", @@ -866,6 +867,12 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "diff" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + [[package]] name = "digest" version = "0.10.7" @@ -2720,6 +2727,16 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" +[[package]] +name = "pretty_assertions" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" +dependencies = [ + "diff", + "yansi", +] + [[package]] name = "proc-macro-crate" version = "3.2.0" diff --git a/deepwell/Cargo.toml b/deepwell/Cargo.toml index 7fc462e529..18ab749130 100644 --- a/deepwell/Cargo.toml +++ b/deepwell/Cargo.toml @@ -73,6 +73,7 @@ wikidot-path = "0.6" [dev-dependencies] maplit = "1" +pretty_assertions = "1" [build-dependencies] built = { version = "0.7", features = ["git2"] } diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index 801f4fbb32..89486a442d 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -24,6 +24,7 @@ use super::prelude::*; use crate::config::Config; use crate::services::CaddyService; use maplit::hashmap; +use pretty_assertions::assert_eq; fn build_config(main_domain: &str, files_domain: &str) -> Config { use femme::LevelFilter; @@ -295,22 +296,26 @@ fn generate_caddyfiles() { stringify!($expected), ); - // We do this check ourselves instead of using assert_eq! for a cleaner error message. - if actual != expected { - eprintln!("Unit test failure!"); - eprintln!(); - eprintln!("Actual generated Caddyfile:\n{actual:?}\n[BEGIN ACTUAL]\n{actual}\n[END ACTUAL]"); - eprintln!(); - eprintln!("Expected generated Caddyfile:\n{expected:?}\n[BEGIN EXPECTED]\n{expected}\n[END EXPECTED]"); - eprintln!(); - eprintln!("UNIT TEST INFO:"); - eprintln!("* Expected output: {}", stringify!($expected)); - eprintln!("* Main domain: {}", $config.main_domain_no_dot); - eprintln!("* Files domain: {}", $config.files_domain_no_dot); - eprintln!("* Site data: {}", stringify!($sites)); - eprintln!("* Options: {:#?}", $options); - panic!("Generated Caddy file did not match!"); - } + assert_eq!( + expected, + actual, + "\ +Generated Caddy file did not match! + + +UNIT TEST INFO: +* Expected output: {} +* Main domain: {} +* Files domain: {} +* Site data: {} +* Options: {:#?} +", + stringify!($expected), + $config.main_domain_no_dot, + $config.files_domain_no_dot, + stringify!($sites), + $options, + ); }}; } From 8cdb74021ac3c8ebac8ff8eaacac943e582e41dc Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 06:22:55 -0400 Subject: [PATCH 296/306] Add Caddyfile test data. Yay! This is done now. 
--- deepwell/src/services/caddy/test.rs | 651 ++++++++++++++++++++++++++-- 1 file changed, 623 insertions(+), 28 deletions(-) diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index 89486a442d..fea8ba3d04 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -114,7 +114,7 @@ fn build_site_data() -> (SiteData, SiteData) { sites: vec![ (1, str!("www"), None), (2, str!("empty"), None), - (3, str!("test"), None), + (3, str!("mytest"), None), ( 4, str!("wanderers-library"), @@ -245,25 +245,631 @@ https:// { request_header X-Wikijump-Special-Error 1 rewrite * /-/special-error/missing-site reverse_proxy http://framerail:3000 +}"; + +const CADDYFILE_BASIC_LOCAL: &str = "\ +# Global options +{ + metrics { + per_host + } + skip_install_trust +} + +# +# MAIN +# + +(serve_main) { + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.localhost{uri} + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://framerail:3000 +} + +foo.wikijump.localhost { + vars { + site_id 1 + site_slug foo + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.foo.wikijump.localhost { + redir https://foo.wikijump.localhost{uri} +} + +bar.wikijump.localhost, +www.bar.wikijump.localhost { + redir https://example.com{uri} +} + +example.com { + vars { + site_id 2 + site_slug bar + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.example.com { + redir https://example.com{uri} +} + +# +# FILES +# + +(serve_files) { + reverse_proxy http://wws:7000 +} + +*.wjfiles.localhost { + @foo host foo.wjfiles.localhost + vars @foo site_id 1 + + @bar host bar.wjfiles.localhost + vars @bar site_id 2 + + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files +} + +# +# FALLBACK +# + +http://, +https://, +localhost { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://framerail:3000 +}"; + +const CADDYFILE_BASIC_LOCAL_DEV: &str = "\ +# Global options +{ + metrics { + per_host + } + http_port 8000 + https_port 8443 + debug + skip_install_trust +} + +# +# MAIN +# + +(serve_main) { + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.localhost{uri} + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://framerail:3000 +} + +foo.wikijump.localhost { + vars { + site_id 1 + site_slug foo + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + 
+www.foo.wikijump.localhost { + redir https://foo.wikijump.localhost{uri} +} + +bar.wikijump.localhost, +www.bar.wikijump.localhost { + redir https://example.com{uri} +} + +example.com { + vars { + site_id 2 + site_slug bar + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.example.com { + redir https://example.com{uri} +} + +# +# FILES +# + +(serve_files) { + reverse_proxy http://wws:7000 +} + +*.wjfiles.localhost { + @foo host foo.wjfiles.localhost + vars @foo site_id 1 + + @bar host bar.wjfiles.localhost + vars @bar site_id 2 + + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files +} + +# +# FALLBACK +# + +http://, +https://, +localhost { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://framerail:3000 +}"; + +const CADDYFILE_BASIC_DIFFERENT_PROXIES: &str = "\ +# Global options +{ + metrics { + per_host + } +} + +# +# MAIN +# + +(serve_main) { + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.test{uri} + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://web_proxy_host +} + +foo.wikijump.test { + vars { + site_id 1 + site_slug foo + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.foo.wikijump.test { + redir https://foo.wikijump.test{uri} +} + +bar.wikijump.test, +www.bar.wikijump.test { + redir https://example.com{uri} +} + +example.com { + vars { + site_id 2 + site_slug bar + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.example.com { + redir https://example.com{uri} +} + +# +# FILES +# + +(serve_files) { + reverse_proxy http://wws_proxy_host +} + +*.wjfiles.test { + @foo host foo.wjfiles.test + vars @foo site_id 1 + + @bar host bar.wjfiles.test + vars @bar site_id 2 + + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files } -"; -const CADDYFILE_BASIC_LOCAL: &str = " -"; +# +# FALLBACK +# -const CADDYFILE_BASIC_LOCAL_DEV: &str = " -"; +http://, +https:// { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://web_proxy_host +}"; -const CADDYFILE_BASIC_DIFFERENT_PROXIES: &str = " -"; +const CADDYFILE_FULL_PROD: &str = "\ +# Global options +{ + metrics { + per_host + } +} -const CADDYFILE_FULL_PROD: &str = " -"; +# +# MAIN +# -const CADDYFILE_FULL_LOCAL: &str = " -"; +(serve_main) { + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.test{uri} + + # Finally, proxy to framerail to get the actual HTML + 
# Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://framerail:3000 +} + +wikijump.test { + vars { + site_id 1 + site_slug www + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.wikijump.test { + redir https://wikijump.test{uri} +} + +empty.wikijump.test { + vars { + site_id 2 + site_slug empty + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.empty.wikijump.test { + redir https://empty.wikijump.test{uri} +} + +mytest.wikijump.test { + vars { + site_id 3 + site_slug mytest + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.mytest.wikijump.test { + redir https://mytest.wikijump.test{uri} +} + +example.com, +www.example.com { + redir https://mytest.wikijump.test{uri} +} + +example.net, +www.example.net { + redir https://mytest.wikijump.test{uri} +} + +check.wikijump.test, +www.check.wikijump.test { + redir https://mytest.wikijump.test{uri} +} + +wanderers-library.wikijump.test, +www.wanderers-library.wikijump.test { + redir https://wandererslibrary.com{uri} +} + +wandererslibrary.com { + vars { + site_id 4 + site_slug wanderers-library + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.wandererslibrary.com { + redir https://wandererslibrary.com{uri} +} + +scp-wiki.wikijump.test, +www.scp-wiki.wikijump.test { + redir https://scpwiki.com{uri} +} + +scpwiki.com { + vars { + site_id 5 + site_slug scp-wiki + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.scpwiki.com { + redir https://scpwiki.com{uri} +} + +scp-wiki.net, +www.scp-wiki.net { + redir https://scpwiki.com{uri} +} + +scp.foundation, +www.scp.foundation { + redir https://scpwiki.com{uri} +} + +foundation.scp, +www.foundation.scp { + redir https://scpwiki.com{uri} +} + +scpwiki.wikijump.test, +www.scpwiki.wikijump.test { + redir https://scpwiki.com{uri} +} + +# +# FILES +# + +(serve_files) { + reverse_proxy http://wws:7000 +} -const CADDYFILE_LONG_DOMAIN: &str = " +*.wjfiles.test { + @www host www.wjfiles.test + vars @www site_id 1 + + @empty host empty.wjfiles.test + vars @empty site_id 2 + + @mytest host mytest.wjfiles.test + vars @mytest site_id 3 + + @wanderers-library host wanderers-library.wjfiles.test + vars @wanderers-library site_id 4 + + @scp-wiki host scp-wiki.wjfiles.test + vars @scp-wiki site_id 5 + + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files +} + +# +# FALLBACK +# + +http://, +https:// { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://framerail:3000 +}"; + +const CADDYFILE_LONG_DOMAIN: &str = "\ +# Global options +{ + metrics { + per_host + } +} + +# +# MAIN +# + +(serve_main) { + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files 
https://{vars.site_slug}.wjfiles.host.site.somedomain.example.com{uri} + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://framerail:3000 +} + +foo.site.wikijump.com { + vars { + site_id 1 + site_slug foo + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.foo.site.wikijump.com { + redir https://foo.site.wikijump.com{uri} +} + +bar.site.wikijump.com, +www.bar.site.wikijump.com { + redir https://example.com{uri} +} + +example.com { + vars { + site_id 2 + site_slug bar + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.example.com { + redir https://example.com{uri} +} + +# +# FILES +# + +(serve_files) { + reverse_proxy http://wws:7000 +} + +*.wjfiles.host.site.somedomain.example.com { + @foo host foo.wjfiles.host.site.somedomain.example.com + vars @foo site_id 1 + + @bar host bar.wjfiles.host.site.somedomain.example.com + vars @bar site_id 2 + + request_header X-Wikijump-Site-Slug {labels.6} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files +} + +# +# FALLBACK +# + +http://, +https:// { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://framerail:3000 +} "; #[test] @@ -273,7 +879,10 @@ fn generate_caddyfiles() { let config_basic = build_config("wikijump.test", "wjfiles.test"); let config_local = build_config("wikijump.localhost", "wjfiles.localhost"); - let config_long = build_config("site.wikijump.com", "host.wjfiles.example.org"); + let config_long = build_config( + "site.wikijump.com", + "wjfiles.host.site.somedomain.example.com", + ); let (sites_basic, sites_full) = build_site_data(); macro_rules! check { @@ -389,20 +998,6 @@ UNIT TEST INFO: }, ); - check!( - CADDYFILE_FULL_LOCAL, - config_local, - sites_basic, - CaddyfileOptions { - debug: true, - local: true, - http_port: None, - https_port: None, - framerail_host: cow!(FRAMERAIL_HOST), - wws_host: cow!(WWS_HOST), - }, - ); - check!( CADDYFILE_LONG_DOMAIN, config_long, From abf4beaa16bd24d197e19959bd2b9c6b491db342 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 06:25:54 -0400 Subject: [PATCH 297/306] Add note about hard tabs. --- deepwell/src/services/caddy/service.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs index 674730bf3f..3296d3aa0a 100644 --- a/deepwell/src/services/caddy/service.rs +++ b/deepwell/src/services/caddy/service.rs @@ -22,6 +22,10 @@ //! //! This is primarily concerned with generating the `Caddyfile` that //! powers the server, which is where host → site mapping is performed. +//! +//! NOTE: This file contains hard tabs, as this is what we want to use for +//! `Caddyfile` generation. If you're opening this file, mind the git +//! diff! If your editor munges the tabs please discard those changes. use super::prelude::*; use crate::models::alias::Model as AliasModel; From d232365831ca82613e0caee093ce8ba029a9300f Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 06:26:55 -0400 Subject: [PATCH 298/306] Add sorting to DB queries. 
This avoids us forcing caddy to reload just because we're getting different row orders from the database, and thus generating a technically different (but actually identical) Caddyfile. --- deepwell/src/services/caddy/service.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs index 3296d3aa0a..51a890dbd3 100644 --- a/deepwell/src/services/caddy/service.rs +++ b/deepwell/src/services/caddy/service.rs @@ -52,6 +52,7 @@ impl CaddyService { let txn = ctx.transaction(); let sites: Vec<(i64, String, Option<String>)> = Site::find() + .order_by_asc(site::Column::SiteId) .select_only() .column(site::Column::SiteId) .column(site::Column::Slug) @@ -71,6 +72,7 @@ impl CaddyService { .collect(); let custom_domains = SiteDomain::find() + .order_by_asc(site_domain::Column::Domain) .select_only() .column(site_domain::Column::Domain) .filter(site_domain::Column::SiteId.eq(site_id)) From 7548fec586f6a2903fc5b0467d4ce27a4c640ecf Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 06:40:09 -0400 Subject: [PATCH 299/306] Get rid of tls feature, set appropriate real IP source. Since we know we're always going to sit behind Caddy now, we can set a constant for our IP source and not have to both with any configuration field stuff. --- install/local/docker-compose.yaml | 1 - wws/.env.example | 14 -------- wws/Cargo.lock | 27 ---------------- wws/Cargo.toml | 7 ---- wws/src/config/mod.rs | 54 +++---------------------------- wws/src/config/object.rs | 8 ----- wws/src/config/secrets.rs | 20 ------------ wws/src/main.rs | 39 ++-------------------- wws/src/route.rs | 9 ++++-- wws/src/state.rs | 4 --- 10 files changed, 15 insertions(+), 168 deletions(-) diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index c4542bf003..f6fbebfde8 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -119,7 +119,6 @@ services: - "S3_CUSTOM_ENDPOINT=http://files:9000" - "S3_ACCESS_KEY_ID=minio" - "S3_SECRET_ACCESS_KEY=defaultpassword" - - "CLIENT_IP_SOURCE=ConnectInfo" restart: always healthcheck: test: ["CMD", "wikijump-health-check"] diff --git a/wws/.env.example b/wws/.env.example index 36e1e3daa3..10be1350da 100644 --- a/wws/.env.example +++ b/wws/.env.example @@ -38,20 +38,6 @@ S3_SECRET_ACCESS_KEY= # But don't include both. AWS_PROFILE_NAME=wikijump -# How client IP addresses are determined. -# See https://docs.rs/axum-client-ip/latest/axum_client_ip/enum.SecureClientIpSource.html -# -# Must have one of these values: -# - RightmostForwarded -# - RightmostXForwardedFor -# - XRealIp -# - FlyClientIp -# - TrueClientIp -# - CfConnectingIp -# - ConnectInfo -# - CloudFrontViewerAddress -CLIENT_IP_SOURCE=XRealIp - # What TLS certificate and secret key to use for this server. # # Requires the "tls" feature to be enabled in wws. 
diff --git a/wws/Cargo.lock b/wws/Cargo.lock index 20ee2a0e44..b95db9cc89 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -283,30 +283,6 @@ dependencies = [ "syn", ] -[[package]] -name = "axum-server" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56bac90848f6a9393ac03c63c640925c4b7c8ca21654de40d53f55964667c7d8" -dependencies = [ - "arc-swap", - "bytes", - "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "http-body-util", - "hyper 1.5.2", - "hyper-util", - "pin-project-lite", - "rustls 0.23.20", - "rustls-pemfile 2.2.0", - "rustls-pki-types", - "tokio", - "tokio-rustls 0.26.1", - "tower 0.4.13", - "tower-service", -] - [[package]] name = "backtrace" version = "0.3.74" @@ -2751,9 +2727,7 @@ dependencies = [ "axum", "axum-client-ip", "axum-extra", - "axum-server", "built", - "cfg-if", "clap", "color-backtrace", "dotenvy", @@ -2764,7 +2738,6 @@ dependencies = [ "redis", "ref-map", "rust-s3", - "rustls 0.23.20", "serde", "str-macro", "thiserror 2.0.11", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 20bb4de3a3..4f533db9f7 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -12,18 +12,12 @@ version = "2025.2.6" authors = ["Emmie Smith <emmie.maeda@gmail.com>"] edition = "2021" -[features] -default = [] -tls = ["axum-server"] - [dependencies] accept-language = "3" anyhow = "1" axum = { version = "0.8", features = ["http1", "http2", "macros", "tokio", "tower-log", "tracing"], default-features = false } axum-client-ip = "0.7" axum-extra = { version = "0.10", features = ["attachment"] } -axum-server = { version = "0.7", features = ["tls-rustls-no-provider"], optional = true } -cfg-if = "1" clap = "4" color-backtrace = "0.6" dotenvy = "0.15" @@ -34,7 +28,6 @@ paste = "1" redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "r2d2", "tokio-comp", "tokio-rustls-comp"], default-features = false } ref-map = "0.1" rust-s3 = { version = "0.35", features = ["with-tokio", "tokio-rustls-tls"], default-features = false } -rustls = { version = "0.23", features = ["logging", "std", "ring", "tls12"], default-features = false } # see https://github.com/programatik29/axum-server/issues/153#issuecomment-2605740256 serde = { version = "1", features = ["derive"] } str-macro = "1" thiserror = "2" diff --git a/wws/src/config/mod.rs b/wws/src/config/mod.rs index 825bc67b5e..76e3429f7d 100644 --- a/wws/src/config/mod.rs +++ b/wws/src/config/mod.rs @@ -26,7 +26,6 @@ pub use self::object::Config; pub use self::secrets::Secrets; use self::args::Arguments; -use cfg_if::cfg_if; use dotenvy::dotenv; use ref_map::*; use s3::{creds::Credentials, region::Region}; @@ -49,20 +48,6 @@ pub fn load_config() -> (Config, Secrets) { }; } - // The OsString version of get_env!() - #[cfg(feature = "tls")] - macro_rules! get_env_os { - ($name:expr) => { - match env::var_os($name) { - Some(value) => value, - None => { - eprintln!("Unable to read environment variable {}", $name); - process::exit(1); - } - } - }; - } - // Process arguments and overrides let Arguments { enable_trace, @@ -139,40 +124,12 @@ pub fn load_config() -> (Config, Secrets) { } }; - let client_ip_source = match get_env!("CLIENT_IP_SOURCE").parse() { - Ok(ip_source) => ip_source, - Err(_) => { - eprintln!("CLIENT_IP_SOURCE variable does not have a valid enum value"); - process::exit(1); - } - }; - - cfg_if! 
{ - if #[cfg(feature = "tls")] { - let tls_certificate = PathBuf::from(get_env_os!("TLS_CERTIFICATE")); - let tls_secret_key = PathBuf::from(get_env_os!("TLS_SECRET_KEY")); - } - } - // Build and return - - cfg_if! { - if #[cfg(feature = "tls")] { - let config = Config { - enable_trace, - pid_file, - address, - tls_certificate, - tls_secret_key, - }; - } else { - let config = Config { - enable_trace, - pid_file, - address, - }; - } - } + let config = Config { + enable_trace, + pid_file, + address, + }; let secrets = Secrets { deepwell_url, @@ -182,7 +139,6 @@ pub fn load_config() -> (Config, Secrets) { s3_region, s3_path_style, s3_credentials, - client_ip_source, }; (config, secrets) diff --git a/wws/src/config/object.rs b/wws/src/config/object.rs index e5169aeef0..f0d13dd967 100644 --- a/wws/src/config/object.rs +++ b/wws/src/config/object.rs @@ -32,12 +32,4 @@ pub struct Config { /// The address the server will be hosted on. pub address: SocketAddr, - - /// Specify where to get the certificate PEM file for TLS. - #[cfg(feature = "tls")] - pub tls_certificate: PathBuf, - - /// Specify where to get the secret key PEM file for TLS. - #[cfg(feature = "tls")] - pub tls_secret_key: PathBuf, } diff --git a/wws/src/config/secrets.rs b/wws/src/config/secrets.rs index c80461e5a0..fcf9e8ace1 100644 --- a/wws/src/config/secrets.rs +++ b/wws/src/config/secrets.rs @@ -18,7 +18,6 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ -use axum_client_ip::SecureClientIpSource; use s3::{creds::Credentials, region::Region}; #[derive(Debug, Clone)] @@ -63,23 +62,4 @@ pub struct Secrets { /// Alternatively you can have it read from the AWS credentials file. /// The profile to read from can be set in the `AWS_PROFILE_NAME` environment variable. pub s3_credentials: Credentials, - - /// Specify how client IP addresses are determined. - /// - /// In the crate `axum-client-ip`, you need to specify hoow `SecureClientIp` sources its - /// information, since it depends on the exact stack your web application is in. 
- /// - /// Set using environment variable `CLIENT_IP_SOURCE`, must have one of the following values: - /// (see [`SecureClientIpSource`]) - /// * `RightmostForwarded` - /// * `RightmostXForwardedFor` - /// * `XRealIp` - /// * `FlyClientIp` - /// * `TrueClientIp` - /// * `CfConnectingIp` - /// * `ConnectInfo` - /// * `CloudFrontViewerAddress` - /// - /// [`SecureClientIpSource`]: https://docs.rs/axum-client-ip/latest/axum_client_ip/enum.SecureClientIpSource.html)) - pub client_ip_source: SecureClientIpSource, } diff --git a/wws/src/main.rs b/wws/src/main.rs index 4bbae7811e..d53be88f4d 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -45,16 +45,15 @@ mod route; mod state; mod trace; -use self::config::{load_config, Config}; +use self::config::load_config; use self::route::build_router; use self::state::build_server_state; use self::trace::setup_tracing; use anyhow::Result; -use axum::Router; use std::fs::File; use std::io::Write; -use std::net::SocketAddr; use std::process; +use tokio::net::TcpListener; #[tokio::main] async fn main() -> Result<()> { @@ -75,6 +74,7 @@ async fn main() -> Result<()> { // Connect to services, build server state and then run let state = build_server_state(secrets).await?; let router = build_router(state); + let app = router.into_make_service(); // Begin listening info!( @@ -82,39 +82,6 @@ async fn main() -> Result<()> { "Listening to connections...", ); - serve(&config, router).await?; - Ok(()) -} - -// Snake oil TLS -// For local -#[cfg(feature = "tls")] -async fn serve(config: &Config, router: Router) -> Result<()> { - use axum_server::tls_rustls::RustlsConfig; - - // NOTE: This does not include a HTTP -> HTTPS redirector - let app = router.into_make_service_with_connect_info::<SocketAddr>(); - let tls_config = RustlsConfig::from_pem_file( - // Added in Docker container - &config.tls_certificate, - &config.tls_secret_key, - ) - .await?; - - axum_server::bind_rustls(config.address, tls_config) - .serve(app) - .await?; - - Ok(()) -} - -// TLS-terminated HTTP server -// For dev and prod -#[cfg(not(feature = "tls"))] -async fn serve(config: &Config, router: Router) -> Result<()> { - use tokio::net::TcpListener; - - let app = router.into_make_service_with_connect_info::<SocketAddr>(); let listener = TcpListener::bind(config.address).await?; axum::serve(listener, app).await?; Ok(()) diff --git a/wws/src/route.rs b/wws/src/route.rs index f9bd0d6591..bd9b44d129 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -26,7 +26,7 @@ use axum::{ routing::{any, get}, Router, }; -use axum_client_ip::SecureClientIp; +use axum_client_ip::{SecureClientIp, SecureClientIpSource}; use axum_extra::extract::Host; use std::sync::Arc; use tower_http::{ @@ -34,6 +34,11 @@ use tower_http::{ set_header::SetResponseHeaderLayer, trace::TraceLayer, }; +/// How we determine what the "real IP" of the user is, since this service sits behind a reverse proxy. +/// Here, since we are using [Caddy](https://caddyserver.com), which sets `X-Forwarded-For`, we +/// should use `SecureClientIpSource::RightmostXForwardedFor`. 
+pub const REAL_IP_SOURCE: SecureClientIpSource = SecureClientIpSource::RightmostXForwardedFor; + pub fn build_router(state: ServerState) -> Router { let main_state = Arc::clone(&state); let file_state = Arc::clone(&state); @@ -129,7 +134,7 @@ pub fn build_router(state: ServerState) -> Router { .br(true) .zstd(true), ) - .layer(state.client_ip_source.clone().into_extension()) + .layer(REAL_IP_SOURCE.clone().into_extension()) .layer(SetResponseHeaderLayer::overriding( HEADER_IS_WIKIJUMP, Some(HeaderValue::from_static("1")), diff --git a/wws/src/state.rs b/wws/src/state.rs index 24301da5ce..190faf7376 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -27,7 +27,6 @@ use crate::{ host::SiteAndHost, }; use axum::body::Body; -use axum_client_ip::SecureClientIpSource; use hyper_util::{ client::legacy::{connect::HttpConnector, Client as HyperClient}, rt::TokioExecutor, @@ -49,7 +48,6 @@ pub struct ServerStateInner { pub framerail: Framerail, pub cache: Cache, pub s3_bucket: Box<Bucket>, - pub client_ip_source: SecureClientIpSource, } pub async fn build_server_state( @@ -61,7 +59,6 @@ pub async fn build_server_state( s3_region, s3_credentials, s3_path_style, - client_ip_source, }: Secrets, ) -> Result<ServerState> { let framerail = Framerail::new(framerail_host); @@ -88,7 +85,6 @@ pub async fn build_server_state( framerail, cache, s3_bucket, - client_ip_source, })) } From 3a93cd28da3547eceeba747f80b1bccbcfa970a8 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 06:48:00 -0400 Subject: [PATCH 300/306] Add compression settings to Caddyfile generation. The defaults Caddy uses are fine, we just need to enable it. --- deepwell/src/services/caddy/service.rs | 4 ++++ deepwell/src/services/caddy/test.rs | 27 ++++++++++++++++++++++++-- 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs index 51a890dbd3..14259d5664 100644 --- a/deepwell/src/services/caddy/service.rs +++ b/deepwell/src/services/caddy/service.rs @@ -169,6 +169,9 @@ impl CaddyService { }} redir @files https://{{vars.site_slug}}{files_domain}{{uri}} + # Enable default compression settings + encode + # Finally, proxy to framerail to get the actual HTML # Note, the x-wikijump-site-* headers have already been set at this point reverse_proxy http://{framerail_host} @@ -260,6 +263,7 @@ www.{domain} {{ # (serve_files) {{ + encode reverse_proxy http://{wws_host} }} diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index fea8ba3d04..4e393f7de2 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -173,6 +173,9 @@ const CADDYFILE_BASIC_PROD: &str = "\ } redir @files https://{vars.site_slug}.wjfiles.test{uri} + # Enable default compression settings + encode + # Finally, proxy to framerail to get the actual HTML # Note, the x-wikijump-site-* headers have already been set at this point reverse_proxy http://framerail:3000 @@ -220,6 +223,7 @@ www.example.com { # (serve_files) { + encode reverse_proxy http://wws:7000 } @@ -278,6 +282,9 @@ const CADDYFILE_BASIC_LOCAL: &str = "\ } redir @files https://{vars.site_slug}.wjfiles.localhost{uri} + # Enable default compression settings + encode + # Finally, proxy to framerail to get the actual HTML # Note, the x-wikijump-site-* headers have already been set at this point reverse_proxy http://framerail:3000 @@ -325,6 +332,7 @@ www.example.com { # (serve_files) { + encode reverse_proxy http://wws:7000 } @@ 
-387,6 +395,9 @@ const CADDYFILE_BASIC_LOCAL_DEV: &str = "\ } redir @files https://{vars.site_slug}.wjfiles.localhost{uri} + # Enable default compression settings + encode + # Finally, proxy to framerail to get the actual HTML # Note, the x-wikijump-site-* headers have already been set at this point reverse_proxy http://framerail:3000 @@ -434,6 +445,7 @@ www.example.com { # (serve_files) { + encode reverse_proxy http://wws:7000 } @@ -492,6 +504,9 @@ const CADDYFILE_BASIC_DIFFERENT_PROXIES: &str = "\ } redir @files https://{vars.site_slug}.wjfiles.test{uri} + # Enable default compression settings + encode + # Finally, proxy to framerail to get the actual HTML # Note, the x-wikijump-site-* headers have already been set at this point reverse_proxy http://web_proxy_host @@ -539,6 +554,7 @@ www.example.com { # (serve_files) { + encode reverse_proxy http://wws_proxy_host } @@ -596,6 +612,9 @@ const CADDYFILE_FULL_PROD: &str = "\ } redir @files https://{vars.site_slug}.wjfiles.test{uri} + # Enable default compression settings + encode + # Finally, proxy to framerail to get the actual HTML # Note, the x-wikijump-site-* headers have already been set at this point reverse_proxy http://framerail:3000 @@ -731,6 +750,7 @@ www.scpwiki.wikijump.test { # (serve_files) { + encode reverse_proxy http://wws:7000 } @@ -797,6 +817,9 @@ const CADDYFILE_LONG_DOMAIN: &str = "\ } redir @files https://{vars.site_slug}.wjfiles.host.site.somedomain.example.com{uri} + # Enable default compression settings + encode + # Finally, proxy to framerail to get the actual HTML # Note, the x-wikijump-site-* headers have already been set at this point reverse_proxy http://framerail:3000 @@ -844,6 +867,7 @@ www.example.com { # (serve_files) { + encode reverse_proxy http://wws:7000 } @@ -869,8 +893,7 @@ https:// { request_header X-Wikijump-Special-Error 1 rewrite * /-/special-error/missing-site reverse_proxy http://framerail:3000 -} -"; +}"; #[test] fn generate_caddyfiles() { From c6197b4fc01cb2562e9e5e8c6efa357fcf7dd045 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 06:58:17 -0400 Subject: [PATCH 301/306] Always strip all X-Wikijump-* headers before reverse_proxy. Just as a safety measure, we toss all of them every time. This way, there is no possibility for clients sneaking in directives that cause our service to behave in unintended ways. 
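To illustrate why this matters (hypothetical sketch, not the actual framerail
or wws handler code): services behind Caddy read X-Wikijump-Site-Id and
X-Wikijump-Site-Slug as authoritative, so those headers must only ever carry
the values Caddy itself set after resolving the host, never something
forwarded from the client.

    use axum::http::HeaderMap;

    // Hypothetical backend-side lookup. Because the (strip_headers) snippet
    // removes any client-supplied X-Wikijump-* headers before Caddy sets its
    // own, this value reflects the site the proxy resolved, not the client.
    fn site_id_from_proxy(headers: &HeaderMap) -> Option<i64> {
        headers
            .get("x-wikijump-site-id")?
            .to_str()
            .ok()?
            .parse()
            .ok()
    }
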
--- deepwell/src/services/caddy/service.rs | 8 +++++ deepwell/src/services/caddy/test.rs | 48 ++++++++++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs index 14259d5664..28ce94e944 100644 --- a/deepwell/src/services/caddy/service.rs +++ b/deepwell/src/services/caddy/service.rs @@ -147,11 +147,18 @@ impl CaddyService { "\ }} +(strip_headers) {{ + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +}} + # # MAIN # (serve_main) {{ + import strip_headers + # Redirect, route is on the files server @files {{ path /*/code/* @@ -263,6 +270,7 @@ www.{domain} {{ # (serve_files) {{ + import strip_headers encode reverse_proxy http://{wws_host} }} diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index 4e393f7de2..127e65a3b6 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -151,11 +151,18 @@ const CADDYFILE_BASIC_PROD: &str = "\ } } +(strip_headers) { + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +} + # # MAIN # (serve_main) { + import strip_headers + # Redirect, route is on the files server @files { path /*/code/* @@ -223,6 +230,7 @@ www.example.com { # (serve_files) { + import strip_headers encode reverse_proxy http://wws:7000 } @@ -260,11 +268,18 @@ const CADDYFILE_BASIC_LOCAL: &str = "\ skip_install_trust } +(strip_headers) { + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +} + # # MAIN # (serve_main) { + import strip_headers + # Redirect, route is on the files server @files { path /*/code/* @@ -332,6 +347,7 @@ www.example.com { # (serve_files) { + import strip_headers encode reverse_proxy http://wws:7000 } @@ -373,11 +389,18 @@ const CADDYFILE_BASIC_LOCAL_DEV: &str = "\ skip_install_trust } +(strip_headers) { + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +} + # # MAIN # (serve_main) { + import strip_headers + # Redirect, route is on the files server @files { path /*/code/* @@ -445,6 +468,7 @@ www.example.com { # (serve_files) { + import strip_headers encode reverse_proxy http://wws:7000 } @@ -482,11 +506,18 @@ const CADDYFILE_BASIC_DIFFERENT_PROXIES: &str = "\ } } +(strip_headers) { + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +} + # # MAIN # (serve_main) { + import strip_headers + # Redirect, route is on the files server @files { path /*/code/* @@ -554,6 +585,7 @@ www.example.com { # (serve_files) { + import strip_headers encode reverse_proxy http://wws_proxy_host } @@ -590,11 +622,18 @@ const CADDYFILE_FULL_PROD: &str = "\ } } +(strip_headers) { + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +} + # # MAIN # (serve_main) { + import strip_headers + # Redirect, route is on the files server @files { path /*/code/* @@ -750,6 +789,7 @@ www.scpwiki.wikijump.test { # (serve_files) { + import strip_headers encode reverse_proxy http://wws:7000 } @@ -795,11 +835,18 @@ const CADDYFILE_LONG_DOMAIN: &str = "\ } } +(strip_headers) { + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +} + # # MAIN # (serve_main) { + import strip_headers + # Redirect, route is on the files server @files { path /*/code/* @@ -867,6 +914,7 @@ www.example.com { # (serve_files) { + import strip_headers encode reverse_proxy http://wws:7000 } From 14a2d8c0b1ec4cc37bc117018ed2c2ed52f8973c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 
22:56:10 -0400 Subject: [PATCH 302/306] Move output Caddyfiles to new directory. This way we can more easily view the outputs and get syntax highlighting. --- deepwell/src/services/caddy/test.rs | 825 +------------------------ deepwell/test/Caddyfile.basic_local | 116 ++++ deepwell/test/Caddyfile.basic_localdev | 119 ++++ deepwell/test/Caddyfile.basic_prod | 114 ++++ deepwell/test/Caddyfile.full_prod | 211 +++++++ deepwell/test/Caddyfile.long | 114 ++++ deepwell/test/Caddyfile.proxies | 114 ++++ 7 files changed, 806 insertions(+), 807 deletions(-) create mode 100644 deepwell/test/Caddyfile.basic_local create mode 100644 deepwell/test/Caddyfile.basic_localdev create mode 100644 deepwell/test/Caddyfile.basic_prod create mode 100644 deepwell/test/Caddyfile.full_prod create mode 100644 deepwell/test/Caddyfile.long create mode 100644 deepwell/test/Caddyfile.proxies diff --git a/deepwell/src/services/caddy/test.rs b/deepwell/src/services/caddy/test.rs index 127e65a3b6..a7c642d351 100644 --- a/deepwell/src/services/caddy/test.rs +++ b/deepwell/src/services/caddy/test.rs @@ -143,805 +143,24 @@ fn build_site_data() -> (SiteData, SiteData) { (basic, full) } -const CADDYFILE_BASIC_PROD: &str = "\ -# Global options -{ - metrics { - per_host - } -} - -(strip_headers) { - # Strip internal headers used by Wikijump - request_header -X-Wikijump-* -} - -# -# MAIN -# - -(serve_main) { - import strip_headers - - # Redirect, route is on the files server - @files { - path /*/code/* - path /*/html/* - path /*/file/* # for the /{slug}/file/{filename} convenience routes - path /*/download/* - path /local--files/* - path /local--code/* - path /local--html/* - path /-/files/* - path /-/file/* - path /-/download/* - path /-/code/* - path /-/html/* - } - redir @files https://{vars.site_slug}.wjfiles.test{uri} - - # Enable default compression settings - encode - - # Finally, proxy to framerail to get the actual HTML - # Note, the x-wikijump-site-* headers have already been set at this point - reverse_proxy http://framerail:3000 -} - -foo.wikijump.test { - vars { - site_id 1 - site_slug foo - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.foo.wikijump.test { - redir https://foo.wikijump.test{uri} -} - -bar.wikijump.test, -www.bar.wikijump.test { - redir https://example.com{uri} -} - -example.com { - vars { - site_id 2 - site_slug bar - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.example.com { - redir https://example.com{uri} -} - -# -# FILES -# - -(serve_files) { - import strip_headers - encode - reverse_proxy http://wws:7000 -} - -*.wjfiles.test { - @foo host foo.wjfiles.test - vars @foo site_id 1 - - @bar host bar.wjfiles.test - vars @bar site_id 2 - - request_header X-Wikijump-Site-Slug {labels.2} - request_header X-Wikijump-Site-Id {vars.site_id} - - import serve_files -} - -# -# FALLBACK -# - -http://, -https:// { - request_header X-Wikijump-Special-Error 1 - rewrite * /-/special-error/missing-site - reverse_proxy http://framerail:3000 -}"; - -const CADDYFILE_BASIC_LOCAL: &str = "\ -# Global options -{ - metrics { - per_host - } - skip_install_trust -} - -(strip_headers) { - # Strip internal headers used by Wikijump - request_header -X-Wikijump-* -} - -# -# MAIN -# - -(serve_main) { - import strip_headers - - # Redirect, route is on the files server - @files { - path /*/code/* - path /*/html/* - path /*/file/* # for the 
/{slug}/file/{filename} convenience routes - path /*/download/* - path /local--files/* - path /local--code/* - path /local--html/* - path /-/files/* - path /-/file/* - path /-/download/* - path /-/code/* - path /-/html/* - } - redir @files https://{vars.site_slug}.wjfiles.localhost{uri} - - # Enable default compression settings - encode - - # Finally, proxy to framerail to get the actual HTML - # Note, the x-wikijump-site-* headers have already been set at this point - reverse_proxy http://framerail:3000 -} - -foo.wikijump.localhost { - vars { - site_id 1 - site_slug foo - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.foo.wikijump.localhost { - redir https://foo.wikijump.localhost{uri} -} - -bar.wikijump.localhost, -www.bar.wikijump.localhost { - redir https://example.com{uri} -} - -example.com { - vars { - site_id 2 - site_slug bar - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.example.com { - redir https://example.com{uri} -} - -# -# FILES -# - -(serve_files) { - import strip_headers - encode - reverse_proxy http://wws:7000 -} - -*.wjfiles.localhost { - @foo host foo.wjfiles.localhost - vars @foo site_id 1 - - @bar host bar.wjfiles.localhost - vars @bar site_id 2 - - request_header X-Wikijump-Site-Slug {labels.2} - request_header X-Wikijump-Site-Id {vars.site_id} - - import serve_files -} - -# -# FALLBACK -# - -http://, -https://, -localhost { - request_header X-Wikijump-Special-Error 1 - rewrite * /-/special-error/missing-site - reverse_proxy http://framerail:3000 -}"; - -const CADDYFILE_BASIC_LOCAL_DEV: &str = "\ -# Global options -{ - metrics { - per_host - } - http_port 8000 - https_port 8443 - debug - skip_install_trust -} - -(strip_headers) { - # Strip internal headers used by Wikijump - request_header -X-Wikijump-* -} - -# -# MAIN -# - -(serve_main) { - import strip_headers - - # Redirect, route is on the files server - @files { - path /*/code/* - path /*/html/* - path /*/file/* # for the /{slug}/file/{filename} convenience routes - path /*/download/* - path /local--files/* - path /local--code/* - path /local--html/* - path /-/files/* - path /-/file/* - path /-/download/* - path /-/code/* - path /-/html/* - } - redir @files https://{vars.site_slug}.wjfiles.localhost{uri} - - # Enable default compression settings - encode - - # Finally, proxy to framerail to get the actual HTML - # Note, the x-wikijump-site-* headers have already been set at this point - reverse_proxy http://framerail:3000 -} - -foo.wikijump.localhost { - vars { - site_id 1 - site_slug foo - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.foo.wikijump.localhost { - redir https://foo.wikijump.localhost{uri} -} - -bar.wikijump.localhost, -www.bar.wikijump.localhost { - redir https://example.com{uri} -} - -example.com { - vars { - site_id 2 - site_slug bar - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.example.com { - redir https://example.com{uri} -} - -# -# FILES -# - -(serve_files) { - import strip_headers - encode - reverse_proxy http://wws:7000 -} - -*.wjfiles.localhost { - @foo host foo.wjfiles.localhost - vars @foo site_id 1 - - @bar host bar.wjfiles.localhost - vars @bar site_id 2 - - request_header X-Wikijump-Site-Slug {labels.2} - 
request_header X-Wikijump-Site-Id {vars.site_id} - - import serve_files -} - -# -# FALLBACK -# - -http://, -https://, -localhost { - request_header X-Wikijump-Special-Error 1 - rewrite * /-/special-error/missing-site - reverse_proxy http://framerail:3000 -}"; - -const CADDYFILE_BASIC_DIFFERENT_PROXIES: &str = "\ -# Global options -{ - metrics { - per_host - } -} - -(strip_headers) { - # Strip internal headers used by Wikijump - request_header -X-Wikijump-* -} - -# -# MAIN -# - -(serve_main) { - import strip_headers - - # Redirect, route is on the files server - @files { - path /*/code/* - path /*/html/* - path /*/file/* # for the /{slug}/file/{filename} convenience routes - path /*/download/* - path /local--files/* - path /local--code/* - path /local--html/* - path /-/files/* - path /-/file/* - path /-/download/* - path /-/code/* - path /-/html/* - } - redir @files https://{vars.site_slug}.wjfiles.test{uri} - - # Enable default compression settings - encode - - # Finally, proxy to framerail to get the actual HTML - # Note, the x-wikijump-site-* headers have already been set at this point - reverse_proxy http://web_proxy_host -} - -foo.wikijump.test { - vars { - site_id 1 - site_slug foo - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.foo.wikijump.test { - redir https://foo.wikijump.test{uri} -} - -bar.wikijump.test, -www.bar.wikijump.test { - redir https://example.com{uri} -} - -example.com { - vars { - site_id 2 - site_slug bar - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.example.com { - redir https://example.com{uri} -} - -# -# FILES -# - -(serve_files) { - import strip_headers - encode - reverse_proxy http://wws_proxy_host -} - -*.wjfiles.test { - @foo host foo.wjfiles.test - vars @foo site_id 1 - - @bar host bar.wjfiles.test - vars @bar site_id 2 - - request_header X-Wikijump-Site-Slug {labels.2} - request_header X-Wikijump-Site-Id {vars.site_id} - - import serve_files -} - -# -# FALLBACK -# - -http://, -https:// { - request_header X-Wikijump-Special-Error 1 - rewrite * /-/special-error/missing-site - reverse_proxy http://web_proxy_host -}"; - -const CADDYFILE_FULL_PROD: &str = "\ -# Global options -{ - metrics { - per_host - } -} - -(strip_headers) { - # Strip internal headers used by Wikijump - request_header -X-Wikijump-* -} - -# -# MAIN -# - -(serve_main) { - import strip_headers - - # Redirect, route is on the files server - @files { - path /*/code/* - path /*/html/* - path /*/file/* # for the /{slug}/file/{filename} convenience routes - path /*/download/* - path /local--files/* - path /local--code/* - path /local--html/* - path /-/files/* - path /-/file/* - path /-/download/* - path /-/code/* - path /-/html/* - } - redir @files https://{vars.site_slug}.wjfiles.test{uri} - - # Enable default compression settings - encode - - # Finally, proxy to framerail to get the actual HTML - # Note, the x-wikijump-site-* headers have already been set at this point - reverse_proxy http://framerail:3000 -} - -wikijump.test { - vars { - site_id 1 - site_slug www - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.wikijump.test { - redir https://wikijump.test{uri} -} - -empty.wikijump.test { - vars { - site_id 2 - site_slug empty - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header 
X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.empty.wikijump.test { - redir https://empty.wikijump.test{uri} -} - -mytest.wikijump.test { - vars { - site_id 3 - site_slug mytest - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.mytest.wikijump.test { - redir https://mytest.wikijump.test{uri} -} - -example.com, -www.example.com { - redir https://mytest.wikijump.test{uri} -} - -example.net, -www.example.net { - redir https://mytest.wikijump.test{uri} -} - -check.wikijump.test, -www.check.wikijump.test { - redir https://mytest.wikijump.test{uri} -} - -wanderers-library.wikijump.test, -www.wanderers-library.wikijump.test { - redir https://wandererslibrary.com{uri} -} - -wandererslibrary.com { - vars { - site_id 4 - site_slug wanderers-library - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.wandererslibrary.com { - redir https://wandererslibrary.com{uri} -} - -scp-wiki.wikijump.test, -www.scp-wiki.wikijump.test { - redir https://scpwiki.com{uri} -} - -scpwiki.com { - vars { - site_id 5 - site_slug scp-wiki - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.scpwiki.com { - redir https://scpwiki.com{uri} -} - -scp-wiki.net, -www.scp-wiki.net { - redir https://scpwiki.com{uri} -} - -scp.foundation, -www.scp.foundation { - redir https://scpwiki.com{uri} -} - -foundation.scp, -www.foundation.scp { - redir https://scpwiki.com{uri} -} - -scpwiki.wikijump.test, -www.scpwiki.wikijump.test { - redir https://scpwiki.com{uri} -} - -# -# FILES -# - -(serve_files) { - import strip_headers - encode - reverse_proxy http://wws:7000 -} - -*.wjfiles.test { - @www host www.wjfiles.test - vars @www site_id 1 - - @empty host empty.wjfiles.test - vars @empty site_id 2 - - @mytest host mytest.wjfiles.test - vars @mytest site_id 3 - - @wanderers-library host wanderers-library.wjfiles.test - vars @wanderers-library site_id 4 - - @scp-wiki host scp-wiki.wjfiles.test - vars @scp-wiki site_id 5 - - request_header X-Wikijump-Site-Slug {labels.2} - request_header X-Wikijump-Site-Id {vars.site_id} - - import serve_files -} - -# -# FALLBACK -# - -http://, -https:// { - request_header X-Wikijump-Special-Error 1 - rewrite * /-/special-error/missing-site - reverse_proxy http://framerail:3000 -}"; - -const CADDYFILE_LONG_DOMAIN: &str = "\ -# Global options -{ - metrics { - per_host - } -} - -(strip_headers) { - # Strip internal headers used by Wikijump - request_header -X-Wikijump-* -} - -# -# MAIN -# - -(serve_main) { - import strip_headers - - # Redirect, route is on the files server - @files { - path /*/code/* - path /*/html/* - path /*/file/* # for the /{slug}/file/{filename} convenience routes - path /*/download/* - path /local--files/* - path /local--code/* - path /local--html/* - path /-/files/* - path /-/file/* - path /-/download/* - path /-/code/* - path /-/html/* - } - redir @files https://{vars.site_slug}.wjfiles.host.site.somedomain.example.com{uri} - - # Enable default compression settings - encode - - # Finally, proxy to framerail to get the actual HTML - # Note, the x-wikijump-site-* headers have already been set at this point - reverse_proxy http://framerail:3000 -} - -foo.site.wikijump.com { - vars { - site_id 1 - site_slug foo - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header 
X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.foo.site.wikijump.com { - redir https://foo.site.wikijump.com{uri} -} - -bar.site.wikijump.com, -www.bar.site.wikijump.com { - redir https://example.com{uri} -} - -example.com { - vars { - site_id 2 - site_slug bar - } - - request_header X-Wikijump-Site-Id {vars.site_id} - request_header X-Wikijump-Site-Slug {vars.site_slug} - - import serve_main -} - -www.example.com { - redir https://example.com{uri} -} - -# -# FILES -# - -(serve_files) { - import strip_headers - encode - reverse_proxy http://wws:7000 -} - -*.wjfiles.host.site.somedomain.example.com { - @foo host foo.wjfiles.host.site.somedomain.example.com - vars @foo site_id 1 - - @bar host bar.wjfiles.host.site.somedomain.example.com - vars @bar site_id 2 - - request_header X-Wikijump-Site-Slug {labels.6} - request_header X-Wikijump-Site-Id {vars.site_id} - - import serve_files +macro_rules! test_output { + ($suffix:expr) => { + include_str!( + concat!( + env!("CARGO_MANIFEST_DIR"), + "/test/Caddyfile.", + $suffix, + ) + ) + }; } -# -# FALLBACK -# - -http://, -https:// { - request_header X-Wikijump-Special-Error 1 - rewrite * /-/special-error/missing-site - reverse_proxy http://framerail:3000 -}"; +const CADDYFILE_BASIC_PROD: &str = test_output!("basic_prod"); +const CADDYFILE_BASIC_LOCAL: &str = test_output!("basic_local"); +const CADDYFILE_BASIC_LOCAL_DEV: &str = test_output!("basic_localdev"); +const CADDYFILE_BASIC_DIFFERENT_PROXIES: &str = test_output!("proxies"); +const CADDYFILE_FULL_PROD: &str = test_output!("full_prod"); +const CADDYFILE_LONG_DOMAIN: &str = test_output!("long"); #[test] fn generate_caddyfiles() { @@ -959,23 +178,15 @@ fn generate_caddyfiles() { macro_rules! check { ($expected:expr, $config:expr, $sites:expr, $options:expr $(,)?) => {{ let mut actual = CaddyService::generate_custom(&$config, &$options, &$sites); - let expected = $expected; // Strip off trailing newlines, not something we care about, // and precisely managing them is a waste of time. + + let expected = $expected.trim(); while actual.ends_with('\n') { actual.pop(); } - // Meanwhile, if the 'expected' string ends with newline(s), - // it's never going to match the above. - // Such constant strings should be fixed. - assert!( - !expected.ends_with('\n'), - "Expected test string {} ends in a newline! 
Fix the test case.", - stringify!($expected), - ); - assert_eq!( expected, actual, diff --git a/deepwell/test/Caddyfile.basic_local b/deepwell/test/Caddyfile.basic_local new file mode 100644 index 0000000000..1193957ba8 --- /dev/null +++ b/deepwell/test/Caddyfile.basic_local @@ -0,0 +1,116 @@ +# Global options +{ + metrics { + per_host + } + skip_install_trust +} + +(strip_headers) { + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +} + +# +# MAIN +# + +(serve_main) { + import strip_headers + + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.localhost{uri} + + # Enable default compression settings + encode + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://framerail:3000 +} + +foo.wikijump.localhost { + vars { + site_id 1 + site_slug foo + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.foo.wikijump.localhost { + redir https://foo.wikijump.localhost{uri} +} + +bar.wikijump.localhost, +www.bar.wikijump.localhost { + redir https://example.com{uri} +} + +example.com { + vars { + site_id 2 + site_slug bar + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.example.com { + redir https://example.com{uri} +} + +# +# FILES +# + +(serve_files) { + import strip_headers + encode + reverse_proxy http://wws:7000 +} + +*.wjfiles.localhost { + @foo host foo.wjfiles.localhost + vars @foo site_id 1 + + @bar host bar.wjfiles.localhost + vars @bar site_id 2 + + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files +} + +# +# FALLBACK +# + +http://, +https://, +localhost { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://framerail:3000 +} diff --git a/deepwell/test/Caddyfile.basic_localdev b/deepwell/test/Caddyfile.basic_localdev new file mode 100644 index 0000000000..a497852250 --- /dev/null +++ b/deepwell/test/Caddyfile.basic_localdev @@ -0,0 +1,119 @@ +# Global options +{ + metrics { + per_host + } + http_port 8000 + https_port 8443 + debug + skip_install_trust +} + +(strip_headers) { + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +} + +# +# MAIN +# + +(serve_main) { + import strip_headers + + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.localhost{uri} + + # Enable default compression settings + encode + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://framerail:3000 +} + +foo.wikijump.localhost { + vars { + site_id 1 + site_slug foo + } + + request_header X-Wikijump-Site-Id 
{vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.foo.wikijump.localhost { + redir https://foo.wikijump.localhost{uri} +} + +bar.wikijump.localhost, +www.bar.wikijump.localhost { + redir https://example.com{uri} +} + +example.com { + vars { + site_id 2 + site_slug bar + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.example.com { + redir https://example.com{uri} +} + +# +# FILES +# + +(serve_files) { + import strip_headers + encode + reverse_proxy http://wws:7000 +} + +*.wjfiles.localhost { + @foo host foo.wjfiles.localhost + vars @foo site_id 1 + + @bar host bar.wjfiles.localhost + vars @bar site_id 2 + + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files +} + +# +# FALLBACK +# + +http://, +https://, +localhost { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://framerail:3000 +} diff --git a/deepwell/test/Caddyfile.basic_prod b/deepwell/test/Caddyfile.basic_prod new file mode 100644 index 0000000000..3c22500927 --- /dev/null +++ b/deepwell/test/Caddyfile.basic_prod @@ -0,0 +1,114 @@ +# Global options +{ + metrics { + per_host + } +} + +(strip_headers) { + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +} + +# +# MAIN +# + +(serve_main) { + import strip_headers + + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.test{uri} + + # Enable default compression settings + encode + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://framerail:3000 +} + +foo.wikijump.test { + vars { + site_id 1 + site_slug foo + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.foo.wikijump.test { + redir https://foo.wikijump.test{uri} +} + +bar.wikijump.test, +www.bar.wikijump.test { + redir https://example.com{uri} +} + +example.com { + vars { + site_id 2 + site_slug bar + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.example.com { + redir https://example.com{uri} +} + +# +# FILES +# + +(serve_files) { + import strip_headers + encode + reverse_proxy http://wws:7000 +} + +*.wjfiles.test { + @foo host foo.wjfiles.test + vars @foo site_id 1 + + @bar host bar.wjfiles.test + vars @bar site_id 2 + + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files +} + +# +# FALLBACK +# + +http://, +https:// { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://framerail:3000 +} diff --git a/deepwell/test/Caddyfile.full_prod b/deepwell/test/Caddyfile.full_prod new file mode 100644 index 0000000000..60be71f4b3 --- /dev/null +++ b/deepwell/test/Caddyfile.full_prod @@ -0,0 +1,211 @@ +# Global options +{ + metrics { + per_host + } +} + +(strip_headers) { + # Strip internal headers used by Wikijump + 
request_header -X-Wikijump-* +} + +# +# MAIN +# + +(serve_main) { + import strip_headers + + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.test{uri} + + # Enable default compression settings + encode + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://framerail:3000 +} + +wikijump.test { + vars { + site_id 1 + site_slug www + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.wikijump.test { + redir https://wikijump.test{uri} +} + +empty.wikijump.test { + vars { + site_id 2 + site_slug empty + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.empty.wikijump.test { + redir https://empty.wikijump.test{uri} +} + +mytest.wikijump.test { + vars { + site_id 3 + site_slug mytest + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.mytest.wikijump.test { + redir https://mytest.wikijump.test{uri} +} + +example.com, +www.example.com { + redir https://mytest.wikijump.test{uri} +} + +example.net, +www.example.net { + redir https://mytest.wikijump.test{uri} +} + +check.wikijump.test, +www.check.wikijump.test { + redir https://mytest.wikijump.test{uri} +} + +wanderers-library.wikijump.test, +www.wanderers-library.wikijump.test { + redir https://wandererslibrary.com{uri} +} + +wandererslibrary.com { + vars { + site_id 4 + site_slug wanderers-library + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.wandererslibrary.com { + redir https://wandererslibrary.com{uri} +} + +scp-wiki.wikijump.test, +www.scp-wiki.wikijump.test { + redir https://scpwiki.com{uri} +} + +scpwiki.com { + vars { + site_id 5 + site_slug scp-wiki + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.scpwiki.com { + redir https://scpwiki.com{uri} +} + +scp-wiki.net, +www.scp-wiki.net { + redir https://scpwiki.com{uri} +} + +scp.foundation, +www.scp.foundation { + redir https://scpwiki.com{uri} +} + +foundation.scp, +www.foundation.scp { + redir https://scpwiki.com{uri} +} + +scpwiki.wikijump.test, +www.scpwiki.wikijump.test { + redir https://scpwiki.com{uri} +} + +# +# FILES +# + +(serve_files) { + import strip_headers + encode + reverse_proxy http://wws:7000 +} + +*.wjfiles.test { + @www host www.wjfiles.test + vars @www site_id 1 + + @empty host empty.wjfiles.test + vars @empty site_id 2 + + @mytest host mytest.wjfiles.test + vars @mytest site_id 3 + + @wanderers-library host wanderers-library.wjfiles.test + vars @wanderers-library site_id 4 + + @scp-wiki host scp-wiki.wjfiles.test + vars @scp-wiki site_id 5 + + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files +} + +# +# FALLBACK +# + +http://, +https:// { + request_header X-Wikijump-Special-Error 1 + rewrite * 
/-/special-error/missing-site + reverse_proxy http://framerail:3000 +} diff --git a/deepwell/test/Caddyfile.long b/deepwell/test/Caddyfile.long new file mode 100644 index 0000000000..c166afd001 --- /dev/null +++ b/deepwell/test/Caddyfile.long @@ -0,0 +1,114 @@ +# Global options +{ + metrics { + per_host + } +} + +(strip_headers) { + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +} + +# +# MAIN +# + +(serve_main) { + import strip_headers + + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.host.site.somedomain.example.com{uri} + + # Enable default compression settings + encode + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://framerail:3000 +} + +foo.site.wikijump.com { + vars { + site_id 1 + site_slug foo + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.foo.site.wikijump.com { + redir https://foo.site.wikijump.com{uri} +} + +bar.site.wikijump.com, +www.bar.site.wikijump.com { + redir https://example.com{uri} +} + +example.com { + vars { + site_id 2 + site_slug bar + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.example.com { + redir https://example.com{uri} +} + +# +# FILES +# + +(serve_files) { + import strip_headers + encode + reverse_proxy http://wws:7000 +} + +*.wjfiles.host.site.somedomain.example.com { + @foo host foo.wjfiles.host.site.somedomain.example.com + vars @foo site_id 1 + + @bar host bar.wjfiles.host.site.somedomain.example.com + vars @bar site_id 2 + + request_header X-Wikijump-Site-Slug {labels.6} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files +} + +# +# FALLBACK +# + +http://, +https:// { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://framerail:3000 +} diff --git a/deepwell/test/Caddyfile.proxies b/deepwell/test/Caddyfile.proxies new file mode 100644 index 0000000000..d8f92ab1ea --- /dev/null +++ b/deepwell/test/Caddyfile.proxies @@ -0,0 +1,114 @@ +# Global options +{ + metrics { + per_host + } +} + +(strip_headers) { + # Strip internal headers used by Wikijump + request_header -X-Wikijump-* +} + +# +# MAIN +# + +(serve_main) { + import strip_headers + + # Redirect, route is on the files server + @files { + path /*/code/* + path /*/html/* + path /*/file/* # for the /{slug}/file/{filename} convenience routes + path /*/download/* + path /local--files/* + path /local--code/* + path /local--html/* + path /-/files/* + path /-/file/* + path /-/download/* + path /-/code/* + path /-/html/* + } + redir @files https://{vars.site_slug}.wjfiles.test{uri} + + # Enable default compression settings + encode + + # Finally, proxy to framerail to get the actual HTML + # Note, the x-wikijump-site-* headers have already been set at this point + reverse_proxy http://web_proxy_host +} + +foo.wikijump.test { + vars { + site_id 1 + site_slug foo + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + 
import serve_main +} + +www.foo.wikijump.test { + redir https://foo.wikijump.test{uri} +} + +bar.wikijump.test, +www.bar.wikijump.test { + redir https://example.com{uri} +} + +example.com { + vars { + site_id 2 + site_slug bar + } + + request_header X-Wikijump-Site-Id {vars.site_id} + request_header X-Wikijump-Site-Slug {vars.site_slug} + + import serve_main +} + +www.example.com { + redir https://example.com{uri} +} + +# +# FILES +# + +(serve_files) { + import strip_headers + encode + reverse_proxy http://wws_proxy_host +} + +*.wjfiles.test { + @foo host foo.wjfiles.test + vars @foo site_id 1 + + @bar host bar.wjfiles.test + vars @bar site_id 2 + + request_header X-Wikijump-Site-Slug {labels.2} + request_header X-Wikijump-Site-Id {vars.site_id} + + import serve_files +} + +# +# FALLBACK +# + +http://, +https:// { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/missing-site + reverse_proxy http://web_proxy_host +} From bc0344c9567ab32dd203759c55668fd7fc8effbb Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 23:05:05 -0400 Subject: [PATCH 303/306] Remove version headers from wws. --- wws/src/handler/mod.rs | 6 ++---- wws/src/route.rs | 8 -------- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 12e30ff729..07b43eb584 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -52,13 +52,11 @@ use axum::{ use std::{future::Future, net::IpAddr}; use tower::util::ServiceExt; +pub const HEADER_IS_WIKIJUMP: HeaderName = HeaderName::from_static("x-wikijump"); + pub const HEADER_SITE_ID: HeaderName = HeaderName::from_static("x-wikijump-site-id"); pub const HEADER_SITE_SLUG: HeaderName = HeaderName::from_static("x-wikijump-site-slug"); -pub const HEADER_IS_WIKIJUMP: HeaderName = HeaderName::from_static("x-wikijump"); -pub const HEADER_WWS_VERSION: HeaderName = HeaderName::from_static("x-wikijump-wws-ver"); -pub const HEADER_DEEPWELL_VERSION: HeaderName = HeaderName::from_static("x-wikijump-deepwell-ver"); - pub const HEADER_X_REAL_IP: HeaderName = HeaderName::from_static("x-real-ip"); /// Helper function to get the site ID and slug from headers. diff --git a/wws/src/route.rs b/wws/src/route.rs index bd9b44d129..f7aafc6b03 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -139,13 +139,5 @@ pub fn build_router(state: ServerState) -> Router { HEADER_IS_WIKIJUMP, Some(HeaderValue::from_static("1")), )) - .layer(SetResponseHeaderLayer::overriding( - HEADER_WWS_VERSION, - Some(header_value!(&*info::VERSION_INFO)), - )) - .layer(SetResponseHeaderLayer::overriding( - HEADER_DEEPWELL_VERSION, - Some(header_value!(&header_state.domains.deepwell_version)), - )) .with_state(state) } From ce356d1ad0f8e27fe55980c91ac10a24d0dde166 Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 23:14:48 -0400 Subject: [PATCH 304/306] Add handler for files domain with no site slug. 
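
Requests that hit the files domain itself, with no site-slug subdomain
(for example https://wjfiles.localhost/ in local development), have no
site to route to. The generated Caddyfile now gives that bare domain its
own site block: it marks the request with X-Wikijump-Special-Error,
rewrites it to /-/special-error/file-root, and proxies it to the
framerail upstream (or the configured web proxy host), mirroring the
existing missing-site fallback. Requests to a real {slug}.wjfiles.*
host are unaffected and still match the wildcard block.

As a rough illustration only, here is a standalone sketch of rendering
the new block from the configured domains. It is not the code added to
CaddyService, and the struct and function names are invented for the
example:

    // Sketch: render the bare files-domain fallback block.
    // `DomainConfig` and `render_file_root_fallback` are illustrative
    // names only, not items from the deepwell codebase.
    struct DomainConfig {
        files_domain_no_dot: String, // e.g. "wjfiles.localhost"
        framerail_host: String,      // e.g. "framerail:3000"
    }

    fn render_file_root_fallback(config: &DomainConfig) -> String {
        // Emit a Caddy site block that flags the request as a special
        // error and forwards it to the web upstream for rendering.
        format!(
            "{domain} {{\n\
             \trequest_header X-Wikijump-Special-Error 1\n\
             \trewrite * /-/special-error/file-root\n\
             \treverse_proxy http://{framerail}\n\
             }}\n",
            domain = config.files_domain_no_dot,
            framerail = config.framerail_host,
        )
    }

    fn main() {
        let config = DomainConfig {
            files_domain_no_dot: String::from("wjfiles.localhost"),
            framerail_host: String::from("framerail:3000"),
        };
        let block = render_file_root_fallback(&config);
        assert!(block.contains("/-/special-error/file-root"));
        print!("{block}");
    }

The test fixtures below show the exact block emitted for each
configuration (local, localdev, prod, proxies, and the long-domain
case).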
--- deepwell/src/services/caddy/service.rs | 9 ++++++++- deepwell/test/Caddyfile.basic_local | 6 ++++++ deepwell/test/Caddyfile.basic_localdev | 6 ++++++ deepwell/test/Caddyfile.basic_prod | 6 ++++++ deepwell/test/Caddyfile.full_prod | 6 ++++++ deepwell/test/Caddyfile.long | 6 ++++++ deepwell/test/Caddyfile.proxies | 6 ++++++ 7 files changed, 44 insertions(+), 1 deletion(-) diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs index 28ce94e944..cebfd5e948 100644 --- a/deepwell/src/services/caddy/service.rs +++ b/deepwell/src/services/caddy/service.rs @@ -113,8 +113,9 @@ impl CaddyService { ) -> String { info!("Generating Caddyfile for {} sites", sites.len()); - let main_domain_no_dot = &config.main_domain_no_dot; let files_domain = &config.files_domain; + let files_domain_no_dot = &config.files_domain_no_dot; + let main_domain_no_dot = &config.main_domain_no_dot; let mut caddyfile = str!( "\ @@ -275,6 +276,12 @@ www.{domain} {{ reverse_proxy http://{wws_host} }} +{files_domain_no_dot} {{ + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/file-root + reverse_proxy http://{framerail_host} +}} + *{files_domain} {{" ); diff --git a/deepwell/test/Caddyfile.basic_local b/deepwell/test/Caddyfile.basic_local index 1193957ba8..027d301807 100644 --- a/deepwell/test/Caddyfile.basic_local +++ b/deepwell/test/Caddyfile.basic_local @@ -90,6 +90,12 @@ www.example.com { reverse_proxy http://wws:7000 } +wjfiles.localhost { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/file-root + reverse_proxy http://framerail:3000 +} + *.wjfiles.localhost { @foo host foo.wjfiles.localhost vars @foo site_id 1 diff --git a/deepwell/test/Caddyfile.basic_localdev b/deepwell/test/Caddyfile.basic_localdev index a497852250..37f6ed309d 100644 --- a/deepwell/test/Caddyfile.basic_localdev +++ b/deepwell/test/Caddyfile.basic_localdev @@ -93,6 +93,12 @@ www.example.com { reverse_proxy http://wws:7000 } +wjfiles.localhost { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/file-root + reverse_proxy http://framerail:3000 +} + *.wjfiles.localhost { @foo host foo.wjfiles.localhost vars @foo site_id 1 diff --git a/deepwell/test/Caddyfile.basic_prod b/deepwell/test/Caddyfile.basic_prod index 3c22500927..b73be00664 100644 --- a/deepwell/test/Caddyfile.basic_prod +++ b/deepwell/test/Caddyfile.basic_prod @@ -89,6 +89,12 @@ www.example.com { reverse_proxy http://wws:7000 } +wjfiles.test { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/file-root + reverse_proxy http://framerail:3000 +} + *.wjfiles.test { @foo host foo.wjfiles.test vars @foo site_id 1 diff --git a/deepwell/test/Caddyfile.full_prod b/deepwell/test/Caddyfile.full_prod index 60be71f4b3..b7f9aa1e6d 100644 --- a/deepwell/test/Caddyfile.full_prod +++ b/deepwell/test/Caddyfile.full_prod @@ -177,6 +177,12 @@ www.scpwiki.wikijump.test { reverse_proxy http://wws:7000 } +wjfiles.test { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/file-root + reverse_proxy http://framerail:3000 +} + *.wjfiles.test { @www host www.wjfiles.test vars @www site_id 1 diff --git a/deepwell/test/Caddyfile.long b/deepwell/test/Caddyfile.long index c166afd001..3c243da36b 100644 --- a/deepwell/test/Caddyfile.long +++ b/deepwell/test/Caddyfile.long @@ -89,6 +89,12 @@ www.example.com { reverse_proxy http://wws:7000 } +wjfiles.host.site.somedomain.example.com { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/file-root + reverse_proxy 
http://framerail:3000 +} + *.wjfiles.host.site.somedomain.example.com { @foo host foo.wjfiles.host.site.somedomain.example.com vars @foo site_id 1 diff --git a/deepwell/test/Caddyfile.proxies b/deepwell/test/Caddyfile.proxies index d8f92ab1ea..143e39e33c 100644 --- a/deepwell/test/Caddyfile.proxies +++ b/deepwell/test/Caddyfile.proxies @@ -89,6 +89,12 @@ www.example.com { reverse_proxy http://wws_proxy_host } +wjfiles.test { + request_header X-Wikijump-Special-Error 1 + rewrite * /-/special-error/file-root + reverse_proxy http://web_proxy_host +} + *.wjfiles.test { @foo host foo.wjfiles.test vars @foo site_id 1 From 1b73bc3f19fcde6a68ea530c7e45970655fe0b2c Mon Sep 17 00:00:00 2001 From: Emmie Maeda <emmie.maeda@gmail.com> Date: Fri, 28 Mar 2025 23:59:45 -0400 Subject: [PATCH 305/306] Initial deletion of host work within wws. Moving towards gutting the logic that now lives in caddy. --- deepwell/src/services/error.rs | 33 ----- install/local/docker-compose.yaml | 1 - wws/.env.example | 4 - wws/Cargo.lock | 9 -- wws/Cargo.toml | 3 - wws/src/cache.rs | 98 +------------ wws/src/config/mod.rs | 2 - wws/src/config/secrets.rs | 6 - wws/src/deepwell.rs | 79 +---------- wws/src/{error/rust.rs => error.rs} | 2 +- wws/src/error/html.rs | 207 ---------------------------- wws/src/error/mod.rs | 31 ----- wws/src/framerail.rs | 38 ----- wws/src/handler/file.rs | 32 ++--- wws/src/handler/misc.rs | 4 - wws/src/handler/mod.rs | 202 +-------------------------- wws/src/handler/redirect.rs | 19 +-- wws/src/host.rs | 92 ------------- wws/src/info.rs | 25 ---- wws/src/macros.rs | 44 ------ wws/src/main.rs | 5 - wws/src/route.rs | 54 +------- wws/src/state.rs | 42 +----- 23 files changed, 23 insertions(+), 1009 deletions(-) rename wws/src/{error/rust.rs => error.rs} (98%) delete mode 100644 wws/src/error/html.rs delete mode 100644 wws/src/error/mod.rs delete mode 100644 wws/src/framerail.rs delete mode 100644 wws/src/host.rs delete mode 100644 wws/src/macros.rs diff --git a/deepwell/src/services/error.rs b/deepwell/src/services/error.rs index b520a0fb2f..20ef2dda2f 100644 --- a/deepwell/src/services/error.rs +++ b/deepwell/src/services/error.rs @@ -301,26 +301,6 @@ pub enum Error { #[error("The rate limit for an external API has been reached")] RateLimited, - - // Errors for wws - // See the 8000 section in the error codes table - #[error("The web server failed to process the request")] - WebServerFailure, - - #[error("The web server did not get a successful DEEPWELL response")] - DeepwellFailure, - - #[error("The web server cannot fetch site information")] - SiteFetch, - - #[error("The web server cannot fetch page information")] - PageFetch, - - #[error("The web server cannot fetch file information")] - FileFetch, - - #[error("The web server cannot fetch blob data")] - BlobFetch, } impl Error { @@ -450,19 +430,6 @@ impl Error { Error::InvalidSessionToken => 5001, Error::SessionUserId { .. } => 5002, // TODO: permission errors (e.g. locked page, cannot apply bans) - - // 8000 - Web Server / Routing errors - // - // This block is reserved for errors exclusively returned by WWS. - // These errors are not be used by DEEPWELL. - // - // WebServerFailure is pretty general, avoid using it if possible. 
- Error::WebServerFailure => 6000, - Error::DeepwellFailure => 6001, - Error::SiteFetch => 6002, - Error::PageFetch => 6003, - Error::FileFetch => 6004, - Error::BlobFetch => 6005, } } diff --git a/install/local/docker-compose.yaml b/install/local/docker-compose.yaml index f6fbebfde8..82e1fbd2aa 100644 --- a/install/local/docker-compose.yaml +++ b/install/local/docker-compose.yaml @@ -112,7 +112,6 @@ services: - "ADDRESS=[::]:7000" - "DEEPWELL_URL=http://deepwell:2747" - "REDIS_URL=redis://cache" - - "FRAMERAIL_HOST=framerail:3000" - "S3_BUCKET=deepwell-files" - "S3_REGION_NAME=local" - "S3_PATH_STYLE=true" diff --git a/wws/.env.example b/wws/.env.example index 10be1350da..0a8791b86a 100644 --- a/wws/.env.example +++ b/wws/.env.example @@ -13,10 +13,6 @@ DEEPWELL_URL=http://localhost:2747 # Includes password (if needed) to connect. REDIS_URL=redis://localhost -# framerail host -# Includes the port number. -FRAMERAIL_HOST=localhost:3000 - # S3 configuration settings S3_BUCKET=deepwell-files diff --git a/wws/Cargo.lock b/wws/Cargo.lock index b95db9cc89..2ba919555e 100644 --- a/wws/Cargo.lock +++ b/wws/Cargo.lock @@ -2,12 +2,6 @@ # It is not intended for manual editing. version = 4 -[[package]] -name = "accept-language" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f27d075294830fcab6f66e320dab524bc6d048f4a151698e153205559113772" - [[package]] name = "addr2line" version = "0.24.2" @@ -2722,7 +2716,6 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" name = "wws" version = "2025.2.6" dependencies = [ - "accept-language", "anyhow", "axum", "axum-client-ip", @@ -2731,9 +2724,7 @@ dependencies = [ "clap", "color-backtrace", "dotenvy", - "hyper-util", "jsonrpsee", - "once_cell", "paste", "redis", "ref-map", diff --git a/wws/Cargo.toml b/wws/Cargo.toml index 4f533db9f7..71317e15b4 100644 --- a/wws/Cargo.toml +++ b/wws/Cargo.toml @@ -13,7 +13,6 @@ authors = ["Emmie Smith <emmie.maeda@gmail.com>"] edition = "2021" [dependencies] -accept-language = "3" anyhow = "1" axum = { version = "0.8", features = ["http1", "http2", "macros", "tokio", "tower-log", "tracing"], default-features = false } axum-client-ip = "0.7" @@ -21,9 +20,7 @@ axum-extra = { version = "0.10", features = ["attachment"] } clap = "4" color-backtrace = "0.6" dotenvy = "0.15" -hyper-util = { version = "0.1", features = ["client", "client-legacy", "http1", "http2", "tokio"] } jsonrpsee = { version = "0.24", features = ["async-client", "jsonrpsee-http-client"] } -once_cell = "1" paste = "1" redis = { version = "0.25", features = ["aio", "connection-manager", "keep-alive", "r2d2", "tokio-comp", "tokio-rustls-comp"], default-features = false } ref-map = "0.1" diff --git a/wws/src/cache.rs b/wws/src/cache.rs index d136d699e8..8031b06794 100644 --- a/wws/src/cache.rs +++ b/wws/src/cache.rs @@ -23,9 +23,8 @@ //! Whenever you make changes to this module, make sure that the code is //! compatible with DEEPWELL's Redis code. -use crate::{deepwell::FileData, error::Result, host::SiteAndHost}; +use crate::{deepwell::FileData, error::Result}; use redis::{aio::MultiplexedConnection, AsyncCommands}; -use ref_map::*; macro_rules! get_connection { ($client:expr) => { @@ -39,14 +38,6 @@ macro_rules! hset { }; } -macro_rules! hset_opt { - ($conn:expr, $key:expr, $field:expr, $value:expr $(,)?) => { - if let Some(value) = $value { - hset!($conn, $key, $field, value) - } - }; -} - macro_rules! hdel { ($conn:expr, $key:expr, $field:expr $(,)?) 
=> { $conn.hdel::<_, _, ()>(&$key, $field).await? @@ -65,93 +56,6 @@ impl Cache { Ok(Cache { client }) } - pub async fn get_site_from_slug(&self, site_slug: &str) -> Result<Option<i64>> { - let mut conn = get_connection!(self.client); - let key = format!("site_slug:{site_slug}"); - let value = conn.hget(key, "id").await?; - Ok(value) - } - - pub async fn set_site_from_slug(&self, site_slug: &str, site_id: i64) -> Result<()> { - let mut conn = get_connection!(self.client); - let key = format!("site_slug:{site_slug}"); - hset!(conn, key, "id", site_id); - Ok(()) - } - - pub async fn get_host_from_domain(&self, domain: &str) -> Result<Option<SiteAndHost>> { - type SiteDomainDataTuple = (Option<String>, Option<i64>, Option<String>, Option<String>); - - let mut conn = get_connection!(self.client); - let key = format!("site_domain:{domain}"); - let fields = &["variant", "id", "slug", "domain"]; - let (variant, site_id, slug, domain) = - conn.hget::<_, _, SiteDomainDataTuple>(&key, fields).await?; - - let variant = variant.ref_map(|s| s.as_str()); - match (variant, site_id, slug, domain) { - // Each variant value has a set of fields that should be set for it - // If a different group of fields are set, then it's invalid - (Some("main_site"), Some(site_id), Some(site_slug), None) => { - Ok(Some(SiteAndHost::MainSite { site_id, site_slug })) - } - (Some("main_site_redirect"), None, None, Some(domain)) => { - Ok(Some(SiteAndHost::MainSiteRedirect { domain })) - } - (Some("missing_site_slug"), None, Some(site_slug), None) => { - Ok(Some(SiteAndHost::MissingSiteSlug { site_slug })) - } - (Some("missing_custom_domain"), None, None, Some(domain)) => { - Ok(Some(SiteAndHost::MissingCustomDomain { domain })) - } - - // Cache miss - (None, None, None, None) => Ok(None), - - // Not a valid variant or set of fields - _ => { - clear_inconsistent_fields(&mut conn, &key, fields).await?; - Ok(None) - } - } - } - - pub async fn set_host_from_domain(&self, domain: &str, host: &SiteAndHost) -> Result<()> { - let mut conn = get_connection!(self.client); - let key = format!("site_domain:{domain}"); - - let (variant, site_id, slug, domain): ( - &'static str, - Option<i64>, - Option<&str>, - Option<&str>, - ) = match host { - SiteAndHost::MainSite { site_id, site_slug } => { - ("site_found", Some(*site_id), Some(site_slug), Some(domain)) - } - SiteAndHost::MainSiteRedirect { domain } => { - ("main_site_redirect", None, None, Some(domain)) - } - SiteAndHost::MissingSiteSlug { site_slug } => { - ("missing_site_slug", None, Some(site_slug), None) - } - SiteAndHost::MissingCustomDomain { domain } => { - ("missing_custom_domain", None, None, Some(domain)) - } - SiteAndHost::FileSite { .. 
} | SiteAndHost::FileRoot => { - panic!( - "Cannot cache SiteAndHost value corresponding to the files router: {host:#?}" - ); - } - }; - - hset!(conn, key, "variant", variant); - hset_opt!(conn, key, "id", site_id); - hset_opt!(conn, key, "slug", slug); - hset_opt!(conn, key, "domain", domain); - Ok(()) - } - pub async fn get_page(&self, site_id: i64, page_slug: &str) -> Result<Option<i64>> { let mut conn = get_connection!(self.client); let key = format!("page_slug:{site_id}:{page_slug}"); diff --git a/wws/src/config/mod.rs b/wws/src/config/mod.rs index 76e3429f7d..7db1fd23e6 100644 --- a/wws/src/config/mod.rs +++ b/wws/src/config/mod.rs @@ -66,7 +66,6 @@ pub fn load_config() -> (Config, Secrets) { // Process secrets let deepwell_url = get_env!("DEEPWELL_URL"); let redis_url = get_env!("REDIS_URL"); - let framerail_host = get_env!("FRAMERAIL_HOST"); let s3_bucket = get_env!("S3_BUCKET"); let s3_region = match env::var("S3_AWS_REGION") { @@ -134,7 +133,6 @@ pub fn load_config() -> (Config, Secrets) { let secrets = Secrets { deepwell_url, redis_url, - framerail_host, s3_bucket, s3_region, s3_path_style, diff --git a/wws/src/config/secrets.rs b/wws/src/config/secrets.rs index fcf9e8ace1..319cc3b2d3 100644 --- a/wws/src/config/secrets.rs +++ b/wws/src/config/secrets.rs @@ -32,12 +32,6 @@ pub struct Secrets { /// Set using environment variable `REDIS_URL`. pub redis_url: String, - /// The host of the framerail server to reverse proxy from. - /// This includes the port number, if it's not `80`. - /// - /// Set using environment variable `FRAMERAIL_HOST`. - pub framerail_host: String, - /// The name of the S3 bucket that file blobs are kept in. /// The bucket must already exist prior to program invocation. /// diff --git a/wws/src/deepwell.rs b/wws/src/deepwell.rs index 1ea1428f28..3032571eae 100644 --- a/wws/src/deepwell.rs +++ b/wws/src/deepwell.rs @@ -18,7 +18,7 @@ * along with this program. If not, see <http://www.gnu.org/licenses/>. */ -use crate::{error::Result, host::SiteAndHost}; +use crate::error::Result; use jsonrpsee::{core::client::ClientT, http_client::HttpClient, rpc_params}; use serde::Deserialize; use std::time::Duration; @@ -116,24 +116,6 @@ impl Deepwell { }) } - pub async fn get_site_from_slug(&self, slug: &str) -> Result<Option<SiteData>> { - let site_data: Option<SiteData> = self - .client - .request("site_get", rpc_object! { "site" => slug }) - .await?; - - Ok(site_data) - } - - pub async fn get_site_from_domain(&self, domain: &str) -> Result<SiteAndHost> { - let host: SiteAndHost = self - .client - .request("site_from_domain", rpc_params![domain]) - .await?; - - Ok(host) - } - pub async fn get_page(&self, site_id: i64, page_slug: &str) -> Result<Option<PageData>> { let params = rpc_object! { "site_id" => site_id, @@ -162,60 +144,6 @@ impl Deepwell { let file_data: Option<FileData> = self.client.request("file_get", params).await?; Ok(file_data) } - - pub async fn get_special_error_missing_site_slug( - &self, - locales: &[String], - site_slug: &str, - ) -> Result<String> { - let params = rpc_object! { - "locales" => locales, - "site_slug" => site_slug, - }; - - let html: String = self - .client - .request("special_error_missing_site_slug", params) - .await?; - - Ok(html) - } - - pub async fn get_special_error_missing_custom_domain( - &self, - locales: &[String], - domain: &str, - ) -> Result<String> { - let params = rpc_object! 
{ - "locales" => locales, - "domain" => domain, - }; - - let html: String = self - .client - .request("special_error_missing_custom_domain", params) - .await?; - - Ok(html) - } - - pub async fn get_special_error_site_fetch( - &self, - locales: &[String], - domain: &str, - ) -> Result<String> { - let params = rpc_object! { - "locales" => locales, - "domain" => domain, - }; - - let html: String = self - .client - .request("special_error_missing_custom_domain", params) - .await?; - - Ok(html) - } } #[derive(Debug, Clone)] @@ -227,11 +155,6 @@ pub struct Domains { pub deepwell_version: String, } -#[derive(Deserialize, Debug, Clone)] -pub struct SiteData { - pub site_id: i64, -} - #[derive(Deserialize, Debug, Clone)] pub struct PageData { pub page_id: i64, diff --git a/wws/src/error/rust.rs b/wws/src/error.rs similarity index 98% rename from wws/src/error/rust.rs rename to wws/src/error.rs index 03d18c9677..74118d681f 100644 --- a/wws/src/error/rust.rs +++ b/wws/src/error.rs @@ -1,5 +1,5 @@ /* - * error/rust.rs + * error.rs * * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) * Copyright (C) 2019-2025 Wikijump Team diff --git a/wws/src/error/html.rs b/wws/src/error/html.rs deleted file mode 100644 index 55f6a47844..0000000000 --- a/wws/src/error/html.rs +++ /dev/null @@ -1,207 +0,0 @@ -/* - * error/html.rs - * - * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) - * Copyright (C) 2019-2025 Wikijump Team - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Affero General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Affero General Public License for more details. - * - * You should have received a copy of the GNU Affero General Public License - * along with this program. If not, see <http://www.gnu.org/licenses/>. - */ - -//! Helpers for converting error states into axum responses. -//! -//! This is for cases where getting a full or proper error message -//! (complete with localization) is not feasible due to how high -//! up this error is, and so we return an error message annotated -//! with an error code instead. -//! -//! This is very basic HTML generation. If we need to do anything -//! more fancy in the future, then feel free to replace this with -//! something better. -//! -//! Alternatively, you may want to move these error cases to -//! `SpecialErrorService` in DEEPWELL, where they can benefit -//! from localization. - -use axum::{ - body::Body, - http::{ - header::{self, HeaderValue}, - StatusCode, - }, - response::Response, -}; -use v_htmlescape::escape as html_escape; - -const HTML_BEGIN: &str = r"<html><head><title>"; -const HTML_MIDDLE: &str = ""; -const HTML_END: &str = ""; - -/// Error codes represented in wws. 
-/// These must match the corresponding errors in deepwell (`src/service/error.rs`) -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum ServerErrorCode<'a> { - PageNotFound { - site_id: i64, - page_slug: &'a str, - }, - FileNotFound { - site_id: i64, - page_id: i64, - filename: &'a str, - }, - DeepwellFailure, - PageFetch { - site_id: i64, - page_slug: &'a str, - }, - FileFetch { - site_id: i64, - page_id: i64, - filename: &'a str, - }, - BlobFetch { - site_id: i64, - page_slug: &'a str, - filename: &'a str, - }, -} - -impl ServerErrorCode<'_> { - /// Returns the error code corresponding to this error. - /// - /// See `src/service/error.rs` for a listing. - /// - /// Note that, despite the acceptable error range only being positive, - /// the same type (`i32`) is used here as in DEEPWELL. - pub fn error_code(self) -> i32 { - match self { - ServerErrorCode::PageNotFound { .. } => 2005, - ServerErrorCode::FileNotFound { .. } => 2009, - ServerErrorCode::DeepwellFailure => 6001, - ServerErrorCode::PageFetch { .. } => 6003, - ServerErrorCode::FileFetch { .. } => 6004, - ServerErrorCode::BlobFetch { .. } => 6005, - } - } - - /// Returns the HTTP status code for this error. - pub fn status_code(self) -> StatusCode { - match self { - ServerErrorCode::PageNotFound { .. } | ServerErrorCode::FileNotFound { .. } => { - StatusCode::NOT_FOUND - } - ServerErrorCode::DeepwellFailure - | ServerErrorCode::PageFetch { .. } - | ServerErrorCode::FileFetch { .. } - | ServerErrorCode::BlobFetch { .. } => StatusCode::INTERNAL_SERVER_ERROR, - } - } - - /// Returns the HTML title for this error. - fn title(self) -> &'static str { - match self { - ServerErrorCode::PageNotFound { .. } => "Page not found", - ServerErrorCode::FileNotFound { .. } => "File not found", - ServerErrorCode::DeepwellFailure => "Server error", - ServerErrorCode::PageFetch { .. } => "Cannot load page", - ServerErrorCode::FileFetch { .. } => "Cannot load file", - ServerErrorCode::BlobFetch { .. 
} => "Cannot load file data", - } - } - - pub fn into_response(self) -> Response { - // Build error HTML - let mut body = String::with_capacity(HTML_BEGIN.len() + HTML_END.len() + 70); - body.push_str(HTML_BEGIN); - body.push_str(self.title()); - body.push_str(HTML_MIDDLE); - - let error_code = self.error_code(); - str_write!(&mut body, "[Error #{error_code}] "); - - // Write error body - match self { - ServerErrorCode::PageNotFound { site_id, page_slug } => { - str_write!( - body, - "Cannot find page \"{}\" in site ID {}.", - html_escape(page_slug), - site_id, - ); - } - ServerErrorCode::FileNotFound { - site_id, - page_id, - filename, - } => { - str_write!( - body, - "Cannot find file \"{}\" in page ID {} in site ID {}", - html_escape(filename), - page_id, - site_id, - ); - } - ServerErrorCode::DeepwellFailure => { - str_write!(body, "Fatal: Cannot process request from backend server"); - } - ServerErrorCode::PageFetch { site_id, page_slug } => { - str_write!( - body, - "Cannot load page \"{}\" in site ID {}.", - html_escape(page_slug), - site_id, - ); - } - ServerErrorCode::FileFetch { - site_id, - page_id, - filename, - } => { - str_write!( - body, - "Cannot load file \"{}\", in page ID {} in site ID {}.", - html_escape(filename), - page_id, - site_id, - ); - } - ServerErrorCode::BlobFetch { - site_id, - page_slug, - filename, - } => { - str_write!( - body, - "Cannot load file data for \"{}\", in page \"{}\" in site ID {}.", - html_escape(filename), - html_escape(page_slug), - site_id, - ); - } - }; - - body.push_str(HTML_END); - - // Build and return response - Response::builder() - .status(self.status_code()) - .header( - header::CONTENT_TYPE, - HeaderValue::from_static("text/html; charset=utf-8"), - ) - .body(Body::from(body)) - .expect("Unable to build response") - } -} diff --git a/wws/src/error/mod.rs b/wws/src/error/mod.rs deleted file mode 100644 index 4aa3001235..0000000000 --- a/wws/src/error/mod.rs +++ /dev/null @@ -1,31 +0,0 @@ -/* - * error/mod.rs - * - * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) - * Copyright (C) 2019-2025 Wikijump Team - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Affero General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Affero General Public License for more details. - * - * You should have received a copy of the GNU Affero General Public License - * along with this program. If not, see . - */ - -//! Error handling and error responses. -//! -//! This module has two semi-related functions, first, the -//! structures for error handling within Rust, and second, -//! utilities to convert final error states into axum responses. 
- -mod html; -mod rust; - -pub use self::html::*; -pub use self::rust::*; diff --git a/wws/src/framerail.rs b/wws/src/framerail.rs deleted file mode 100644 index 662e0e262f..0000000000 --- a/wws/src/framerail.rs +++ /dev/null @@ -1,38 +0,0 @@ -/* - * framerail.rs - * - * DEEPWELL - Wikijump API provider and database manager - * Copyright (C) 2019-2025 Wikijump Team - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Affero General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Affero General Public License for more details. - * - * You should have received a copy of the GNU Affero General Public License - * along with this program. If not, see . - */ - -use axum::http::Uri; - -#[derive(Debug)] -pub struct Framerail { - host: String, -} - -impl Framerail { - #[inline] - pub fn new(host: String) -> Self { - Framerail { host } - } - - pub fn proxy_uri(&self, path: &str) -> Uri { - let uri = format!("http://{}{}", self.host, path); - Uri::try_from(uri).expect("Internal framerail URI is invalid") - } -} diff --git a/wws/src/handler/file.rs b/wws/src/handler/file.rs index acc1d262fa..04194bca59 100644 --- a/wws/src/handler/file.rs +++ b/wws/src/handler/file.rs @@ -19,7 +19,7 @@ */ use super::get_site_info; -use crate::{error::ServerErrorCode, state::ServerState}; +use crate::state::ServerState; use axum::{ body::Body, extract::{Path, State}, @@ -49,7 +49,8 @@ macro_rules! fetch_file { page_slug = page_slug, "Cannot get file, no such page", ); - return ServerErrorCode::PageNotFound { site_id, page_slug }.into_response(); + // TODO + todo!() } Err(error) => { error!( @@ -57,7 +58,8 @@ macro_rules! fetch_file { page_slug = page_slug, "Cannot get page info: {error}", ); - return ServerErrorCode::PageFetch { site_id, page_slug }.into_response(); + // TODO + todo!() } }; @@ -70,12 +72,8 @@ macro_rules! fetch_file { filename = filename, "Cannot get file, none with filename", ); - return ServerErrorCode::FileNotFound { - site_id, - page_id, - filename, - } - .into_response(); + // TODO + todo!() } Err(error) => { error!( @@ -84,12 +82,8 @@ macro_rules! fetch_file { filename = filename, "Cannot get file info: {error}", ); - return ServerErrorCode::FileFetch { - site_id, - page_id, - filename, - } - .into_response(); + // TODO + todo!() } }; @@ -117,12 +111,8 @@ macro_rules! 
fetch_file { s3_hash = &file_info.s3_hash, "Cannot get blob data: {error}", ); - return ServerErrorCode::BlobFetch { - site_id, - page_slug, - filename, - } - .into_response(); + // TODO + todo!() } }; diff --git a/wws/src/handler/misc.rs b/wws/src/handler/misc.rs index 7b47b7b821..a6b25b91ca 100644 --- a/wws/src/handler/misc.rs +++ b/wws/src/handler/misc.rs @@ -34,10 +34,6 @@ fn text_response(body: &'static str, status: StatusCode) -> Response { .expect("Unable to convert response data") } -pub async fn handle_teapot() -> Response { - text_response("🫖", StatusCode::IM_A_TEAPOT) -} - pub async fn handle_health_check(State(state): State) -> Response { // DEEPWELL's ping ensures both Postgres and Redis are connected match state.deepwell.ping().await { diff --git a/wws/src/handler/mod.rs b/wws/src/handler/mod.rs index 07b43eb584..91bf7b4806 100644 --- a/wws/src/handler/mod.rs +++ b/wws/src/handler/mod.rs @@ -20,7 +20,6 @@ mod code; mod file; -mod framerail; mod html; mod misc; mod redirect; @@ -29,36 +28,19 @@ mod well_known; pub use self::code::*; pub use self::file::*; -pub use self::framerail::*; pub use self::html::*; pub use self::misc::*; pub use self::redirect::*; pub use self::robots::*; pub use self::well_known::*; -use crate::{ - error::{Result, ServerErrorCode}, - host::{lookup_host, SiteAndHost}, - path::get_path, - state::ServerState, -}; -use axum::{ - body::Body, - extract::Request, - http::header::{HeaderMap, HeaderName}, - response::{Html, IntoResponse, Redirect, Response}, - Router, -}; -use std::{future::Future, net::IpAddr}; -use tower::util::ServiceExt; +use axum::http::header::{HeaderMap, HeaderName}; pub const HEADER_IS_WIKIJUMP: HeaderName = HeaderName::from_static("x-wikijump"); pub const HEADER_SITE_ID: HeaderName = HeaderName::from_static("x-wikijump-site-id"); pub const HEADER_SITE_SLUG: HeaderName = HeaderName::from_static("x-wikijump-site-slug"); -pub const HEADER_X_REAL_IP: HeaderName = HeaderName::from_static("x-real-ip"); - /// Helper function to get the site ID and slug from headers. fn get_site_info(headers: &HeaderMap) -> (i64, &str) { let site_id = headers @@ -77,185 +59,3 @@ fn get_site_info(headers: &HeaderMap) -> (i64, &str) { (site_id, site_slug) } - -/// Parse the `Accept-Language` header. -/// If there are no languages, or there is no header, then use English. -fn parse_accept_language(headers: &HeaderMap) -> Vec { - fn get_header_value(headers: &HeaderMap) -> Option<&str> { - match headers.get("accept-language") { - Some(value) => value.to_str().ok(), - None => None, - } - } - - let header_value = match get_header_value(headers) { - Some(value) => value, - None => return vec![str!("en")], - }; - - let mut languages = accept_language::parse(header_value); - if languages.is_empty() { - languages.push(str!("en")); - } - - languages -} - -/// Helper function to return a special error response. -async fn special_error(headers: &HeaderMap, f: F) -> Response -where - F: FnOnce(Vec) -> Fut, - Fut: Future>, -{ - let locales = parse_accept_language(headers); - match f(locales).await { - // TODO wrap HTML output into body - Ok(html) => Html(html).into_response(), - Err(error) => { - error!("Unable to get special error HTML: {error}"); - ServerErrorCode::DeepwellFailure.into_response() - } - } -} - -/// Entry route handler to first process host information. -/// -/// Before we can give this request to the right place, -/// we first must determine if it's a main or files request, -/// and then what site it corresponds to. 
Then we can pass -/// it to the appropriate location. -pub async fn handle_host_delegation( - state: ServerState, - hostname: String, - ip: IpAddr, - mut request: Request, - main_router: Router, - files_router: Router, -) -> Response { - { - // Strip internal headers, just to be safe. - let headers = request.headers_mut(); - headers.remove(HEADER_SITE_ID); - headers.remove(HEADER_SITE_SLUG); - headers.remove(HEADER_X_REAL_IP); - } - - macro_rules! forward_request { - ($router:expr) => { - match $router.oneshot(request).await { - Ok(response) => response, - Err(infallible) => match infallible {}, - } - }; - } - - macro_rules! add_headers { - ($site_id:expr, $site_slug:expr) => {{ - // Validate types - let _: i64 = $site_id; - let _: &str = &$site_slug; - - // Add headers - let headers = request.headers_mut(); - headers.insert(HEADER_SITE_ID, header_value!(str!($site_id))); - headers.insert(HEADER_SITE_SLUG, header_value!($site_slug)); - headers.insert(HEADER_X_REAL_IP, header_value!(str!(ip))); - }}; - } - - // Determine what host and site (e.g. main vs files, what site slug and ID) - let host_data = match lookup_host(&state, &hostname).await { - Ok(host_data) => host_data, - Err(error) => { - error!("Unable to fetch site/host information: {error}"); - return special_error(request.headers(), |locales| async move { - state - .deepwell - .get_special_error_site_fetch(&locales, &hostname) - .await - }) - .await; - } - }; - - // Now that we have the general category of request type, we can - // give it to the right place to be processed. - match host_data { - // Main site route handling - SiteAndHost::MainSite { site_id, site_slug } => { - info!( - r#type = "main", - domain = hostname, - site_id = site_id, - site_slug = site_slug, - "Routing site request", - ); - add_headers!(site_id, site_slug); - forward_request!(main_router) - } - // Main site redirect - SiteAndHost::MainSiteRedirect { domain } => { - info!( - r#type = "main", - domain = domain, - "Found site, but needs redirect to preferred domain", - ); - let destination = format!("https://{}{}", domain, get_path(request.uri())); - Redirect::permanent(&destination).into_response() - } - // Files site route handling - SiteAndHost::FileSite { site_id, site_slug } => { - info!( - r#type = "files", - domain = hostname, - site_slug = site_slug, - site_id = site_id, - "Routing site request", - ); - add_headers!(site_id, site_slug); - forward_request!(files_router) - } - // Files site by itself - // See the case in host.rs for an explanation - SiteAndHost::FileRoot => { - info!( - r#type = "files", - domain = hostname, - "Handling lone files site request", - ); - let destination = format!("https://{}", state.domains.main_domain_no_dot); - Redirect::temporary(&destination).into_response() - } - // Canonical domain, site missing - SiteAndHost::MissingSiteSlug { ref site_slug } => { - info!( - r#type = "main", - domain = hostname, - site_slug = site_slug, - "No such site with slug", - ); - special_error(request.headers(), |locales| async move { - state - .deepwell - .get_special_error_missing_site_slug(&locales, site_slug) - .await - }) - .await - } - // Custom domain missing - SiteAndHost::MissingCustomDomain { ref domain } => { - info!( - r#type = "main", - domain = domain, - "No such site with custom domain", - ); - special_error(request.headers(), |locales| async move { - state - .deepwell - .get_special_error_missing_custom_domain(&locales, domain) - .await - }) - .await - } - } -} diff --git a/wws/src/handler/redirect.rs 
b/wws/src/handler/redirect.rs index 2df08d51ad..17bb1e0acf 100644 --- a/wws/src/handler/redirect.rs +++ b/wws/src/handler/redirect.rs @@ -19,7 +19,7 @@ */ use super::get_site_info; -use crate::{host::DEFAULT_SITE_SLUG, path::get_path, state::ServerState}; +use crate::{path::get_path, state::ServerState}; use axum::{ extract::{Path, State}, http::{header::HeaderMap, Uri}, @@ -27,21 +27,6 @@ use axum::{ }; use paste::paste; -pub async fn redirect_to_files( - State(state): State, - headers: HeaderMap, - uri: Uri, -) -> Redirect { - // xyz.wikijump.com -> xyz.wjfiles.com - // customdomain.com -> xyz.wjfiles.com - - let (_, site_slug) = get_site_info(&headers); - let path = get_path(&uri); - let domain = &state.domains.files_domain; - let destination = format!("https://{site_slug}{domain}{path}"); - Redirect::permanent(&destination) -} - pub async fn redirect_to_main( State(state): State, headers: HeaderMap, @@ -52,6 +37,8 @@ pub async fn redirect_to_main( // Only remove www for the main site. // The files site should always have an explicit site slug. + + const DEFAULT_SITE_SLUG: &str = "www"; // TODO let destination = if site_slug == DEFAULT_SITE_SLUG { let domain = &state.domains.main_domain_no_dot; format!("https://{domain}{path}") diff --git a/wws/src/host.rs b/wws/src/host.rs deleted file mode 100644 index 6641586b10..0000000000 --- a/wws/src/host.rs +++ /dev/null @@ -1,92 +0,0 @@ -/* - * host.rs - * - * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) - * Copyright (C) 2019-2025 Wikijump Team - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Affero General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Affero General Public License for more details. - * - * You should have received a copy of the GNU Affero General Public License - * along with this program. If not, see . - */ - -use crate::{deepwell::Domains, error::Result, state::ServerState}; -use serde::Deserialize; - -/// The slug for the default site. -/// -/// This refers to the site displayed when you visit `wikijump.com` -/// with no subdomain component. -pub const DEFAULT_SITE_SLUG: &str = "www"; - -/// Describes which Wikijump site and router this request is pointed towards. -/// Gets the data from DEEPWELL, but adds fields for the files server routing. -/// -/// * "Main" refers to the framerail handler, i.e. `[site-slug].wikijump.com`. -/// * "Files" refers to the wjfiles handlers, i.e. `[site-slug].wjfiles.com`. -#[derive(Deserialize, Debug, Clone)] -#[serde(rename_all = "snake_case", tag = "result", content = "data")] -pub enum SiteAndHost { - /// Main router existent site, ready to process request. - MainSite { site_id: i64, site_slug: String }, - - /// Main router existent site, request to preferred domain. - MainSiteRedirect { domain: String }, - - /// Files router, existent site. - FileSite { site_id: i64, site_slug: String }, - - /// Request is the root domain on the files router, which has no meaning. - /// Special case. - FileRoot, - - /// Any router, non-existent site, canonical domain. - MissingSiteSlug { site_slug: String }, - - /// Any router, non-existent site, custom domain. 
- MissingCustomDomain { domain: String }, -} - -pub async fn lookup_host(state: &ServerState, hostname: &str) -> Result { - let Domains { - ref files_domain, - ref files_domain_no_dot, - .. - } = state.domains; - - if let Some(site_slug) = hostname.strip_suffix(files_domain) { - // Determine if it's a files domain. - let site_id = state.get_site_from_slug(site_slug).await?; - let site_slug = site_slug.to_owned(); // We cannot use the borrowed version because - // the struct is Deserialize. - match site_id { - // Site exists - Some(site_id) => Ok(SiteAndHost::FileSite { site_id, site_slug }), - // Site missing - None => Ok(SiteAndHost::MissingSiteSlug { site_slug }), - } - } else if hostname == files_domain_no_dot { - // Check if it's the files domain by itself. - // - // This is weird, wjfiles should always a site slug subdomain, - // so in this case we just temporary redirect to the main domain, - // stripping the path. - Ok(SiteAndHost::FileRoot) - } else { - // If it's anything else, it must be a canonical or custom domain. - // That means it's the main site. Let's do a lookup and let - // DomainService handle it for us. - // - // This also caches the lookup, to avoid us having to talk to - // DEEPWELL more than necessary. - state.get_host_from_domain(hostname).await - } -} diff --git a/wws/src/info.rs b/wws/src/info.rs index a208913029..750e805212 100644 --- a/wws/src/info.rs +++ b/wws/src/info.rs @@ -22,33 +22,8 @@ mod build { include!(concat!(env!("OUT_DIR"), "/built.rs")); } -use once_cell::sync::Lazy; - #[allow(unused_imports)] pub use self::build::{ CFG_ENDIAN, GIT_COMMIT_HASH, NUM_JOBS, PKG_AUTHORS, PKG_DESCRIPTION, PKG_LICENSE, PKG_NAME, PKG_REPOSITORY, PKG_VERSION, RUSTC_VERSION, TARGET, }; - -pub static VERSION_INFO: Lazy = Lazy::new(|| { - let mut version = format!("v{PKG_VERSION}"); - - if let Some(commit_hash) = *GIT_COMMIT_HASH_SHORT { - str_write!(&mut version, " [{commit_hash}]"); - } - - version -}); - -pub static GIT_COMMIT_HASH_SHORT: Lazy> = - Lazy::new(|| build::GIT_COMMIT_HASH.map(|s| &s[..8])); - -#[test] -fn info() { - assert!(VERSION.starts_with(PKG_NAME)); - assert!(VERSION.ends_with(&*VERSION_INFO)); - - if let Some(hash) = *GIT_COMMIT_HASH_SHORT { - assert_eq!(hash.len(), 8); - } -} diff --git a/wws/src/macros.rs b/wws/src/macros.rs deleted file mode 100644 index 6a6f99f650..0000000000 --- a/wws/src/macros.rs +++ /dev/null @@ -1,44 +0,0 @@ -/* - * macros.rs - * - * Wilson's Web Server - Serves a zoo of content (framerail, user files, code, etc) - * Copyright (C) 2019-2025 Wikijump Team - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Affero General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Affero General Public License for more details. - * - * You should have received a copy of the GNU Affero General Public License - * along with this program. If not, see . - */ - -/// Like `std::write!()`, except it asserts the writing succeeded. -/// -/// This is done because the only failure mode for writing to a `String` -/// would be insufficient memory, which would cause an abort anyways. -/// -/// # See also -/// * [`str_writeln!`](macro.str_writeln.html) -macro_rules! 
str_write { - ($dest:expr, $($arg:tt)*) => {{ - use std::fmt::Write; - write!($dest, $($arg)*).expect("Writing to string failed"); - }}; -} - -/// Convert a string to a `HeaderValue`. -/// -/// This code assumes the string in question is valid and can be -/// converted to a header value. -macro_rules! header_value { - ($value:expr) => {{ - use axum::http::header::HeaderValue; - HeaderValue::from_str(&$value).expect("String is not a valid header value") - }}; -} diff --git a/wws/src/main.rs b/wws/src/main.rs index d53be88f4d..f2be2ac2fb 100644 --- a/wws/src/main.rs +++ b/wws/src/main.rs @@ -29,16 +29,11 @@ extern crate str_macro; #[macro_use] extern crate tracing; -#[macro_use] -mod macros; - mod cache; mod config; mod deepwell; mod error; -mod framerail; mod handler; -mod host; mod info; mod path; mod route; diff --git a/wws/src/route.rs b/wws/src/route.rs index f7aafc6b03..945d882aba 100644 --- a/wws/src/route.rs +++ b/wws/src/route.rs @@ -18,17 +18,13 @@ * along with this program. If not, see . */ -use crate::{handler::*, info, state::ServerState}; +use crate::{handler::*, state::ServerState}; use axum::{ - body::Body, - extract::{Request, State}, http::header::HeaderValue, routing::{any, get}, Router, }; use axum_client_ip::{SecureClientIp, SecureClientIpSource}; -use axum_extra::extract::Host; -use std::sync::Arc; use tower_http::{ compression::CompressionLayer, normalize_path::NormalizePathLayer, set_header::SetResponseHeaderLayer, trace::TraceLayer, @@ -40,35 +36,6 @@ use tower_http::{ pub const REAL_IP_SOURCE: SecureClientIpSource = SecureClientIpSource::RightmostXForwardedFor; pub fn build_router(state: ServerState) -> Router { - let main_state = Arc::clone(&state); - let file_state = Arc::clone(&state); - let header_state = Arc::clone(&state); - - // Router that serves framerail - let main_router = Router::new() - // Convenience redirect routes - .route("/{page_slug}/code/{index}", any(redirect_to_code_route)) - .route("/{page_slug}/html/{id}", any(redirect_to_html_route)) - .route("/{page_slug}/file/{filename}", any(redirect_to_file_route)) - .route( - "/{page_slug}/download/{filename}", - any(redirect_to_download_route), - ) - // Routes that are really on wjfiles - .route("/local--files/{*rest}", any(redirect_to_files)) - .route("/local--code/{*rest}", any(redirect_to_files)) - .route("/local--html/{*rest}", any(redirect_to_files)) - .route("/-/files/{*rest}", any(redirect_to_files)) - .route("/-/file/{*rest}", any(redirect_to_files)) - .route("/-/download/{*rest}", any(redirect_to_files)) - .route("/-/code/{*rest}", any(redirect_to_files)) - .route("/-/html/{*rest}", any(redirect_to_files)) - // Main handler - .fallback(proxy_framerail) - .with_state(main_state); - - // Router that serves wjfiles - // // NOTE: For all GET routes, axum automatically handles HEAD requests. // The same logic is run, but the body is removed, which is very // convenient for us. @@ -76,7 +43,8 @@ pub fn build_router(state: ServerState) -> Router { // If we can avoid an expensive operation in a HEAD, then add // a "method: http::Method" parameter in the request then check // that before doing the relevant operation. 
- let files_router = Router::new() + + Router::new() // Wikidot routes .route( "/local--files/{page_slug}/{filename}", @@ -105,25 +73,11 @@ pub fn build_router(state: ServerState) -> Router { .route("/-/code/{page_slug}/{index}", any(handle_invalid_method)) .route("/-/html/{page_slug}/{id}", get(handle_html_block)) .route("/-/html/{page_slug}/{id}", any(handle_invalid_method)) - .fallback(redirect_to_main) - .with_state(file_state); - - Router::new() - // Forward requests to the appropriate sub-router depending on the hostname - .fallback( - |State(state): State, - Host(hostname): Host, - SecureClientIp(ip): SecureClientIp, - request: Request| async move { - handle_host_delegation(state, hostname, ip, request, main_router, files_router) - .await - }, - ) // General routes .route("/robots.txt", get(handle_robots_txt)) // TODO .route("/.well-known", any(handle_well_known)) // TODO - .route("/-/teapot", any(handle_teapot)) .route("/-/health-check", any(handle_health_check)) + .fallback(redirect_to_main) // Middleware .layer(TraceLayer::new_for_http()) .layer(NormalizePathLayer::trim_trailing_slash()) diff --git a/wws/src/state.rs b/wws/src/state.rs index 190faf7376..cc633e9a27 100644 --- a/wws/src/state.rs +++ b/wws/src/state.rs @@ -21,15 +21,8 @@ use crate::{ cache::Cache, config::Secrets, - deepwell::{Deepwell, Domains, FileData, PageData, SiteData}, + deepwell::{Deepwell, Domains, FileData, PageData}, error::Result, - framerail::Framerail, - host::SiteAndHost, -}; -use axum::body::Body; -use hyper_util::{ - client::legacy::{connect::HttpConnector, Client as HyperClient}, - rt::TokioExecutor, }; use s3::bucket::Bucket; use std::sync::Arc; @@ -38,21 +31,17 @@ use std::time::Duration; const BUCKET_REQUEST_TIMEOUT: Duration = Duration::from_millis(200); pub type ServerState = Arc; -pub type Client = HyperClient; #[derive(Debug)] pub struct ServerStateInner { pub domains: Domains, - pub client: Client, pub deepwell: Deepwell, - pub framerail: Framerail, pub cache: Cache, pub s3_bucket: Box, } pub async fn build_server_state( Secrets { - framerail_host, deepwell_url, redis_url, s3_bucket, @@ -61,12 +50,10 @@ pub async fn build_server_state( s3_path_style, }: Secrets, ) -> Result { - let framerail = Framerail::new(framerail_host); let deepwell = Deepwell::connect(&deepwell_url)?; deepwell.check().await; let domains = deepwell.domains().await?; let cache = Cache::connect(&redis_url)?; - let client = HyperClient::builder(TokioExecutor::new()).build(HttpConnector::new()); let s3_bucket = { let mut bucket = Bucket::new(&s3_bucket, s3_region.clone(), s3_credentials.clone())?; @@ -80,9 +67,7 @@ pub async fn build_server_state( Ok(Arc::new(ServerStateInner { domains, - client, deepwell, - framerail, cache, s3_bucket, })) @@ -92,31 +77,6 @@ impl ServerStateInner { // Contains implementations for the common pattern of "check the cache, // if not present, get it from DEEPWELL and populate it". - pub async fn get_site_from_slug(&self, site_slug: &str) -> Result> { - match self.cache.get_site_from_slug(site_slug).await? { - Some(site_id) => Ok(Some(site_id)), - None => match self.deepwell.get_site_from_slug(site_slug).await? { - None => Ok(None), - Some(SiteData { site_id }) => { - self.cache.set_site_from_slug(site_slug, site_id).await?; - Ok(Some(site_id)) - } - }, - } - } - - pub async fn get_host_from_domain(&self, domain: &str) -> Result { - match self.cache.get_host_from_domain(domain).await? 
{ - Some(host) => Ok(host), - None => { - let host = self.deepwell.get_site_from_domain(domain).await?; - self.cache.set_host_from_domain(domain, &host).await?; - - Ok(host) - } - } - } - pub async fn get_page(&self, site_id: i64, page_slug: &str) -> Result> { match self.cache.get_page(site_id, page_slug).await? { Some(page_id) => Ok(Some(page_id)), From 1f3cb62e42b47b9ea5d02adc8cee56c6142851cb Mon Sep 17 00:00:00 2001 From: Emmie Maeda Date: Sat, 29 Mar 2025 00:25:49 -0400 Subject: [PATCH 306/306] Add teapot route to Caddyfile. --- deepwell/src/services/caddy/service.rs | 3 +++ deepwell/test/Caddyfile.basic_local | 3 +++ deepwell/test/Caddyfile.basic_localdev | 3 +++ deepwell/test/Caddyfile.basic_prod | 3 +++ deepwell/test/Caddyfile.full_prod | 3 +++ deepwell/test/Caddyfile.long | 3 +++ deepwell/test/Caddyfile.proxies | 3 +++ 7 files changed, 21 insertions(+) diff --git a/deepwell/src/services/caddy/service.rs b/deepwell/src/services/caddy/service.rs index cebfd5e948..473aee2691 100644 --- a/deepwell/src/services/caddy/service.rs +++ b/deepwell/src/services/caddy/service.rs @@ -160,6 +160,8 @@ impl CaddyService { (serve_main) {{ import strip_headers + respond /-/teapot '🫖' 418 + # Redirect, route is on the files server @files {{ path /*/code/* @@ -273,6 +275,7 @@ www.{domain} {{ (serve_files) {{ import strip_headers encode + respond /-/teapot '🫖' 418 reverse_proxy http://{wws_host} }} diff --git a/deepwell/test/Caddyfile.basic_local b/deepwell/test/Caddyfile.basic_local index 027d301807..01d434e8af 100644 --- a/deepwell/test/Caddyfile.basic_local +++ b/deepwell/test/Caddyfile.basic_local @@ -18,6 +18,8 @@ (serve_main) { import strip_headers + respond /-/teapot '🫖' 418 + # Redirect, route is on the files server @files { path /*/code/* @@ -87,6 +89,7 @@ www.example.com { (serve_files) { import strip_headers encode + respond /-/teapot '🫖' 418 reverse_proxy http://wws:7000 } diff --git a/deepwell/test/Caddyfile.basic_localdev b/deepwell/test/Caddyfile.basic_localdev index 37f6ed309d..6d93239299 100644 --- a/deepwell/test/Caddyfile.basic_localdev +++ b/deepwell/test/Caddyfile.basic_localdev @@ -21,6 +21,8 @@ (serve_main) { import strip_headers + respond /-/teapot '🫖' 418 + # Redirect, route is on the files server @files { path /*/code/* @@ -90,6 +92,7 @@ www.example.com { (serve_files) { import strip_headers encode + respond /-/teapot '🫖' 418 reverse_proxy http://wws:7000 } diff --git a/deepwell/test/Caddyfile.basic_prod b/deepwell/test/Caddyfile.basic_prod index b73be00664..74985c058e 100644 --- a/deepwell/test/Caddyfile.basic_prod +++ b/deepwell/test/Caddyfile.basic_prod @@ -17,6 +17,8 @@ (serve_main) { import strip_headers + respond /-/teapot '🫖' 418 + # Redirect, route is on the files server @files { path /*/code/* @@ -86,6 +88,7 @@ www.example.com { (serve_files) { import strip_headers encode + respond /-/teapot '🫖' 418 reverse_proxy http://wws:7000 } diff --git a/deepwell/test/Caddyfile.full_prod b/deepwell/test/Caddyfile.full_prod index b7f9aa1e6d..66c1e2c4f9 100644 --- a/deepwell/test/Caddyfile.full_prod +++ b/deepwell/test/Caddyfile.full_prod @@ -17,6 +17,8 @@ (serve_main) { import strip_headers + respond /-/teapot '🫖' 418 + # Redirect, route is on the files server @files { path /*/code/* @@ -174,6 +176,7 @@ www.scpwiki.wikijump.test { (serve_files) { import strip_headers encode + respond /-/teapot '🫖' 418 reverse_proxy http://wws:7000 } diff --git a/deepwell/test/Caddyfile.long b/deepwell/test/Caddyfile.long index 3c243da36b..5abc97ca26 100644 --- a/deepwell/test/Caddyfile.long +++ 
b/deepwell/test/Caddyfile.long @@ -17,6 +17,8 @@ (serve_main) { import strip_headers + respond /-/teapot '🫖' 418 + # Redirect, route is on the files server @files { path /*/code/* @@ -86,6 +88,7 @@ www.example.com { (serve_files) { import strip_headers encode + respond /-/teapot '🫖' 418 reverse_proxy http://wws:7000 } diff --git a/deepwell/test/Caddyfile.proxies b/deepwell/test/Caddyfile.proxies index 143e39e33c..d8a1476f2a 100644 --- a/deepwell/test/Caddyfile.proxies +++ b/deepwell/test/Caddyfile.proxies @@ -17,6 +17,8 @@ (serve_main) { import strip_headers + respond /-/teapot '🫖' 418 + # Redirect, route is on the files server @files { path /*/code/* @@ -86,6 +88,7 @@ www.example.com { (serve_files) { import strip_headers encode + respond /-/teapot '🫖' 418 reverse_proxy http://wws_proxy_host }
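
Note on wws/src/state.rs: the helpers kept there follow the cache-aside pattern described in its comment ("check the cache, if not present, get it from DEEPWELL and populate it"). The sketch below is illustrative only; it uses generic closures in place of the real Cache and Deepwell types and is not part of the patches in this series.

    use std::future::Future;

    // Generic cache-aside lookup: consult the cache first, fall back to the
    // source of truth, and populate the cache when the source has the value.
    // All names and signatures here are placeholders, not the actual wws API.
    async fn cache_aside<K, V, E, CGet, CGetFut, CSet, CSetFut, Src, SrcFut>(
        key: K,
        cache_get: CGet,
        cache_set: CSet,
        source_get: Src,
    ) -> Result<Option<V>, E>
    where
        K: Copy,
        V: Clone,
        CGet: FnOnce(K) -> CGetFut,
        CGetFut: Future<Output = Result<Option<V>, E>>,
        CSet: FnOnce(K, V) -> CSetFut,
        CSetFut: Future<Output = Result<(), E>>,
        Src: FnOnce(K) -> SrcFut,
        SrcFut: Future<Output = Result<Option<V>, E>>,
    {
        if let Some(value) = cache_get(key).await? {
            return Ok(Some(value)); // cache hit
        }
        match source_get(key).await? {
            None => Ok(None), // missing upstream too; nothing to cache
            Some(value) => {
                cache_set(key, value.clone()).await?; // populate for next time
                Ok(Some(value))
            }
        }
    }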
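
Note on the router comment in wws/src/route.rs: axum's documented behaviour is that routes registered with `get` also answer HEAD requests, running the same handler and stripping the response body. A minimal check of that behaviour is sketched below; it assumes the usual axum/tower test setup (tower's `ServiceExt::oneshot` plus tokio's test macro) and is not part of these patches.

    use axum::{
        body::Body,
        http::{Method, Request, StatusCode},
        routing::get,
        Router,
    };
    use tower::ServiceExt; // for `oneshot`

    #[tokio::test]
    async fn head_is_served_for_get_routes() {
        // A stand-in route; the real wws GET routes behave the same way.
        let app = Router::new().route("/robots.txt", get(|| async { "User-agent: *\n" }));

        let request = Request::builder()
            .method(Method::HEAD)
            .uri("/robots.txt")
            .body(Body::empty())
            .unwrap();

        // The GET handler runs, but axum removes the body from the response.
        let response = app.oneshot(request).await.unwrap();
        assert_eq!(response.status(), StatusCode::OK);
    }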
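
Note on the final patch: with `handle_teapot` removed from wws/src/handler/misc.rs above, the `/-/teapot` endpoint is answered entirely by the generated Caddyfile through `respond /-/teapot '🫖' 418` in both the serve_main and serve_files snippets. For a local setup that runs wws without Caddy in front, an equivalent route can be sketched directly in axum; the helper below is hypothetical and mirrors the removed handler rather than anything added by these patches.

    use axum::{
        http::StatusCode,
        response::{IntoResponse, Response},
        routing::any,
        Router,
    };

    // Mirrors Caddy's `respond /-/teapot '🫖' 418`.
    async fn teapot() -> Response {
        (StatusCode::IM_A_TEAPOT, "🫖").into_response()
    }

    // Hypothetical helper; the real router is assembled in wws/src/route.rs.
    fn with_teapot(router: Router) -> Router {
        router.route("/-/teapot", any(teapot))
    }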