From 0617663b249f9ca488e5de652108b17d67fbaf45 Mon Sep 17 00:00:00 2001 From: Vika Date: Sat, 29 Jul 2023 21:59:56 +0300 Subject: Moved the entire Kittybox tree into the root --- Cargo.lock | 4589 ++++++++++++++++++++ Cargo.toml | 158 + build.rs | 30 + companion-lite/index.html | 127 + companion-lite/src/base64.ts | 89 + companion-lite/src/indieauth.ts | 113 + companion-lite/src/main.ts | 178 + companion-lite/src/micropub_api.ts | 125 + companion-lite/style.css | 47 + companion-lite/tsconfig.json | 104 + dev.sh | 32 + examples/password-hasher.rs | 33 + indieauth/Cargo.toml | 31 + indieauth/src/lib.rs | 773 ++++ indieauth/src/pkce.rs | 132 + indieauth/src/scopes.rs | 208 + kittybox-rs/Cargo.lock | 4589 -------------------- kittybox-rs/Cargo.toml | 158 - kittybox-rs/build.rs | 30 - kittybox-rs/companion-lite/index.html | 127 - kittybox-rs/companion-lite/src/base64.ts | 89 - kittybox-rs/companion-lite/src/indieauth.ts | 113 - kittybox-rs/companion-lite/src/main.ts | 178 - kittybox-rs/companion-lite/src/micropub_api.ts | 125 - kittybox-rs/companion-lite/style.css | 47 - kittybox-rs/companion-lite/tsconfig.json | 104 - kittybox-rs/dev.sh | 32 - kittybox-rs/examples/password-hasher.rs | 33 - kittybox-rs/indieauth/Cargo.toml | 31 - kittybox-rs/indieauth/src/lib.rs | 773 ---- kittybox-rs/indieauth/src/pkce.rs | 132 - kittybox-rs/indieauth/src/scopes.rs | 208 - kittybox-rs/migrations/0001_init.sql | 62 - kittybox-rs/migrations/webmention/0001_init.sql | 15 - kittybox-rs/src/bin/kittybox-check-webmention.rs | 152 - kittybox-rs/src/bin/kittybox-indieauth-helper.rs | 233 - kittybox-rs/src/bin/kittybox-mf2.rs | 49 - kittybox-rs/src/bin/kittybox_bulk_import.rs | 66 - kittybox-rs/src/bin/kittybox_database_converter.rs | 106 - kittybox-rs/src/database/file/mod.rs | 733 ---- kittybox-rs/src/database/memory.rs | 249 -- kittybox-rs/src/database/mod.rs | 793 ---- kittybox-rs/src/database/postgres/mod.rs | 416 -- kittybox-rs/src/database/redis/edit_post.lua | 93 - 
kittybox-rs/src/database/redis/mod.rs | 398 -- kittybox-rs/src/frontend/login.rs | 333 -- kittybox-rs/src/frontend/mod.rs | 404 -- kittybox-rs/src/frontend/onboarding.rs | 181 - kittybox-rs/src/indieauth/backend.rs | 105 - kittybox-rs/src/indieauth/backend/fs.rs | 420 -- kittybox-rs/src/indieauth/mod.rs | 883 ---- kittybox-rs/src/indieauth/webauthn.rs | 140 - kittybox-rs/src/lib.rs | 93 - kittybox-rs/src/main.rs | 489 --- kittybox-rs/src/media/mod.rs | 141 - kittybox-rs/src/media/storage/file.rs | 434 -- kittybox-rs/src/media/storage/mod.rs | 177 - kittybox-rs/src/metrics.rs | 21 - kittybox-rs/src/micropub/get.rs | 82 - kittybox-rs/src/micropub/mod.rs | 846 ---- kittybox-rs/src/micropub/util.rs | 444 -- kittybox-rs/src/tokenauth.rs | 358 -- kittybox-rs/src/webmentions/check.rs | 113 - kittybox-rs/src/webmentions/mod.rs | 195 - kittybox-rs/src/webmentions/queue.rs | 303 -- kittybox-rs/templates/Cargo.toml | 33 - kittybox-rs/templates/assets/jslicense.html | 31 - kittybox-rs/templates/assets/onboarding.css | 33 - kittybox-rs/templates/assets/style.css | 236 - kittybox-rs/templates/build.rs | 90 - kittybox-rs/templates/javascript/dist/indieauth.js | 118 - .../templates/javascript/dist/webauthn/register.js | 1 - kittybox-rs/templates/javascript/src/indieauth.ts | 150 - kittybox-rs/templates/javascript/src/lib.ts | 3 - kittybox-rs/templates/javascript/src/onboarding.ts | 120 - .../templates/javascript/src/webauthn/register.ts | 0 kittybox-rs/templates/javascript/tsconfig.json | 104 - kittybox-rs/templates/src/indieauth.rs | 201 - kittybox-rs/templates/src/lib.rs | 367 -- kittybox-rs/templates/src/login.rs | 17 - kittybox-rs/templates/src/mf2.rs | 478 -- kittybox-rs/templates/src/onboarding.rs | 196 - kittybox-rs/templates/src/templates.rs | 168 - kittybox-rs/util/Cargo.toml | 29 - kittybox-rs/util/src/error.rs | 95 - kittybox-rs/util/src/lib.rs | 123 - kittybox-rs/util/src/queue.rs | 66 - kittybox.nix | 2 +- migrations/0001_init.sql | 62 + 
migrations/webmention/0001_init.sql | 15 + src/bin/kittybox-check-webmention.rs | 152 + src/bin/kittybox-indieauth-helper.rs | 233 + src/bin/kittybox-mf2.rs | 49 + src/bin/kittybox_bulk_import.rs | 66 + src/bin/kittybox_database_converter.rs | 106 + src/database/file/mod.rs | 733 ++++ src/database/memory.rs | 249 ++ src/database/mod.rs | 793 ++++ src/database/postgres/mod.rs | 416 ++ src/database/redis/edit_post.lua | 93 + src/database/redis/mod.rs | 398 ++ src/frontend/login.rs | 333 ++ src/frontend/mod.rs | 404 ++ src/frontend/onboarding.rs | 181 + src/indieauth/backend.rs | 105 + src/indieauth/backend/fs.rs | 420 ++ src/indieauth/mod.rs | 883 ++++ src/indieauth/webauthn.rs | 140 + src/lib.rs | 93 + src/main.rs | 489 +++ src/media/mod.rs | 141 + src/media/storage/file.rs | 434 ++ src/media/storage/mod.rs | 177 + src/metrics.rs | 21 + src/micropub/get.rs | 82 + src/micropub/mod.rs | 846 ++++ src/micropub/util.rs | 444 ++ src/tokenauth.rs | 358 ++ src/webmentions/check.rs | 113 + src/webmentions/mod.rs | 195 + src/webmentions/queue.rs | 303 ++ templates/Cargo.toml | 33 + templates/assets/jslicense.html | 31 + templates/assets/onboarding.css | 33 + templates/assets/style.css | 236 + templates/build.rs | 90 + templates/javascript/dist/indieauth.js | 118 + templates/javascript/dist/webauthn/register.js | 1 + templates/javascript/src/indieauth.ts | 150 + templates/javascript/src/lib.ts | 3 + templates/javascript/src/onboarding.ts | 120 + templates/javascript/src/webauthn/register.ts | 0 templates/javascript/tsconfig.json | 104 + templates/src/indieauth.rs | 201 + templates/src/lib.rs | 367 ++ templates/src/login.rs | 17 + templates/src/mf2.rs | 478 ++ templates/src/onboarding.rs | 196 + templates/src/templates.rs | 168 + util/Cargo.toml | 29 + util/src/error.rs | 95 + util/src/lib.rs | 123 + util/src/queue.rs | 66 + 143 files changed, 18956 insertions(+), 18956 deletions(-) create mode 100644 Cargo.lock create mode 100644 Cargo.toml create mode 100644 build.rs create 
mode 100644 companion-lite/index.html create mode 100644 companion-lite/src/base64.ts create mode 100644 companion-lite/src/indieauth.ts create mode 100644 companion-lite/src/main.ts create mode 100644 companion-lite/src/micropub_api.ts create mode 100644 companion-lite/style.css create mode 100644 companion-lite/tsconfig.json create mode 100755 dev.sh create mode 100644 examples/password-hasher.rs create mode 100644 indieauth/Cargo.toml create mode 100644 indieauth/src/lib.rs create mode 100644 indieauth/src/pkce.rs create mode 100644 indieauth/src/scopes.rs delete mode 100644 kittybox-rs/Cargo.lock delete mode 100644 kittybox-rs/Cargo.toml delete mode 100644 kittybox-rs/build.rs delete mode 100644 kittybox-rs/companion-lite/index.html delete mode 100644 kittybox-rs/companion-lite/src/base64.ts delete mode 100644 kittybox-rs/companion-lite/src/indieauth.ts delete mode 100644 kittybox-rs/companion-lite/src/main.ts delete mode 100644 kittybox-rs/companion-lite/src/micropub_api.ts delete mode 100644 kittybox-rs/companion-lite/style.css delete mode 100644 kittybox-rs/companion-lite/tsconfig.json delete mode 100755 kittybox-rs/dev.sh delete mode 100644 kittybox-rs/examples/password-hasher.rs delete mode 100644 kittybox-rs/indieauth/Cargo.toml delete mode 100644 kittybox-rs/indieauth/src/lib.rs delete mode 100644 kittybox-rs/indieauth/src/pkce.rs delete mode 100644 kittybox-rs/indieauth/src/scopes.rs delete mode 100644 kittybox-rs/migrations/0001_init.sql delete mode 100644 kittybox-rs/migrations/webmention/0001_init.sql delete mode 100644 kittybox-rs/src/bin/kittybox-check-webmention.rs delete mode 100644 kittybox-rs/src/bin/kittybox-indieauth-helper.rs delete mode 100644 kittybox-rs/src/bin/kittybox-mf2.rs delete mode 100644 kittybox-rs/src/bin/kittybox_bulk_import.rs delete mode 100644 kittybox-rs/src/bin/kittybox_database_converter.rs delete mode 100644 kittybox-rs/src/database/file/mod.rs delete mode 100644 kittybox-rs/src/database/memory.rs delete mode 100644 
kittybox-rs/src/database/mod.rs delete mode 100644 kittybox-rs/src/database/postgres/mod.rs delete mode 100644 kittybox-rs/src/database/redis/edit_post.lua delete mode 100644 kittybox-rs/src/database/redis/mod.rs delete mode 100644 kittybox-rs/src/frontend/login.rs delete mode 100644 kittybox-rs/src/frontend/mod.rs delete mode 100644 kittybox-rs/src/frontend/onboarding.rs delete mode 100644 kittybox-rs/src/indieauth/backend.rs delete mode 100644 kittybox-rs/src/indieauth/backend/fs.rs delete mode 100644 kittybox-rs/src/indieauth/mod.rs delete mode 100644 kittybox-rs/src/indieauth/webauthn.rs delete mode 100644 kittybox-rs/src/lib.rs delete mode 100644 kittybox-rs/src/main.rs delete mode 100644 kittybox-rs/src/media/mod.rs delete mode 100644 kittybox-rs/src/media/storage/file.rs delete mode 100644 kittybox-rs/src/media/storage/mod.rs delete mode 100644 kittybox-rs/src/metrics.rs delete mode 100644 kittybox-rs/src/micropub/get.rs delete mode 100644 kittybox-rs/src/micropub/mod.rs delete mode 100644 kittybox-rs/src/micropub/util.rs delete mode 100644 kittybox-rs/src/tokenauth.rs delete mode 100644 kittybox-rs/src/webmentions/check.rs delete mode 100644 kittybox-rs/src/webmentions/mod.rs delete mode 100644 kittybox-rs/src/webmentions/queue.rs delete mode 100644 kittybox-rs/templates/Cargo.toml delete mode 100644 kittybox-rs/templates/assets/jslicense.html delete mode 100644 kittybox-rs/templates/assets/onboarding.css delete mode 100644 kittybox-rs/templates/assets/style.css delete mode 100644 kittybox-rs/templates/build.rs delete mode 100644 kittybox-rs/templates/javascript/dist/indieauth.js delete mode 100644 kittybox-rs/templates/javascript/dist/webauthn/register.js delete mode 100644 kittybox-rs/templates/javascript/src/indieauth.ts delete mode 100644 kittybox-rs/templates/javascript/src/lib.ts delete mode 100644 kittybox-rs/templates/javascript/src/onboarding.ts delete mode 100644 kittybox-rs/templates/javascript/src/webauthn/register.ts delete mode 100644 
kittybox-rs/templates/javascript/tsconfig.json delete mode 100644 kittybox-rs/templates/src/indieauth.rs delete mode 100644 kittybox-rs/templates/src/lib.rs delete mode 100644 kittybox-rs/templates/src/login.rs delete mode 100644 kittybox-rs/templates/src/mf2.rs delete mode 100644 kittybox-rs/templates/src/onboarding.rs delete mode 100644 kittybox-rs/templates/src/templates.rs delete mode 100644 kittybox-rs/util/Cargo.toml delete mode 100644 kittybox-rs/util/src/error.rs delete mode 100644 kittybox-rs/util/src/lib.rs delete mode 100644 kittybox-rs/util/src/queue.rs create mode 100644 migrations/0001_init.sql create mode 100644 migrations/webmention/0001_init.sql create mode 100644 src/bin/kittybox-check-webmention.rs create mode 100644 src/bin/kittybox-indieauth-helper.rs create mode 100644 src/bin/kittybox-mf2.rs create mode 100644 src/bin/kittybox_bulk_import.rs create mode 100644 src/bin/kittybox_database_converter.rs create mode 100644 src/database/file/mod.rs create mode 100644 src/database/memory.rs create mode 100644 src/database/mod.rs create mode 100644 src/database/postgres/mod.rs create mode 100644 src/database/redis/edit_post.lua create mode 100644 src/database/redis/mod.rs create mode 100644 src/frontend/login.rs create mode 100644 src/frontend/mod.rs create mode 100644 src/frontend/onboarding.rs create mode 100644 src/indieauth/backend.rs create mode 100644 src/indieauth/backend/fs.rs create mode 100644 src/indieauth/mod.rs create mode 100644 src/indieauth/webauthn.rs create mode 100644 src/lib.rs create mode 100644 src/main.rs create mode 100644 src/media/mod.rs create mode 100644 src/media/storage/file.rs create mode 100644 src/media/storage/mod.rs create mode 100644 src/metrics.rs create mode 100644 src/micropub/get.rs create mode 100644 src/micropub/mod.rs create mode 100644 src/micropub/util.rs create mode 100644 src/tokenauth.rs create mode 100644 src/webmentions/check.rs create mode 100644 src/webmentions/mod.rs create mode 100644 
src/webmentions/queue.rs create mode 100644 templates/Cargo.toml create mode 100644 templates/assets/jslicense.html create mode 100644 templates/assets/onboarding.css create mode 100644 templates/assets/style.css create mode 100644 templates/build.rs create mode 100644 templates/javascript/dist/indieauth.js create mode 100644 templates/javascript/dist/webauthn/register.js create mode 100644 templates/javascript/src/indieauth.ts create mode 100644 templates/javascript/src/lib.ts create mode 100644 templates/javascript/src/onboarding.ts create mode 100644 templates/javascript/src/webauthn/register.ts create mode 100644 templates/javascript/tsconfig.json create mode 100644 templates/src/indieauth.rs create mode 100644 templates/src/lib.rs create mode 100644 templates/src/login.rs create mode 100644 templates/src/mf2.rs create mode 100644 templates/src/onboarding.rs create mode 100644 templates/src/templates.rs create mode 100644 util/Cargo.toml create mode 100644 util/src/error.rs create mode 100644 util/src/lib.rs create mode 100644 util/src/queue.rs diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..39323f4 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,4589 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "addr2line" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "adler32" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" + +[[package]] +name = "ahash" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +dependencies = [ + "cfg-if", + "getrandom 0.2.10", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" +dependencies = [ + "memchr", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "allocator-api2" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56fc6cf8dc8c4158eed8649f9b8b0ea1518eb62b544fe9490d66fa0b349eafe9" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is-terminal", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" + +[[package]] +name = "anstyle-parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "anstyle-wincon" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" +dependencies = [ + "anstyle", + "windows-sys 0.48.0", +] + +[[package]] +name = "anyhow" +version = "1.0.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" + +[[package]] +name = "argon2" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95c2fcf79ad1932ac6269a738109997a83c227c09b75842ae564dc8ede6a861c" 
+dependencies = [ + "base64ct", + "blake2", + "password-hash", +] + +[[package]] +name = "asn1-rs" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ff05a702273012438132f449575dbc804e27b2f3cbe3069aa237d26c98fa33" +dependencies = [ + "asn1-rs-derive", + "asn1-rs-impl", + "displaydoc", + "nom", + "num-traits", + "rusticata-macros", + "thiserror", + "time 0.3.23", +] + +[[package]] +name = "asn1-rs-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db8b7511298d5b7784b40b092d9e9dcd3a627a5707e4b5e507931ab0d44eeebf" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 1.0.109", + "synstructure", +] + +[[package]] +name = "asn1-rs-impl" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 1.0.109", +] + +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener", + "futures-core", +] + +[[package]] +name = "async-compression" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0122885821398cc923ece939e24d1056a2384ee719432397fa9db87230ff11" +dependencies = [ + "brotli", + "flate2", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "async-trait" +version = "0.1.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + +[[package]] +name = "autocfg" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "axum" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8175979259124331c1d7bf6586ee7e0da434155e4b2d48ec2c8386281d8df39" +dependencies = [ + "async-trait", + "axum-core", + "axum-macros", + "bitflags 1.3.2", + "bytes", + "futures-util", + "headers", + "http", + "http-body", + "hyper", + "itoa 1.0.8", + "matchit", + "memchr", + "mime", + "multer", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "mime", + "rustversion", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-extra" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"febf23ab04509bd7672e6abe76bd8277af31b679e89fa5ffc6087dc289a448a3" +dependencies = [ + "axum", + "axum-core", + "bytes", + "cookie", + "futures-util", + "http", + "http-body", + "mime", + "pin-project-lite", + "serde", + "tokio", + "tower", + "tower-http 0.4.1", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-macros" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bb524613be645939e280b7279f7b017f98cf7f5ef084ec374df373530e73277" +dependencies = [ + "heck", + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "backtrace" +version = "0.3.68" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "base64" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "base64urlsafedata" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18b3d30abb74120a9d5267463b9e0045fdccc4dd152e7249d966612dc1721384" +dependencies = [ + "base64 0.21.2", + "serde", + "serde_json", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = 
"bitflags" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" +dependencies = [ + "serde", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "brotli" +version = "3.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "2.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bumpalo" +version = "3.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" + +[[package]] +name = "cc" +version = "1.0.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "time 0.1.45", + "wasm-bindgen", + "winapi", +] + +[[package]] +name = "clap" +version = "4.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1640e5cc7fb47dbb8338fd471b105e7ed6c3cb2aeb00c2e067127ffd3764a05d" +dependencies = [ + "clap_builder", + "clap_derive", + "once_cell", +] + +[[package]] +name = "clap_builder" +version = "4.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98c59138d527eeaf9b53f35a77fcc1fad9d883116070c63d5de1c7dc7b00c72b" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8cd2b2a819ad6eec39e8f1d6b53001af1e5469f8c177579cdaeb313115b825f" +dependencies = [ + "heck", + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "clap_lex" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" + +[[package]] +name = "cloudabi" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + +[[package]] +name = "combine" +version = "4.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +dependencies = [ + "bytes", + "futures-core", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "compact_jwt" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51f9032b96a89dd79ffc5f62523d5351ebb40680cbdfc4029393b511b9e971aa" +dependencies = [ + "base64 0.13.1", + "base64urlsafedata", + "hex", + "openssl", + "serde", + "serde_json", + "tracing", + "url", + "uuid 1.4.0", +] + +[[package]] +name = "concurrent-queue" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62ec6771ecfa0762d24683ee5a32ad78487a3d3afdc0fb8cae19d2c5deb50b7c" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "const-oid" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6340df57935414636969091153f35f68d9f00bbc8fb4a9c6054706c213e6c6bc" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "cookie" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7efb37c3e1ccb1ff97164ad95ac1606e8ccd35b3fa0a7d99a304c7f4a428cc24" +dependencies = [ + "percent-encoding", + "time 0.3.23", + "version_check", +] + +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" + +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + +[[package]] +name = "cpufeatures" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +dependencies = [ + "libc", +] + +[[package]] +name = "crc" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" + +[[package]] +name = "crc32fast" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + 
"typenum", +] + +[[package]] +name = "cssparser" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "754b69d351cdc2d8ee09ae203db831e005560fc6030da058f86ad60c92a9cb0a" +dependencies = [ + "cssparser-macros", + "dtoa-short", + "itoa 0.4.8", + "matches", + "phf 0.8.0", + "proc-macro2 1.0.64", + "quote 1.0.29", + "smallvec", + "syn 1.0.109", +] + +[[package]] +name = "cssparser-macros" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" +dependencies = [ + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "dary_heap" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7762d17f1241643615821a8455a0b2c3e803784b058693d990b11f2dce25a0ca" + +[[package]] +name = "data-encoding" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" + +[[package]] +name = "deadpool" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "421fe0f90f2ab22016f32a9881be5134fdd71c65298917084b0c7477cbc3856e" +dependencies = [ + "async-trait", + "deadpool-runtime", + "num_cpus", + "retain_mut", + "tokio", +] + +[[package]] +name = "deadpool-runtime" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" + +[[package]] +name = "der" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7ed52955ce76b1554f509074bb357d3fb8ac9b51288a65a3fd480d1dfba946" +dependencies = [ + "const-oid", + "pem-rfc7468", + "zeroize", +] + +[[package]] +name = "der-parser" +version = "7.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fe398ac75057914d7d07307bf67dc7f3f574a26783b4fc7805a20ffa9f506e82" +dependencies = [ + "asn1-rs", + "displaydoc", + "nom", + "num-bigint", + "num-traits", + "rusticata-macros", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2 1.0.64", + "quote 1.0.29", + "rustc_version", + "syn 1.0.109", +] + +[[package]] +name = "deunicode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c1bba4f227a4a53d12b653f50ca7bf10c9119ae2aba56aff9e0338b5c98f36a" + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "displaydoc" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "dtoa" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "519b83cd10f5f6e969625a409f735182bea5558cd8b64c655806ceaae36f1999" + +[[package]] +name = "dtoa-short" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbaceec3c6e4211c79e7b1800fb9680527106beb2f9c51904a3210c03a448c74" +dependencies = [ + "dtoa", +] + +[[package]] +name = "easy-scraper" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "18a857bc01b5ae04874234f6b5d16b8b8fa86910aa5777479c2669b5df607fce" +dependencies = [ + "html5ever 0.25.2", + "kuchiki", + "regex", +] + +[[package]] +name = "either" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +dependencies = [ + "serde", +] + +[[package]] +name = "ellipse" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1835a82a08e5c9393639e7cf99786a65af71f7fa9df7c91a519f2d52e6fa052d" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "encoding_rs" +version = "0.8.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1" + +[[package]] +name = "errno" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + +[[package]] +name = "event-listener" +version = "2.5.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "faker_rand" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "300d2ddbf2245b5b5e723995e0961033121b4fc2be9045fb661af82bd739ffb6" +dependencies = [ + "deunicode", + "lazy_static", + "rand 0.8.5", +] + +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + +[[package]] +name = "flate2" +version = "1.0.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "flume" +version = "0.10.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1657b4441c3403d9f7b3409e47575237dac27b1b5726df654a6ecbf92f0f7577" +dependencies = [ + "futures-core", + "futures-sink", + "pin-project", + "spin 0.9.8", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" + +[[package]] +name = "futf" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" +dependencies = [ + "mac", + "new_debug_unreachable", +] + +[[package]] +name = "futures" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" + +[[package]] +name = "futures-executor" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + +[[package]] +name = "futures-io" +version = "0.3.28" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" + +[[package]] +name = "futures-lite" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "memchr", + "parking", + "pin-project-lite", + "waker-fn", +] + +[[package]] +name = "futures-macro" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "futures-sink" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" + +[[package]] +name = "futures-task" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" + +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" + +[[package]] +name = "futures-util" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + 
+[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "gimli" +version = "0.27.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" + +[[package]] +name = "h2" +version = "0.3.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap 1.9.3", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "half" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +dependencies = [ + 
"ahash", +] + +[[package]] +name = "hashbrown" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +dependencies = [ + "ahash", + "allocator-api2", +] + +[[package]] +name = "hashlink" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "312f66718a2d7789ffef4f4b7b213138ed9f1eb3aa1d0d82fc99f88fb3ffd26f" +dependencies = [ + "hashbrown 0.14.0", +] + +[[package]] +name = "headers" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" +dependencies = [ + "base64 0.13.1", + "bitflags 1.3.2", + "bytes", + "headers-core", + "http", + "httpdate", + "mime", + "sha1 0.10.5", +] + +[[package]] +name = "headers-core" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" +dependencies = [ + "http", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "hermit-abi" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hkdf" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest", +] + +[[package]] +name = "home" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "html5ever" +version = "0.22.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c213fa6a618dc1da552f54f85cba74b05d8e883c92ec4e89067736938084c26e" +dependencies = [ + "log", + "mac", + "markup5ever 0.7.5", + "proc-macro2 0.4.30", + "quote 0.6.13", + "syn 0.15.44", +] + +[[package]] +name = "html5ever" +version = "0.25.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5c13fb08e5d4dfc151ee5e88bae63f7773d61852f3bdc73c9f4b9e1bde03148" +dependencies = [ + "log", + "mac", + "markup5ever 0.10.1", + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 1.0.109", +] + +[[package]] +name = "http" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +dependencies = [ + "bytes", + "fnv", + "itoa 1.0.8", +] + +[[package]] +name = "http-body" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "http-range-header" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bfe8eed0a9285ef776bb792479ea3834e8b94e13d615c2f66d03dd50a435a29" + +[[package]] +name = "http-types" +version = "2.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e9b187a72d63adbfba487f48095306ac823049cb504ee195541e91c7775f5ad" +dependencies = [ + 
"anyhow", + "async-channel", + "base64 0.13.1", + "futures-lite", + "http", + "infer", + "pin-project-lite", + "rand 0.7.3", + "serde", + "serde_json", + "serde_qs", + "serde_urlencoded", + "url", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "hyper" +version = "0.14.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa 1.0.8", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" +dependencies = [ + "futures-util", + "http", + "hyper", + "rustls", + "tokio", + "tokio-rustls", +] + +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows", +] + 
+[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "include_dir" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18762faeff7122e89e0857b02f7ce6fcc0d101d5e9ad2ad7846cc01d61b7f19e" +dependencies = [ + "include_dir_macros", +] + +[[package]] +name = "include_dir_macros" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg 1.1.0", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +dependencies = [ + "equivalent", + "hashbrown 0.14.0", +] + +[[package]] +name = "infer" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac" + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "io-lifetimes" 
+version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "ipnet" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" + +[[package]] +name = "is-terminal" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +dependencies = [ + "hermit-abi", + "rustix 0.38.3", + "windows-sys 0.48.0", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "itoa" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b02a5381cc465bd3041d84623d0fa3b66738b52b8e2fc3bab8ad63ab032f4a" + +[[package]] +name = "js-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "kittybox" +version = "0.1.0" +dependencies = [ + "anyhow", + "argon2", + "async-trait", + "axum", + "axum-extra", + "bytes", + "chrono", + "clap", + "data-encoding", + "easy-scraper", + "either", + "faker_rand", + "futures", + "futures-util", + "hex", + "hyper", + "kittybox-frontend-renderer", + "kittybox-indieauth", + "kittybox-util", + "lazy_static", + "listenfd", + "markdown", + "microformats", + 
"newbase60", + "prometheus", + "rand 0.8.5", + "redis", + "relative-path", + "reqwest", + "serde", + "serde_json", + "serde_urlencoded", + "serde_variant", + "sha2", + "sqlx", + "tempfile", + "thiserror", + "tokio", + "tokio-stream", + "tokio-util", + "tower", + "tower-http 0.3.5", + "tracing", + "tracing-log", + "tracing-subscriber", + "tracing-test", + "tracing-tree", + "url", + "uuid 1.4.0", + "webauthn-rs", + "wiremock", +] + +[[package]] +name = "kittybox-frontend-renderer" +version = "0.1.0" +dependencies = [ + "axum", + "chrono", + "ellipse", + "faker_rand", + "http", + "include_dir", + "kittybox-indieauth", + "kittybox-util", + "libflate", + "markup", + "microformats", + "rand 0.8.5", + "serde_json", + "walkdir", +] + +[[package]] +name = "kittybox-indieauth" +version = "0.1.0" +dependencies = [ + "axum-core", + "data-encoding", + "http", + "rand 0.8.5", + "serde", + "serde_json", + "serde_urlencoded", + "sha2", + "url", +] + +[[package]] +name = "kittybox-util" +version = "0.1.0" +dependencies = [ + "async-trait", + "axum-core", + "futures-util", + "http", + "rand 0.8.5", + "serde", + "serde_json", + "sqlx", + "tokio", + "uuid 1.4.0", +] + +[[package]] +name = "kuchiki" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ea8e9c6e031377cff82ee3001dc8026cdf431ed4e2e6b51f98ab8c73484a358" +dependencies = [ + "cssparser", + "html5ever 0.25.2", + "matches", + "selectors", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +dependencies = [ + "spin 0.5.2", +] + +[[package]] +name = "libc" +version = "0.2.147" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" + +[[package]] +name = "libflate" +version = "2.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7d5654ae1795afc7ff76f4365c2c8791b0feb18e8996a96adad8ffd7c3b2bf" +dependencies = [ + "adler32", + "core2", + "crc32fast", + "dary_heap", + "libflate_lz77", +] + +[[package]] +name = "libflate_lz77" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be5f52fb8c451576ec6b79d3f4deb327398bc05bbdbd99021a6e77a4c855d524" +dependencies = [ + "core2", + "hashbrown 0.13.2", + "rle-decode-fast", +] + +[[package]] +name = "libm" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" + +[[package]] +name = "libsqlite3-sys" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" + +[[package]] +name = "linux-raw-sys" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" + +[[package]] +name = "linux-raw-sys" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" + +[[package]] +name = "listenfd" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c02b14f35d9f5f082fd0b1b34aa0ef32e3354c859c721d7f3325b3f79a42ba54" +dependencies = [ + "libc", + "uuid 0.8.2", + "winapi", +] + +[[package]] +name = "lock_api" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +dependencies = [ + "autocfg 1.1.0", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" + +[[package]] +name = "mac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" + +[[package]] +name = "markdown" +version = "1.0.0-alpha.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1bd98c3b68451b0390a289c58c856adb4e2b50cc40507ce2a105d5b00eafc80" +dependencies = [ + "unicode-id", +] + +[[package]] +name = "markup" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9196a235d499738d04f6a2466ce2610bf6b84730610efea8bee1b90d028b0d" +dependencies = [ + "itoa 1.0.8", + "markup-proc-macro", +] + +[[package]] +name = "markup-proc-macro" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a927f0e237dcbdd8c1a8ab03c4e1e8b1999804c448ebf06ff3b5512506c8150" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 1.0.109", +] + +[[package]] +name = "markup5ever" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897636f9850c3eef4905a5540683ed53dc9393860f0846cab2c2ddf9939862ff" +dependencies = [ + "phf 0.7.24", + "phf_codegen 0.7.24", + "serde", + "serde_derive", + "serde_json", + "string_cache 0.7.5", + "string_cache_codegen 0.4.4", + "tendril", +] + +[[package]] +name = "markup5ever" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a24f40fb03852d1cdd84330cddcaf98e9ec08a7b7768e952fad3b4cf048ec8fd" +dependencies = [ + "log", + "phf 0.8.0", + "phf_codegen 0.8.0", + "string_cache 0.8.7", + "string_cache_codegen 0.5.2", + 
"tendril", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "matchit" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40" + +[[package]] +name = "md-5" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" +dependencies = [ + "digest", +] + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "microformats" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18e7fccd15cba21880e824d71b8354e9e67561c9cd2bf3ec09b21dba26392ecb" +dependencies = [ + "chrono", + "html5ever 0.22.5", + "lazy_static", + "log", + "regex", + "serde", + "serde_json", + "thiserror", + "url", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +dependencies = [ + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.48.0", +] + +[[package]] +name = "multer" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01acbdc23469fd8fe07ab135923371d5f5a422fbf9c522158677c8eb15bc51c2" +dependencies = [ + "bytes", + "encoding_rs", + "futures-util", + "http", + "httparse", + "log", + "memchr", + "mime", + "spin 0.9.8", + "version_check", +] + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" + +[[package]] +name = "newbase60" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f6fe19de628588cbd968ba26b247819c7b7a000b3b4b5bcbf69ea606a33b5ba" + +[[package]] +name = "nodrop" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] 
+name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-bigint-dig" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" +dependencies = [ + "byteorder", + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg 1.1.0", + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +dependencies = [ + "autocfg 1.1.0", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +dependencies = [ + "autocfg 1.1.0", + "libm", +] + +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "object" +version = "0.31.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" +dependencies = [ + "memchr", +] + +[[package]] +name = "oid-registry" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38e20717fa0541f39bd146692035c37bedfa532b3e5071b35761082407546b2a" +dependencies = [ + "asn1-rs", +] + +[[package]] +name = "once_cell" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "openssl" +version = "0.10.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "parking" +version = "2.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" + +[[package]] +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.48.1", +] + +[[package]] +name = "password-hash" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" +dependencies = [ + "base64ct", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "paste" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4b27ab7be369122c218afc2079489cdcb4b517c0a3fc386ff11e1fedfcc2b35" + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "percent-encoding" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" + +[[package]] +name = "phf" +version = "0.7.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3da44b85f8e8dfaec21adae67f95d93244b2ecf6ad2a692320598dcc8e6dd18" +dependencies = [ + "phf_shared 0.7.24", +] + +[[package]] +name = "phf" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" +dependencies = [ + "phf_macros", + "phf_shared 0.8.0", + "proc-macro-hack", +] + +[[package]] +name = "phf_codegen" +version = "0.7.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b03e85129e324ad4166b06b2c7491ae27fe3ec353af72e72cd1654c7225d517e" +dependencies = [ + "phf_generator 0.7.24", + "phf_shared 0.7.24", +] + +[[package]] +name = "phf_codegen" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815" +dependencies = [ + "phf_generator 0.8.0", + "phf_shared 0.8.0", +] + +[[package]] +name = "phf_generator" +version = "0.7.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09364cc93c159b8b06b1f4dd8a4398984503483891b0c26b867cf431fb132662" +dependencies = [ + "phf_shared 0.7.24", + "rand 0.6.5", +] + +[[package]] +name = "phf_generator" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526" +dependencies = [ + "phf_shared 0.8.0", + "rand 0.7.3", +] + +[[package]] +name = "phf_generator" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" +dependencies = [ + "phf_shared 0.10.0", + "rand 0.8.5", +] + +[[package]] +name = "phf_macros" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6fde18ff429ffc8fe78e2bf7f8b7a5a5a6e2a8b58bc5a9ac69198bbda9189c" +dependencies = [ + "phf_generator 0.8.0", + "phf_shared 0.8.0", + "proc-macro-hack", + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 1.0.109", +] + +[[package]] +name = "phf_shared" +version = "0.7.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"234f71a15de2288bcb7e3b6515828d22af7ec8598ee6d24c3b526fa0a80b67a0" +dependencies = [ + "siphasher 0.2.3", +] + +[[package]] +name = "phf_shared" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" +dependencies = [ + "siphasher 0.3.10", +] + +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher 0.3.10", +] + +[[package]] +name = "pin-project" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "030ad2bc4db10a8944cb0d837f158bdfec4d4a4873ab701a95046770d11f8842" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der", + "pkcs8", + "spki", +] + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ 
+ "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" + +[[package]] +name = "ppv-lite86" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" +dependencies = [ + "unicode-xid 0.1.0", +] + +[[package]] +name = "proc-macro2" +version = "1.0.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "procfs" +version = "0.14.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1de8dacb0873f77e6aefc6d71e044761fcc68060290f5b1089fcdf84626bb69" +dependencies = [ + "bitflags 1.3.2", + "byteorder", + "hex", + "lazy_static", + "rustix 0.36.15", +] + +[[package]] +name = "prometheus" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "449811d15fbdf5ceb5c1144416066429cf82316e2ec8ce0c1f6f8a02e7bbcf8c" +dependencies = [ + "cfg-if", + "fnv", + "lazy_static", + "libc", + "memchr", + "parking_lot", + "procfs", + "protobuf", + "thiserror", +] + +[[package]] +name = 
"protobuf" +version = "2.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" + +[[package]] +name = "quote" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" +dependencies = [ + "proc-macro2 0.4.30", +] + +[[package]] +name = "quote" +version = "1.0.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" +dependencies = [ + "proc-macro2 1.0.64", +] + +[[package]] +name = "rand" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" +dependencies = [ + "autocfg 0.1.8", + "libc", + "rand_chacha 0.1.1", + "rand_core 0.4.2", + "rand_hc 0.1.0", + "rand_isaac", + "rand_jitter", + "rand_os", + "rand_pcg 0.1.2", + "rand_xorshift", + "winapi", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc 0.2.0", + "rand_pcg 0.2.1", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +dependencies = [ + "autocfg 0.1.8", + "rand_core 0.3.1", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" +dependencies = [ + "rand_core 0.4.2", +] + +[[package]] +name = "rand_core" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.10", +] + +[[package]] +name = "rand_hc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_isaac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rand_jitter" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" +dependencies = [ + "libc", + "rand_core 0.4.2", + "winapi", +] + +[[package]] +name = "rand_os" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" +dependencies = [ + "cloudabi", + "fuchsia-cprng", + "libc", + "rand_core 0.4.2", + "rdrand", + "winapi", +] + +[[package]] +name = "rand_pcg" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" +dependencies = [ + "autocfg 0.1.8", + "rand_core 0.4.2", +] + +[[package]] +name = "rand_pcg" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_xorshift" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" +dependencies = [ + "rand_core 0.3.1", +] + +[[package]] +name = "redis" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "152f3863635cbb76b73bc247845781098302c6c9ad2060e1a9a7de56840346b6" +dependencies = [ + "async-trait", + "bytes", + "combine", + "futures-util", + "itoa 1.0.8", + "percent-encoding", + "pin-project-lite", + 
"ryu", + "sha1 0.6.1", + "tokio", + "tokio-util", + "url", +] + +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.3.2", + "regex-syntax 0.7.3", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83d3daa6976cffb758ec878f108ba0e062a45b2d6ca3a2cca965338855476caf" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.7.3", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846" + +[[package]] +name = "relative-path" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bf2521270932c3c7bed1a59151222bd7643c79310f2916f01925e1e16255698" + +[[package]] +name = "reqwest" +version = "0.11.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cde824a14b7c14f85caff81225f411faacc04a2013f41670f41443742b1c1c55" +dependencies = [ + "async-compression", + "base64 0.21.2", 
+ "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "hyper-tls", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "tokio", + "tokio-native-tls", + "tokio-rustls", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots 0.22.6", + "winreg", +] + +[[package]] +name = "retain_mut" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0" + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted", + "web-sys", + "winapi", +] + +[[package]] +name = "rle-decode-fast" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422" + +[[package]] +name = "rsa" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ab43bb47d23c1a631b4b680199a45255dce26fa9ab2fa902581f624ff13e6a8" +dependencies = [ + "byteorder", + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-iter", + "num-traits", + "pkcs1", + "pkcs8", + "rand_core 0.6.4", + "signature", + "spki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rusticata-macros" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" +dependencies = [ + "nom", +] + +[[package]] +name = "rustix" +version = "0.36.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c37f1bd5ef1b5422177b7646cba67430579cfe2ace80f284fee876bca52ad941" +dependencies = [ + "bitflags 1.3.2", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys 0.1.4", + "windows-sys 0.45.0", +] + +[[package]] +name = "rustix" +version = "0.37.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" +dependencies = [ + "bitflags 1.3.2", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys 0.3.8", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustix" +version = "0.38.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac5ffa1efe7548069688cd7028f32591853cd7b5b756d41bcffd2353e4fc75b4" +dependencies = [ + "bitflags 2.3.3", + "errno", + "libc", + "linux-raw-sys 0.4.3", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustls" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b19faa85ecb5197342b54f987b142fb3e30d0c90da40f80ef4fa9a726e6676ed" +dependencies = [ + "log", + "ring", + "rustls-webpki 0.101.1", + "sct", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" +dependencies = [ + "base64 0.21.2", +] + +[[package]] +name = "rustls-webpki" +version = "0.100.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6207cd5ed3d8dca7816f8f3725513a34609c0c765bf652b8c3cb4cfd87db46b" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15f36a6828982f422756984e47912a7a51dcbc2a197aa791158f8ca61cd8204e" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc31bd9b61a32c31f9650d18add92aa83a49ba979c143eefd27fe7177b05bd5f" + +[[package]] +name = "ryu" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe232bdf6be8c8de797b22184ee71118d63780ea42ac85b61d1baa6d3b782ae9" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "sct" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "security-framework" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" +dependencies = [ + "bitflags 1.3.2", + 
"core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f51d0c0d83bec45f16480d0ce0058397a69e48fcdc52d1dc8855fb68acbd31a7" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "selectors" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df320f1889ac4ba6bc0cdc9c9af7af4bd64bb927bccdf32d81140dc1f9be12fe" +dependencies = [ + "bitflags 1.3.2", + "cssparser", + "derive_more", + "fxhash", + "log", + "matches", + "phf 0.8.0", + "phf_codegen 0.8.0", + "precomputed-hash", + "servo_arc", + "smallvec", + "thin-slice", +] + +[[package]] +name = "semver" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" + +[[package]] +name = "serde" +version = "1.0.170" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56657f512baabca8f840542f9ca8152aecf182c473c26e46e58d6aab4f6e439" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_cbor_2" +version = "0.12.0-dev" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b46d75f449e01f1eddbe9b00f432d616fbbd899b809c837d0fbc380496a0dd55" +dependencies = [ + "half", + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.170" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77d477848e6b23adba0db397777d5aad864555bc17fd9c89abb3b8009788b7b8" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "serde_json" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c" +dependencies = [ + "itoa 1.0.8", + "ryu", + "serde", +] + 
+[[package]] +name = "serde_path_to_error" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acc4422959dd87a76cb117c191dcbffc20467f06c9100b76721dab370f24d3a" +dependencies = [ + "itoa 1.0.8", + "serde", +] + +[[package]] +name = "serde_qs" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6" +dependencies = [ + "percent-encoding", + "serde", + "thiserror", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa 1.0.8", + "ryu", + "serde", +] + +[[package]] +name = "serde_variant" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47a8ec0b2fd0506290348d9699c0e3eb2e3e8c0498b5a9a6158b3bd4d6970076" +dependencies = [ + "serde", +] + +[[package]] +name = "servo_arc" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98238b800e0d1576d8b6e3de32827c2d74bee68bb97748dcf5071fb53965432" +dependencies = [ + "nodrop", + "stable_deref_trait", +] + +[[package]] +name = "sha1" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1da05c97445caa12d05e848c4a4fcbbea29e748ac28f7e80e9b010392063770" +dependencies = [ + "sha1_smol", +] + +[[package]] +name = "sha1" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sha1_smol" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" + +[[package]] 
+name = "sha2" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +dependencies = [ + "libc", +] + +[[package]] +name = "signature" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" +dependencies = [ + "digest", + "rand_core 0.6.4", +] + +[[package]] +name = "siphasher" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac" + +[[package]] +name = "siphasher" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" + +[[package]] +name = "slab" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +dependencies = [ + "autocfg 1.1.0", +] + +[[package]] +name = "smallvec" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" + +[[package]] +name = "socket2" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spki" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "sqlformat" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e" +dependencies = [ + "itertools", + "nom", + "unicode_categories", +] + +[[package]] +name = "sqlx" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ef53c86d2066e04f0ac6b1364f16d13d82388e2d07f11a5c71782345555761" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a22fd81e9c1ad53c562edb869ff042b215d4eadefefc4784bacfbfd19835945" +dependencies = [ + "ahash", + "atoi", + "byteorder", + "bytes", + "chrono", + "crc", + "crossbeam-queue", + "dotenvy", + "either", + "event-listener", + "futures-channel", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashlink", + "hex", + "indexmap 2.0.0", + "log", + "memchr", + "native-tls", + "once_cell", + "paste", + "percent-encoding", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "sha2", + "smallvec", + 
"sqlformat", + "thiserror", + "tokio", + "tokio-stream", + "tracing", + "url", + "uuid 1.4.0", + "webpki-roots 0.23.1", +] + +[[package]] +name = "sqlx-macros" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00bb7c096a202b8164c175614cbfb79fe0e1e0a3d50e0374526183ef2974e4a2" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "sqlx-core", + "sqlx-macros-core", + "syn 1.0.109", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37d644623ab9699014e5b3cb61a040d16caa50fd477008f63f1399ae35498a58" +dependencies = [ + "dotenvy", + "either", + "heck", + "hex", + "once_cell", + "proc-macro2 1.0.64", + "quote 1.0.29", + "serde", + "serde_json", + "sha2", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn 1.0.109", + "tempfile", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8264c59b28b6858796acfcedc660aa4c9075cc6e4ec8eb03cdca2a3e725726db" +dependencies = [ + "atoi", + "base64 0.21.2", + "bitflags 2.3.3", + "byteorder", + "bytes", + "chrono", + "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa 1.0.8", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa", + "serde", + "sha1 0.10.5", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror", + "tracing", + "uuid 1.4.0", + "whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cab6147b81ca9213a7578f1b4c9d24c449a53953cd2222a7b5d7cd29a5c3139" +dependencies = [ + "atoi", + "base64 0.21.2", + "bitflags 2.3.3", + "byteorder", + "chrono", + "crc", + "dotenvy", + "etcetera", + 
"futures-channel", + "futures-core", + "futures-io", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "itoa 1.0.8", + "log", + "md-5", + "memchr", + "once_cell", + "rand 0.8.5", + "serde", + "serde_json", + "sha1 0.10.5", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror", + "tracing", + "uuid 1.4.0", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59fba60afa64718104b71eec6984f8779d4caffff3b30cde91a75843c7efc126" +dependencies = [ + "atoi", + "chrono", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "sqlx-core", + "tracing", + "url", + "uuid 1.4.0", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "string_cache" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89c058a82f9fd69b1becf8c274f412281038877c553182f1d02eb027045a2d67" +dependencies = [ + "lazy_static", + "new_debug_unreachable", + "phf_shared 0.7.24", + "precomputed-hash", + "serde", + "string_cache_codegen 0.4.4", + "string_cache_shared", +] + +[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot", + "phf_shared 0.10.0", + "precomputed-hash", + "serde", +] + +[[package]] +name = "string_cache_codegen" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f45ed1b65bf9a4bf2f7b7dc59212d1926e9eaf00fa998988e420fd124467c6" +dependencies = [ + "phf_generator 0.7.24", + 
"phf_shared 0.7.24", + "proc-macro2 1.0.64", + "quote 1.0.29", + "string_cache_shared", +] + +[[package]] +name = "string_cache_codegen" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" +dependencies = [ + "phf_generator 0.10.0", + "phf_shared 0.10.0", + "proc-macro2 1.0.64", + "quote 1.0.29", +] + +[[package]] +name = "string_cache_shared" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc" + +[[package]] +name = "stringprep" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ee348cb74b87454fff4b551cbf727025810a004f88aeacae7f85b87f4e9a1c1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "subtle" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" + +[[package]] +name = "syn" +version = "0.15.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "unicode-xid 0.1.0", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"15e3fc8c0c74267e2df136e5e5fb656a464158aa57624053375eb9c8c6e25ae2" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" + +[[package]] +name = "synstructure" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 1.0.109", + "unicode-xid 0.2.4", +] + +[[package]] +name = "tempfile" +version = "3.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" +dependencies = [ + "autocfg 1.1.0", + "cfg-if", + "fastrand", + "redox_syscall", + "rustix 0.37.23", + "windows-sys 0.48.0", +] + +[[package]] +name = "tendril" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" +dependencies = [ + "futf", + "mac", + "utf-8", +] + +[[package]] +name = "thin-slice" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaa81235c7058867fa8c0e7314f33dcce9c215f535d1913822a2b3f5e289f3c" + +[[package]] +name = "thiserror" +version = "1.0.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "thread_local" 
+version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + +[[package]] +name = "time" +version = "0.3.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446" +dependencies = [ + "itoa 1.0.8", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" + +[[package]] +name = "time-macros" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4" +dependencies = [ + "time-core", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" +dependencies = [ + "autocfg 1.1.0", + "backtrace", + "bytes", + "libc", + "mio", + "num_cpus", + "parking_lot", + "pin-project-lite", + 
"signal-hook-registry", + "socket2", + "tokio-macros", + "tracing", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-macros" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f873044bf02dd1e8239e9c1293ea39dad76dc594ec16185d0a1bf31d8dc8d858" +dependencies = [ + "bitflags 1.3.2", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-range-header", + "pin-project-lite", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8bd22a874a2d0b70452d5597b12c537331d49060824a95f49f108994f94aa4c" +dependencies = [ + "bitflags 2.3.3", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-range-header", + "pin-project-lite", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", +] + +[[package]] +name = "tracing-core" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "tracing-test" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a2c0ff408fe918a94c428a3f2ad04e4afd5c95bbc08fcf868eff750c15728a4" +dependencies = [ + "lazy_static", + "tracing-core", + "tracing-subscriber", + "tracing-test-macro", +] + +[[package]] +name = "tracing-test-macro" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "258bc1c4f8e2e73a977812ab339d503e6feeb92700f6d07a6de4d321522d5c08" +dependencies = [ + "lazy_static", + "quote 1.0.29", + "syn 1.0.109", +] + +[[package]] +name = "tracing-tree" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92d6b63348fad3ae0439b8bebf8d38fb5bda0b115d7a8a7e6f165f12790c58c3" +dependencies = [ + "is-terminal", + "nu-ansi-term", + "tracing-core", + "tracing-log", + "tracing-subscriber", +] + +[[package]] +name = "try-lock" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" + 
+[[package]] +name = "typenum" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-id" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d70b6494226b36008c8366c288d77190b3fad2eb4c10533139c1c1f461127f1a" + +[[package]] +name = "unicode-ident" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22049a19f4a68748a168c0fc439f9516686aa045927ff767eca0a85101fb6e73" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + +[[package]] +name = "unicode-xid" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "unicode_categories" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "url" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" + +[[package]] +name = "uuid" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be" +dependencies = [ + "getrandom 0.2.10", + "serde", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "waker-fn" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" + +[[package]] +name 
= "walkdir" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + 
"web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +dependencies = [ + "quote 1.0.29", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +dependencies = [ + "proc-macro2 1.0.64", + "quote 1.0.29", + "syn 2.0.25", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" + +[[package]] +name = "wasm-streams" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bbae3363c08332cadccd13b67db371814cd214c2524020932f0804b8cf7c078" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "web-sys" +version = "0.3.64" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webauthn-rs" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2db00711c712414e93b019c4596315085792215bc2ac2d5872f9e8913b0a6316" +dependencies = [ + "base64urlsafedata", + "serde", + "tracing", + "url", + "uuid 1.4.0", + "webauthn-rs-core", +] + +[[package]] +name = "webauthn-rs-core" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "294c78c83f12153a51e1cf1e6970b5da1397645dada39033a9c3173a8fc4fc2b" +dependencies = [ + "base64 0.13.1", + "base64urlsafedata", + 
"compact_jwt", + "der-parser", + "nom", + "openssl", + "rand 0.8.5", + "serde", + "serde_cbor_2", + "serde_json", + "thiserror", + "tracing", + "url", + "uuid 1.4.0", + "webauthn-rs-proto", + "x509-parser", +] + +[[package]] +name = "webauthn-rs-proto" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d24e638361a63ba5c0a0be6a60229490fcdf33740ed63df5bb6bdb627b52a138" +dependencies = [ + "base64urlsafedata", + "serde", + "serde_json", + "url", +] + +[[package]] +name = "webpki" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "webpki-roots" +version = "0.22.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +dependencies = [ + "webpki", +] + +[[package]] +name = "webpki-roots" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338" +dependencies = [ + "rustls-webpki 0.100.1", +] + +[[package]] +name = "whoami" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +dependencies = [ + "windows-targets 0.48.1", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.1", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 
0.48.0", + "windows_x86_64_msvc 0.48.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + +[[package]] +name = "winreg" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +dependencies = [ + "winapi", +] + +[[package]] +name = "wiremock" +version = "0.5.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6f71803d3a1c80377a06221e0530be02035d5b3e854af56c6ece7ac20ac441d" +dependencies = [ + "assert-json-diff", + "async-trait", + "base64 0.21.2", + "deadpool", + "futures", + "futures-timer", + "http-types", + "hyper", + "log", + "once_cell", + "regex", + "serde", + "serde_json", + "tokio", +] + +[[package]] +name = "x509-parser" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb9bace5b5589ffead1afb76e43e34cff39cd0f3ce7e170ae0c29e53b88eb1c" +dependencies = [ + "asn1-rs", + "base64 0.13.1", + "data-encoding", + "der-parser", + "lazy_static", + "nom", + "oid-registry", + "rusticata-macros", + 
"thiserror", + "time 0.3.23", +] + +[[package]] +name = "zeroize" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..71ebc66 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,158 @@ +[package] +name = "kittybox" +version = "0.1.0" +authors = ["Vika "] +edition = "2021" +default-run = "kittybox" +autobins = false + +[features] +default = ["rustls", "postgres"] +#util = ["anyhow"] +#migration = ["util"] +webauthn = ["openssl", "dep:webauthn"] +openssl = ["reqwest/native-tls-crate", "reqwest/native-tls-alpn", "sqlx/tls-native-tls"] +rustls = ["reqwest/rustls-tls-webpki-roots", "sqlx/tls-rustls"] +cli = ["clap"] +postgres = ["sqlx", "kittybox-util/sqlx"] + +[[bin]] +name = "kittybox" +path = "src/main.rs" +required-features = [] + +#[[bin]] +#name = "kittybox-bulk-import" +#path = "src/bin/kittybox_bulk_import.rs" +#required-features = ["migration"] + +#[[bin]] +#name = "kittybox-database-converter" +#path = "src/bin/kittybox_database_converter.rs" +#required-features = ["migration", "redis"] + +[[bin]] +name = "kittybox-indieauth-helper" +path = "src/bin/kittybox-indieauth-helper.rs" +required-features = ["cli"] + +[[bin]] +name = "kittybox-check-webmention" +path = "src/bin/kittybox-check-webmention.rs" +required-features = ["cli"] + +[[bin]] +name = "kittybox-mf2" +path = "src/bin/kittybox-mf2.rs" +required-features = ["cli"] + +[workspace] +members = [".", "./util", "./templates", "./indieauth"] +default-members = [".", "./util", "./templates", "./indieauth"] +[dependencies.kittybox-util] +version = "0.1.0" +path = "./util" +features = ["fs"] +[dependencies.kittybox-frontend-renderer] +version = "0.1.0" +path = "./templates" +[dependencies.kittybox-indieauth] +version = "0.1.0" +path = "./indieauth" +features = ["axum"] + +# See more keys and their definitions at 
https://doc.rust-lang.org/cargo/reference/manifest.html + +[dev-dependencies] +tempfile = "^3.4.0" # Temporary file managment +wiremock = "^0.5.14" +faker_rand = "^0.1.1" # Seedable, rand-compatible generators of fake data +rand = "^0.8.5" # Utilities for random number generation +tracing-test = "^0.2.2" + +[dependencies] +argon2 = { version = "^0.5.0", features = ["std"] } +async-trait = "^0.1.50" # Type erasure for async trait methods +bytes = "^1.1.0" +data-encoding = "^2.3.2" # Efficient and customizable data-encoding functions like base64, base32, and hex +easy-scraper = "^0.2.0" # HTML scraping library focused on ease of use +either = "^1.6.1" # A general purpose sum type with two cases +futures = "^0.3.14" # An implementation of futures and streams +futures-util = "^0.3.14" # Common utilities and extension traits for the futures-rs library +hex = "^0.4.3" +lazy_static = "^1.4.0" # A macro for declaring lazily evaluated statics in Rust +listenfd = "^0.5.0" # A simple library to work with listenfds passed from the outside (systemd/catflap socket activation) +#log = "^0.4.14" # A lightweight logging facade for Rust +markdown = "^1.0.0-alpha.7" # Native Rust library for parsing Markdown and (outputting HTML) +newbase60 = "^0.1.3" # A library that implements Tantek Γ‡elik's New Base 60 +rand = "^0.8.4" # Random number generators. 
+serde_json = "^1.0.64" # A JSON serialization file format +serde_urlencoded = "^0.7.0" # `x-www-form-urlencoded` meets Serde +serde_variant = "^0.1.1" # Retrieve serde provided variant names for enum objects +relative-path = "^1.5.0" # Portable relative paths for Rust +sha2 = "^0.10.7" # SHA-2 series of algorithms for Rust +uuid = "^1.3.3" +tracing = { version = "0.1.34", features = [] } +tracing-tree = "0.2.1" +tracing-log = "0.1.3" +tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json"] } +tower-http = { version = "0.3.3", features = ["trace", "cors", "catch-panic"] } +tower = { version = "0.4.12", features = ["tracing"] } +webauthn = { version = "0.4.5", package = "webauthn-rs", features = ["danger-allow-state-serialisation"], optional = true } +[dependencies.tokio] +version = "^1.29.1" +features = ["full", "tracing"] # TODO determine if my app doesn't need some features +#[dependencies.console-subscriber] +#version = "0.1.10" +[dependencies.tokio-stream] +version = "^0.1.8" +features = ["time", "net"] +[dependencies.tokio-util] +version = "^0.7.3" +features = ["io-util"] +[dependencies.anyhow] +version = "^1.0.42" +optional = true +[dependencies.axum] +version = "^0.6.18" +features = ["multipart", "json", "headers", "form", "macros"] +[dependencies.axum-extra] +version = "^0.7.4" +features = ["cookie"] +[dependencies.chrono] # Date and time library for Rust +version = "^0.4.19" +features = ["serde"] +[dependencies.redis] +version = "^0.21.3" +optional = true +features = ["aio", "tokio-comp"] +[dependencies.prometheus] # Prometheus instrumentation library for Rust applications +version = "^0.13.0" +features = ["process"] +[dependencies.serde] # A generic serialization/deserialization framework +version = "^1.0.170" +features = ["derive"] +[dependencies.url] # URL library for Rust, based on the WHATWG URL Standard +version = "^2.2.1" +features = ["serde"] +[dependencies.hyper] +version = "^0.14.17" +features = ["stream", "runtime"] 
+[dependencies.reqwest] +version = "^0.11.10" +default-features = false +features = ["gzip", "brotli", "json", "stream"] +[dependencies.microformats] +version = "^0.3.0" +#git = "https://gitlab.com/maxburon/microformats-parser" + +[dependencies.clap] +version = "4.3.11" +features = ["derive"] +optional = true +[dependencies.thiserror] +version = "1.0.35" +[dependencies.sqlx] +version = "^0.7" +features = ["uuid", "chrono", "json", "postgres", "runtime-tokio"] +optional = true \ No newline at end of file diff --git a/build.rs b/build.rs new file mode 100644 index 0000000..05eca7a --- /dev/null +++ b/build.rs @@ -0,0 +1,30 @@ +use std::env; + +fn main() { + println!("cargo:rerun-if-changed=migrations"); + + let out_dir = env::var("OUT_DIR").unwrap(); + + println!("cargo:rerun-if-changed=companion-lite/"); + let companion_out = std::path::Path::new(&out_dir).join("companion"); + + if let Ok(exit) = std::process::Command::new("tsc") + .arg("--outDir") + .arg(companion_out.as_os_str()) + .current_dir("companion-lite") + .spawn() + .unwrap() + .wait() + { + if !exit.success() { + std::process::exit(exit.code().unwrap_or(1)) + } + } + let companion_in = std::path::Path::new("companion-lite"); + for file in ["index.html", "style.css"] { + std::fs::copy( + companion_in.join(file), + &companion_out.join(file) + ).unwrap(); + } +} diff --git a/companion-lite/index.html b/companion-lite/index.html new file mode 100644 index 0000000..fc99c60 --- /dev/null +++ b/companion-lite/index.html @@ -0,0 +1,127 @@ + + + + + Kittybox-Micropub debug client + + + + + + + + + + + + + +
+ JavaScript licensing information for this software + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
main.jsAGPL-3.0main.ts (Kittybox source code)
micropub_api.jsAGPL-3.0micropub_api.ts (Kittybox source code)
indieauth.jsAGPL-3.0indieauth.ts (Kittybox source code)
base64.jsCC0 (Public Domain)base64.ts, adapted from MDN page on Base64
ESM.sh entrypoint for microformats-parserMIT/Expatesm.sh source code +
microformats-parserMIT/ExpatGitHub repository +
parse5 + MIT/Expat +
+ BSD-2-Clause +
+ GitHub repository for parse5 +
+ GitHub repository for entities, a parse5 dependency +
+
+ + diff --git a/companion-lite/src/base64.ts b/companion-lite/src/base64.ts new file mode 100644 index 0000000..2429894 --- /dev/null +++ b/companion-lite/src/base64.ts @@ -0,0 +1,89 @@ +// Array of bytes to Base64 string decoding +function b64ToUint6(nChr: number) { + return nChr > 64 && nChr < 91 + ? nChr - 65 + : nChr > 96 && nChr < 123 + ? nChr - 71 + : nChr > 47 && nChr < 58 + ? nChr + 4 + : nChr === 43 + ? 62 + : nChr === 47 + ? 63 + : 0; +} + +export function decode(sBase64: string, nBlocksSize?: number) { + const sB64Enc = sBase64.replace(/[^A-Za-z0-9+/]/g, ""); + const nInLen = sB64Enc.length; + const nOutLen = nBlocksSize + ? Math.ceil(((nInLen * 3 + 1) >> 2) / nBlocksSize) * nBlocksSize + : (nInLen * 3 + 1) >> 2; + const taBytes = new Uint8Array(nOutLen); + + let nMod3; + let nMod4; + let nUint24 = 0; + let nOutIdx = 0; + for (let nInIdx = 0; nInIdx < nInLen; nInIdx++) { + nMod4 = nInIdx & 3; + nUint24 |= b64ToUint6(sB64Enc.charCodeAt(nInIdx)) << (6 * (3 - nMod4)); + if (nMod4 === 3 || nInLen - nInIdx === 1) { + nMod3 = 0; + while (nMod3 < 3 && nOutIdx < nOutLen) { + taBytes[nOutIdx] = (nUint24 >>> ((16 >>> nMod3) & 24)) & 255; + nMod3++; + nOutIdx++; + } + nUint24 = 0; + } + } + + return taBytes; +} + +/* Base64 string to array encoding */ +function uint6ToB64(nUint6: number) { + return nUint6 < 26 + ? nUint6 + 65 + : nUint6 < 52 + ? nUint6 + 71 + : nUint6 < 62 + ? nUint6 - 4 + : nUint6 === 62 + ? 43 + : nUint6 === 63 + ? 
47 + : 65; +} + +export function encode(aBytes: Uint8Array) { + let nMod3 = 2; + let sB64Enc = ""; + + const nLen = aBytes.length; + let nUint24 = 0; + for (let nIdx = 0; nIdx < nLen; nIdx++) { + nMod3 = nIdx % 3; + if (nIdx > 0 && ((nIdx * 4) / 3) % 76 === 0) { + sB64Enc += "\r\n"; + } + + nUint24 |= aBytes[nIdx] << ((16 >>> nMod3) & 24); + if (nMod3 === 2 || aBytes.length - nIdx === 1) { + sB64Enc += String.fromCodePoint( + uint6ToB64((nUint24 >>> 18) & 63), + uint6ToB64((nUint24 >>> 12) & 63), + uint6ToB64((nUint24 >>> 6) & 63), + uint6ToB64(nUint24 & 63) + ); + nUint24 = 0; + } + } + return ( + sB64Enc.substr(0, sB64Enc.length - 2 + nMod3) + + (nMod3 === 2 ? "" : nMod3 === 1 ? "=" : "==") + ); +} + +export default { encode, decode } diff --git a/companion-lite/src/indieauth.ts b/companion-lite/src/indieauth.ts new file mode 100644 index 0000000..40facab --- /dev/null +++ b/companion-lite/src/indieauth.ts @@ -0,0 +1,113 @@ +// @ts-ignore +import { mf2 } from "https://esm.sh/microformats-parser@1.4.1?pin=v96" +import { MF2 } from "./micropub_api.js" +import base64 from "./base64.js" + /* + const { mf2 }: { + mf2: (html: string, options: { + baseUrl: string, + experimental?: { lang?: boolean, textContent?: boolean } + }) => { + items: MF2[], + rels: {[key: string]: string[]}, + "rel-urls": {[key: string]: { rels: string[], text?: string }} + } + } = + // @ts-ignore + await import("https://esm.sh/microformats-parser@1.4.1?pin=v96"); + */ + +interface IndieauthMetadata { + authorization_endpoint: string, + token_endpoint: string, + issuer: string, + introspection_endpoint?: string, + introspection_endpoint_auth_methods_supported?: ("Bearer")[], + revocation_endpoint?: string, + revocation_endpoint_auth_methods_supported?: ["none"], + scopes_supported?: string[], + response_types_supported: ["code"], + grant_types_supported: ("authorization_code" | "refresh_token")[] + code_challenge_methods_supported: ("S256")[] + authorization_response_iss_parameter_supported: 
true, + userinfo_endpoint?: string +} + +interface MF2ParsedData { + items: MF2[], + rels: {[key: string]: string[]}, + "rel-urls": {[key: string]: { rels: string[], text?: string }} +} + +export interface IndiewebEndpoints { + authorization_endpoint: URL, + token_endpoint: URL, + userinfo_endpoint: URL | null, + revocation_endpoint: URL | null, + micropub: URL, + +} + +export function create_verifier() { + const array = new Uint8Array(64) + crypto.getRandomValues(array) + + return array.reduce((str, byte) => str + byte.toString(16).padStart(2, '0'), '') +} + +export async function create_challenge(verifier: string): Promise { + return await crypto.subtle.digest('SHA-256', Uint8Array.from(verifier, c => c.charCodeAt(0))) + .then((buf) => base64.encode(new Uint8Array(buf))) + .then(s => { + return s + .replaceAll("+", "-") + .replaceAll("/", "_") + .replaceAll(/=$/g, "") + }) +} + +export async function discover_endpoints(me: URL): Promise { + const response = await fetch(me); + const data: MF2ParsedData = mf2(await response.text(), { baseUrl: me.toString() }); + let endpoints: Partial = {}; + if ("micropub" in data.rels) { + endpoints.micropub = new URL(data.rels.micropub[0]) + } else { + return null + } + if ("indieauth_metadata" in data.rels) { + const metadata_response = await fetch(data.rels.indieauth_metadata[0], { + headers: { + "Accept": "application/json" + } + }); + + const metadata = await metadata_response.json() as IndieauthMetadata; + endpoints.authorization_endpoint = new URL(metadata.authorization_endpoint) + endpoints.token_endpoint = new URL(metadata.token_endpoint) + if (metadata.userinfo_endpoint != null) { + endpoints.userinfo_endpoint = new URL(metadata.userinfo_endpoint) + } else { + endpoints.userinfo_endpoint = null + } + if (metadata.revocation_endpoint != null) { + endpoints.revocation_endpoint = new URL(metadata.revocation_endpoint) + } else { + endpoints.revocation_endpoint = null + } + + return endpoints as IndiewebEndpoints + } else if 
( + "authorization_endpoint" in data.rels + && "token_endpoint" in data.rels + ) { + endpoints.authorization_endpoint = new URL(data.rels.authorization_endpoint[0]) + endpoints.token_endpoint = new URL(data.rels.token_endpoint[0]) + endpoints.userinfo_endpoint = null + endpoints.revocation_endpoint = null + + return endpoints as IndiewebEndpoints + } else { + return null + } +} diff --git a/companion-lite/src/main.ts b/companion-lite/src/main.ts new file mode 100644 index 0000000..f45cb95 --- /dev/null +++ b/companion-lite/src/main.ts @@ -0,0 +1,178 @@ +import { Micropub, MicropubChannel, MF2 } from "./micropub_api.js"; + +const channel_select_radio = document.getElementById("select_channels") as HTMLInputElement; +channel_select_radio.onclick = async () => { + function populate_channel_list(channels: MicropubChannel[]) { + (document.getElementById("channels") as HTMLElement).style.display = "block"; + const channel_list = document.getElementById("channels_target") as HTMLElement; + channel_list.innerHTML = ""; + channels.forEach((channel) => { + const template = (document.getElementById("channel_selector") as HTMLTemplateElement).content.cloneNode(true) as HTMLElement; + const input = template.querySelector("input") as HTMLInputElement; + const label = template.querySelector("label") as HTMLLabelElement; + input.id = `channel_selector_option_${channel.uid}` + input.value = channel.uid + label.htmlFor = input.id + label.innerHTML = `${channel.name}` + + channel_list.appendChild(template) + }) + } + + if (micropub == null) { + throw new Error("need to authenticate first"); + } + const config = await micropub.config(); + if (config.channels !== undefined) { + populate_channel_list(config.channels) + } +} + +const no_channel_radio = document.getElementById("no_channel") as HTMLInputElement; +no_channel_radio.onclick = () => { + (document.getElementById("channels") as HTMLElement).style.display = "none"; + const channel_list = document.getElementById("channels_target") 
as HTMLElement + channel_list.innerHTML = ""; +} + +const main_form = document.getElementById("micropub") as HTMLFormElement; +main_form.onsubmit = async (event) => { + function construct_body(form: HTMLFormElement): MF2 { + let content = (form.elements.namedItem("content") as HTMLInputElement).value; + let name: string | undefined = (form.elements.namedItem("name") as HTMLInputElement).value || undefined; + let category: string[] = (form.elements.namedItem("category") as HTMLInputElement).value + .split(",") + .map(val => val.trim()); + + let channel: string[] | undefined = undefined; + let channel_select = (form.elements.namedItem("channel_select") as HTMLInputElement).value; + if (channel_select) { + let channel_selector = form.elements.namedItem("channel"); + if (channel_selector instanceof RadioNodeList) { + channel = (Array.from(channel_selector) as HTMLInputElement[]) + .map(i => i.checked ? i.value : false) + .filter(i => i) as string[]; + } else if (channel_selector instanceof HTMLInputElement) { + channel = [channel_selector.value] + } + } + return { + type: ["h-entry"], + properties: { + content: [content], + name: name ? [name] : undefined, + category: category.length ? category : undefined, + channel: channel ? 
channel : undefined + } + } + } + + event.preventDefault() + const mf2 = construct_body(main_form); + console.log(JSON.stringify(mf2)); + if (micropub == null) { + throw new Error("need to authenticate first"); + } + try { + const location = await micropub.submit(mf2); + main_form.clear() + + window.open(location, "_blank") + } catch (e) { + console.error(e) + alert(`Error: ${e}`) + return + } + +} + +const indieauth_form = document.getElementById("indieauth") as HTMLFormElement; +indieauth_form.onsubmit = async (event) => { + event.preventDefault() + const form = event.target as HTMLFormElement; + const me = (form.elements.namedItem("me") as HTMLInputElement).value; + if (me != null) { + const { discover_endpoints, create_verifier, create_challenge } = await import("./indieauth.js"); + + const endpoints = await discover_endpoints(new URL(me)); + + if (endpoints != null) { + localStorage.setItem("micropub_endpoint", endpoints.micropub.toString()) + localStorage.setItem("token_endpoint", endpoints.token_endpoint.toString()) + if (endpoints.revocation_endpoint != null) { + localStorage.setItem("revocation_endpoint", endpoints.revocation_endpoint.toString()) + } + } else { + alert("Your website doesn't support Micropub.") + return + } + (document.getElementById("unauthorized") as HTMLElement).style.display = "none"; + (document.getElementById("authorizing") as HTMLElement).style.display = "block"; + const url = endpoints.authorization_endpoint; + let params = new URLSearchParams(); + for (const [key, val] of url.searchParams) { + params.append(key, val) + } + params.set("client_id", window.location.href) + params.set("redirect_uri", window.location.href) + params.set("response_type", "code") + params.set("scope", "profile create media") + params.set("state", "awoo") + const code_verifier = create_verifier() + localStorage.setItem("code_verifier", code_verifier) + params.set("code_challenge", await create_challenge(code_verifier)) + params.set("code_challenge_method", 
"S256") + + url.search = "?" + params.toString() + + console.log(url) + + window.location.href = url.toString() + } +} + +if (window.location.search != "") { + (document.getElementById("authorizing") as HTMLElement).style.display = "block"; + const params = new URLSearchParams(window.location.search) + if (params.has("code") && params.has("state")) { + const token_endpoint = new URL(localStorage.getItem("token_endpoint")!) + const state = params.get("state") + // XXX check state + + const client_id = new URL(window.location.href); + client_id.search = ""; + const form = new URLSearchParams(); + form.set("grant_type", "authorization_code") + form.set("code", params.get("code")!) + form.set("client_id", client_id.toString()) + form.set("redirect_uri", client_id.toString()) + form.set("code_verifier", localStorage.getItem("code_verifier")!) + + const response = await fetch(token_endpoint, { + method: "POST", + headers: { + "Accept": "application/json", + "Content-Type": "application/x-www-form-urlencoded" + }, + body: form.toString() + }); + + const grant = await response.json(); + + if ("access_token" in grant) { + localStorage.setItem("access_token", grant.access_token); + (document.getElementById("authorizing") as HTMLElement).style.display = "none"; + } + } +} + +let micropub: Micropub | null = null; +const token = localStorage.getItem("access_token") +const endpoint = localStorage.getItem("micropub_endpoint") +if (token == null || endpoint == null) { + (document.getElementById("unauthorized") as HTMLElement).style.display = "block"; +} else { + (document.getElementById("authorized") as HTMLElement).style.display = "block"; + + micropub = new Micropub({ endpoint: new URL(endpoint), token }); +} diff --git a/companion-lite/src/micropub_api.ts b/companion-lite/src/micropub_api.ts new file mode 100644 index 0000000..fa1c431 --- /dev/null +++ b/companion-lite/src/micropub_api.ts @@ -0,0 +1,125 @@ +export interface MicropubChannel { + readonly uid: string, + readonly 
name: string +} + +export interface MF2 { + type: string[], + properties: { [key:string]: (string | MF2 | {[key:string]: string})[] | undefined } +} + +export interface MicropubConfig { + readonly channels?: MicropubChannel[], + readonly "media-endpoint"?: string +} + +export interface MicropubErrorMessage { + readonly error: string, + readonly error_description: string | undefined +} + +export class MicropubError extends Error { + readonly status: number | null + readonly response: MicropubErrorMessage | null + + constructor(status: number | null, response: MicropubErrorMessage | null, cause: Error | null = null) { + // Needs to pass both `message` and `options` to install the "cause" property. + if (status == null) { + super("Micropub endpoint didn't respond properly", { cause }); + } else if (response == null) { + super(`Micropub endpoint returned HTTP ${status}`, { cause }); + } else { + super( + `Micropub endpoint returned ${response.error}: ${response.error_description ?? "(no description was provided)"}`, + { cause } + ) + } + + this.status = status; + this.response = response; + } +} + +export class Micropub { + readonly token: string + readonly micropub_endpoint: URL + private config_response: MicropubConfig | null + + constructor({ endpoint, token }: { endpoint: URL, token: string }) { + this.micropub_endpoint = endpoint; + this.token = token; + this.config_response = null; + } + + async config(): Promise { + if (this.config_response != null) { + return this.config_response + } + let url = this.micropub_endpoint; + let params = new URLSearchParams(); + for (const [key, val] of url.searchParams) { + params.append(key, val) + } + params.set("q", "config") + + url.search = "?" 
+ params.toString(); + + const response = await fetch(url, { + headers: { + "Authorization": `Bearer ${this.token}` + } + }); + if (response.ok) { + const config = await response.json() as MicropubConfig; + this.config_response = config + + return config + } else { + throw new MicropubError(response.status, await response.json() as MicropubErrorMessage); + } + } + + async submit(mf2: MF2): Promise { + const response = await fetch(this.micropub_endpoint, { + method: "POST", + headers: { + "Authorization": `Bearer ${this.token}`, + "Content-Type": "application/json" + }, + body: JSON.stringify(mf2) + }) + + if (response.status != 201 && response.status != 202) { + let err = await response.json() as MicropubErrorMessage; + + throw new MicropubError(response.status, err) + } else { + return new URL(response.headers.get("Location") as string) + } + } + + async upload(file: File): Promise { + const config = await this.config(); + const media = config["media-endpoint"]; + if (media == null) { + throw new Error("Micropub endpoint doesn't support file uploads") + } + + const form = new FormData(); + form.set("file", file); + + const response = await fetch(media, { + method: "POST", + headers: { + "Authorization": `Bearer ${this.token}`, + }, + body: form + }) + + if (response.ok) { + return new URL(response.headers.get("Location") as string) + } else { + throw new MicropubError(response.status, await response.json()); + } + } +} diff --git a/companion-lite/style.css b/companion-lite/style.css new file mode 100644 index 0000000..09ed398 --- /dev/null +++ b/companion-lite/style.css @@ -0,0 +1,47 @@ +* { + box-sizing: border-box; +} + +:root { + font-family: sans-serif; +} + +body { + margin: 0; +} + +body > main { + margin: auto; + max-width: 1024px; +} + +h1.header { + margin-top: 0.75em; + text-align: center; +} + +fieldset + fieldset, +fieldset + input, +section + section, +section + fieldset +{ + margin-top: 0.75em; +} + +input[type="submit"] { + margin-left: auto; + 
display: block; +} + +form > fieldset > section > label { + width: 100%; + display: block; +} + +form > fieldset > section > input, form > fieldset > section > textarea { + width: 100%; +} + +textarea { + min-height: 10em; +} diff --git a/companion-lite/tsconfig.json b/companion-lite/tsconfig.json new file mode 100644 index 0000000..18b94c7 --- /dev/null +++ b/companion-lite/tsconfig.json @@ -0,0 +1,104 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + + /* Language and Environment */ + "target": "es2022", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. 
*/ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + + /* Modules */ + "module": "es2022", /* Specify what module code is generated. */ + // "rootDir": "./", /* Specify the root folder within your source files. */ + // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. 
Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + "outDir": "./dist", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. 
*/ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. 
*/ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ + }, + "include": ["src/**/*"] +} diff --git a/dev.sh b/dev.sh new file mode 100755 index 0000000..979b85a --- /dev/null +++ b/dev.sh @@ -0,0 +1,32 @@ +#!/bin/sh +if [[ -z $KITTYBOX_LOG ]]; then + export KITTYBOX_LOG="kittybox=debug,retainer::cache=warn,h2=info,rustls=info,tokio=info,tower_http::trace=debug,sqlx=trace" +fi +#export BACKEND_URI=file://./test-dir +export BACKEND_URI="postgres://localhost?dbname=kittybox&host=/run/postgresql" +export JOB_QUEUE_URI="postgres://localhost?dbname=kittybox&host=/run/postgresql" +export BLOBSTORE_URI=file://./media-store +export AUTH_STORE_URI=file://./auth-store +export COOKIE_SECRET=1234567890abcdefghijklmnopqrstuvwxyz +#export COOKIE_SECRET_FILE=/dev/null +if [[ "$1" == "watch" ]]; then + shift + exec systemfd --no-pid -s http::8080 -- cargo watch \ + --ignore '.direnv' \ + --ignore '*~' \ + -L "$KITTYBOX_LOG" \ + -x run \ + "$@" +elif [[ "$1" == "run" ]]; then + shift 1 + export RUST_LOG="$KITTYBOX_LOG" + exec cargo run "$@" +elif [[ "$1" == "_run" ]]; then + shift 1 + release=$1 + shift 1 + exec target/${release}/kittybox "$@" +else + echo "Usage: ./dev.sh [watch|run]" + exit 1 +fi diff --git a/examples/password-hasher.rs b/examples/password-hasher.rs new file mode 100644 index 0000000..f11c591 --- /dev/null +++ b/examples/password-hasher.rs @@ -0,0 +1,33 @@ +use std::io::Write; + +use argon2::{Argon2, password_hash::{rand_core::OsRng, PasswordHasher, PasswordHash, PasswordVerifier, SaltString}}; + +fn main() -> std::io::Result<()> { + eprint!("Type a password: "); + std::io::stderr().flush().unwrap(); + let password = { + let mut s = String::new(); + std::io::stdin().read_line(&mut s)?; + + s + }; + + let salt = SaltString::generate(&mut OsRng); + let argon2 = Argon2::default(); + //eprintln!("{}", password.trim()); + let password_hash = argon2.hash_password(password.trim().as_bytes(), &salt) + .expect("Hashing a password should not error out") + .serialize(); + + println!("{}", password_hash.as_str()); + + 
assert!(Argon2::default() + .verify_password( + password.trim().as_bytes(), + &PasswordHash::new(password_hash.as_str()) + .expect("Password hash should be valid") + ).is_ok() + ); + + Ok(()) +} diff --git a/indieauth/Cargo.toml b/indieauth/Cargo.toml new file mode 100644 index 0000000..d6bc1fe --- /dev/null +++ b/indieauth/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "kittybox-indieauth" +version = "0.1.0" +edition = "2021" + +[features] +default = [] +axum = ["axum-core", "serde_json", "http"] + +[dev-dependencies] +serde_json = "^1.0.64" # A JSON serialization file format +serde_urlencoded = "^0.7.0" # `x-www-form-urlencoded` meets Serde +[dependencies] +rand = "^0.8.5" # Utilities for random number generation +data-encoding = "^2.3.2" # Efficient and customizable data-encoding functions like base64, base32, and hex +sha2 = "^0.10.7" # SHA-2 series of algorithms for Rust +[dependencies.url] # URL library for Rust, based on the WHATWG URL Standard +version = "^2.2.1" +features = ["serde"] +[dependencies.serde] # A generic serialization/deserialization framework +version = "^1.0.170" +features = ["derive"] +[dependencies.axum-core] +version = "^0.3.4" +optional = true +[dependencies.serde_json] +version = "^1.0.64" +optional = true +[dependencies.http] +version = "^0.2.7" +optional = true \ No newline at end of file diff --git a/indieauth/src/lib.rs b/indieauth/src/lib.rs new file mode 100644 index 0000000..a60cc42 --- /dev/null +++ b/indieauth/src/lib.rs @@ -0,0 +1,773 @@ +#![deny(missing_docs)] +#![forbid(rustdoc::broken_intra_doc_links)] +//! A library of useful structs and helpers to implement [IndieAuth +//! version 20220212][indieauth]. +//! +//! This crate is completely network-agnostic, which means it can be +//! used with both sync and async web frameworks, and even on the +//! client side to implement identity consumers. +//! +//! ## Integration with web frameworks +//! +//! For convenience, helpers for [`axum`], the web framework Kittybox +//! 
happens to use, are provided. Enable the `axum` feature to use +//! them. +//! +//! The author is happy to accept patches to add more +//! framework-specific helpers. +//! +//! [indieauth]: https://indieauth.spec.indieweb.org/20220212/ +//! [`axum`]: https://github.com/tokio-rs/axum +use serde::{Serialize, Deserialize}; +use url::Url; + +mod scopes; +pub use self::scopes::{Scope, Scopes}; +mod pkce; +pub use self::pkce::{PKCEMethod, PKCEVerifier, PKCEChallenge}; + +/// Authentication methods supported by the introspection endpoint. +/// Note that authentication at the introspection endpoint is +/// mandatory. +#[derive(Copy, Clone, Debug, Serialize, Deserialize)] +pub enum IntrospectionEndpointAuthMethod { + /// `Authorization` header with a `Bearer` token. + Bearer, + /// A token passed as part of a POST request. + #[serde(rename = "snake_case")] + ClientSecretPost, + /// Username and password passed using HTTP Basic authentication. + #[serde(rename = "snake_case")] + ClientSecretBasic, + /// TLS client auth with a certificate signed by a valid CA. + #[serde(rename = "snake_case")] + TlsClientAuth, + /// TLS client auth with a self-signed certificate. + #[serde(rename = "snake_case")] + SelfSignedTlsClientAuth +} + +/// Authentication methods supported by the revocation endpoint. +/// +/// The intent of the IndieAuth revocation endpoints is to quickly +/// revoke leaked tokens. As it requires posession of a token, no +/// authentication is neccesary to protect tokens. A well-intentioned +/// person discovering a leaked token could quickly revoke it without +/// disturbing anyone. +#[derive(Copy, Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum RevocationEndpointAuthMethod { + /// No authentication is required to access an endpoint declaring + /// this value. + None +} + +/// The response types supported by the authorization endpoint. 
+#[derive(Copy, Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum ResponseType { + /// An authorization code will be issued if this response type is + /// requested. + Code +} +// TODO serde_variant +impl ResponseType { + /// Return the response type as it would appear in serialized form. + pub fn as_str(&self) -> &'static str { + match self { + ResponseType::Code => "code", + } + } +} + +/// Grant types that are described in the IndieAuth spec. +/// +/// This type is strictly for usage in the [`Metadata`] response. For +/// grant requests and responses, see [`GrantRequest`] and +/// [`GrantResponse`]. +#[derive(Copy, Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum GrantType { + /// The authorization code grant, allowing to exchange an + /// authorization code for a confirmation of identity or an access + /// token. + AuthorizationCode, + /// The refresh token grant, allowing to exchange a refresh token + /// for a fresh access token and a new refresh token, to + /// facilitate long-term access. + RefreshToken +} + +/// OAuth 2.0 Authorization Server Metadata in application to the IndieAuth protocol. +/// +/// Your metadata endpoint should return this as a response. 
+/// +/// ```rust +/// use kittybox_indieauth::{ +/// Metadata, IntrospectionEndpointAuthMethod, RevocationEndpointAuthMethod, +/// ResponseType, Scope, GrantType, PKCEMethod +/// }; +/// +/// let metadata = Metadata { +/// issuer: "https://indieauth.example.com/".parse().unwrap(), +/// authorization_endpoint: "https://indieauth.example.com/auth".parse().unwrap(), +/// token_endpoint: "https://indieauth.example.com/token".parse().unwrap(), +/// introspection_endpoint: "https://indieauth.example.com/introspection".parse().unwrap(), +/// introspection_endpoint_auth_methods_supported: Some(vec![IntrospectionEndpointAuthMethod::Bearer]), +/// revocation_endpoint: Some("https://indieauth.example.com/revoke".parse().unwrap()), +/// revocation_endpoint_auth_methods_supported: Some(vec![RevocationEndpointAuthMethod::None]), +/// scopes_supported: Some(vec![Scope::Create, Scope::Update, Scope::custom("manage_tokens")]), +/// response_types_supported: Some(vec![ResponseType::Code]), +/// grant_types_supported: Some(vec![GrantType::AuthorizationCode, GrantType::RefreshToken]), +/// service_documentation: Some("https://indieauth.spec.indieweb.org/".parse().unwrap()), +/// code_challenge_methods_supported: vec![PKCEMethod::S256], +/// authorization_response_iss_parameter_supported: Some(true), +/// userinfo_endpoint: Some("https://indieauth.example.com/userinfo".parse().unwrap()) +/// }; +/// ``` +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Metadata { + /// The server's issuer identifier. The issuer identifier is a URL + /// that uses the "https" scheme and has no query or fragment + /// components. The identifier MUST be a prefix of the + /// `indieauth-metadata` URL. 
+ pub issuer: Url, + /// The Authorization Endpoint + pub authorization_endpoint: Url, + /// The Token Endpoint + pub token_endpoint: Url, + /// The Introspection Endpoint + pub introspection_endpoint: Url, + /// JSON array containing a list of client authentication methods + /// supported by this introspection endpoint. + #[serde(skip_serializing_if = "Option::is_none")] + pub introspection_endpoint_auth_methods_supported: Option>, + /// The Revocation Endpoint + #[serde(skip_serializing_if = "Option::is_none")] + pub revocation_endpoint: Option, + /// JSON array containing the value + /// [`RevocationEndpointAuthMethod::None`]. If a revocation endpoint + /// is provided, this property should also be provided with the + /// value `vec![RevocationEndpointAuthMethod::None]`, since the + /// omission of this value defaults to `client_secret_basic` + /// according to [RFC8414]. + /// + /// [RFC8414]: https://www.rfc-editor.org/rfc/rfc8414 + #[serde(skip_serializing_if = "Option::is_none")] + pub revocation_endpoint_auth_methods_supported: Option>, + /// JSON array containing scope values supported by the IndieAuth + /// server. Servers MAY choose not to advertise some supported + /// scope values even when this parameter is used. + // Note: Scopes isn't used here because this field should be + // serialized as a list, not as a string + #[serde(skip_serializing_if = "Option::is_none")] + pub scopes_supported: Option>, + /// JSON array containing the response_type values supported. This + /// differs from [RFC8414] in that this parameter is OPTIONAL and + /// that, if omitted, the default is [`ResponseType::Code`]. + /// + /// [RFC8414]: https://www.rfc-editor.org/rfc/rfc8414 + #[serde(skip_serializing_if = "Option::is_none")] + pub response_types_supported: Option>, + /// JSON array containing grant type values supported. If omitted, + /// the default value differs from [RFC8414] and is + /// `authorization_code`. 
+ /// + /// [RFC8414]: https://www.rfc-editor.org/rfc/rfc8414 + #[serde(skip_serializing_if = "Option::is_none")] + pub grant_types_supported: Option>, + /// URL of a page containing human-readable information that + /// developers might need to know when using the server. This + /// might be a link to the IndieAuth spec or something more + /// personal to your implementation. + #[serde(skip_serializing_if = "Option::is_none")] + pub service_documentation: Option, + /// JSON array containing the methods supported for PKCE. This + /// parameter differs from [RFC8414] in that it is not optional as + /// PKCE is *REQUIRED*. + /// + /// [RFC8414]: https://www.rfc-editor.org/rfc/rfc8414 + pub code_challenge_methods_supported: Vec, + /// Boolean parameter indicating whether the authorization server + /// provides the iss parameter. If omitted, the default value is + /// false. As the iss parameter is REQUIRED, this is provided for + /// compatibility with OAuth 2.0 servers implementing the + /// parameter. + #[serde(skip_serializing_if = "Option::is_none")] + pub authorization_response_iss_parameter_supported: Option, + /// The User Info Endpoint + #[serde(skip_serializing_if = "Option::is_none")] + pub userinfo_endpoint: Option +} + +#[cfg(feature = "axum")] +impl axum_core::response::IntoResponse for Metadata { + fn into_response(self) -> axum_core::response::Response { + use http::StatusCode; + + (StatusCode::OK, + [("Content-Type", "application/json")], + serde_json::to_vec(&self).unwrap()) + .into_response() + } +} + +/// User profile to be returned from the userinfo endpoint and when +/// the `profile` scope was requested. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Profile { + /// User's chosen name. + #[serde(skip_serializing_if = "Option::is_none")] + pub name: Option, + /// User's profile page. Fetching it may reveal an `h-card`. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub url: Option, + /// User's profile picture suitable to represent them. + #[serde(skip_serializing_if = "Option::is_none")] + pub photo: Option, + /// User's email, if they've chosen to reveal it. This is guarded + /// by the `email` scope. + #[serde(skip_serializing_if = "Option::is_none")] + pub email: Option +} + +#[cfg(feature = "axum")] +impl axum_core::response::IntoResponse for Profile { + fn into_response(self) -> axum_core::response::Response { + use http::StatusCode; + + (StatusCode::OK, + [("Content-Type", "application/json")], + serde_json::to_vec(&self).unwrap()) + .into_response() + } +} + +/// A state string comprised of alphanumeric characters to protect +/// from CSRF attacks. +/// +/// There is no reason to inspect the string itself except to ensure +/// it hasn't been tampered with. +#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] +pub struct State(String); +impl State { + /// Generate a random state string of 128 bytes in length. + pub fn new() -> Self { + use rand::{Rng, distributions::Alphanumeric}; + let bytes = rand::thread_rng() + .sample_iter(&Alphanumeric) + .take(128) + .collect::>(); + Self(String::from_utf8(bytes).unwrap()) + } +} +impl AsRef for State { + fn as_ref(&self) -> &str { + self.0.as_str() + } +} + +/// The authorization request that should be affixed to the URL of an +/// authorization endpoint to start the IndieAuth ceremony. 
+/// +/// ```rust +/// use kittybox_indieauth::{ +/// AuthorizationRequest, ResponseType, State, +/// Scopes, Scope, +/// PKCEChallenge, PKCEVerifier, PKCEMethod +/// }; +/// +/// // Save that for later, it'll come in handy +/// let verifier = PKCEVerifier::new(); +/// +/// let request = AuthorizationRequest { +/// response_type: ResponseType::Code, +/// client_id: "https://kittybox.fireburn.ru/companion/native".parse().unwrap(), +/// redirect_uri: "https://kittybox.fireburn.ru/companion/native/redirect".parse().unwrap(), +/// state: State::new(), +/// code_challenge: PKCEChallenge::new(&verifier, PKCEMethod::default()), +/// scope: Some(Scopes::new(vec![Scope::Create, Scope::Update, Scope::Delete, Scope::Media])), +/// me: Some("https://fireburn.ru/".parse().unwrap()) +/// }; +/// +/// let mut url: url::Url = "https://fireburn.ru/.kittybox/indieauth/auth" +/// .parse() +/// .unwrap(); +/// +/// url.set_query(Some(&serde_urlencoded::to_string(request).unwrap())); +/// +/// // Open a user's browser to navigate to the authorization endpoint page... +/// ``` +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AuthorizationRequest { + /// The response type expected to this request. + pub response_type: ResponseType, + /// The homepage of the client. It must be fetched to show + /// metadata and check the redirect URI's authenticity. + pub client_id: Url, + /// The URI that the user will be redirected to in case they + /// approve the authentication request. A query string containing + /// the response is affixed to it. + pub redirect_uri: Url, + /// A random state to protect from CSRF attacks. The server should + /// return this string unmodified. + pub state: State, + /// A PKCE challenge neccesary to protect from authorization code + /// injection and CSRF attacks. + #[serde(flatten)] + pub code_challenge: PKCEChallenge, + /// An array of scopes that are requested for a token. If no + /// scopes are provided, a token will not be issued. 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub scope: Option, + /// The URL that user entered. The authorization endpoint MAY use + /// it as a hint of which user is attempting to sign in, and to + /// indicate which profile URL the client is expecting in the + /// resulting profile URL response or access token response. + #[serde(skip_serializing_if = "Option::is_none")] + pub me: Option +} + +/// The authorization response that must be appended to the +/// [`AuthorizationRequest::redirect_uri`]'s query string. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct AuthorizationResponse { + /// The authorization code generated by the authorization + /// endpoint. The code MUST expire shortly after it is issued to + /// mitigate the risk of leaks, and MUST be valid for only one + /// use. A maximum lifetime of 10 minutes is recommended. See + /// [OAuth 2.0 Section 4.1.2][oauth2-sec-4.1.2] for additional + /// requirements on the authorization code. + /// + /// [oauth2-sec-4.1.2]: https://tools.ietf.org/html/rfc6749#section-4.1.2 + pub code: String, + /// The state parameter from the [AuthorizationRequest], + /// unmodified. + pub state: State, + /// The issuer identifier for client validation. + /// + /// Clients MUST verify this matches the [`Metadata::issuer`] + /// parameter provided by the Server [Metadata] endpoint during + /// Discovery as outlined in [OAuth 2.0 Authorization Server + /// Issuer Identification][oauth2-iss]. If the value does not + /// match the expected issuer identifier, clients MUST reject the + /// authorization response and MUST NOT proceed with the + /// authorization grant. For error responses, clients MUST NOT + /// assume that the error originates from the intended + /// authorization server. + /// + /// [oauth2-iss]: https://www.ietf.org/archive/id/draft-ietf-oauth-iss-auth-resp-02.html + pub iss: Url +} + +/// A grant request that continues the IndieAuth ceremony. 
+#[derive(PartialEq, Debug, Clone, Serialize, Deserialize)] +#[serde(tag = "grant_type")] +#[serde(rename_all = "snake_case")] +pub enum GrantRequest { + /// Use an authorization code to receive identity verification + /// and/or an access token. + AuthorizationCode { + /// The code from [`AuthorizationResponse`]. + code: String, + /// Client ID that this grant belongs to. + client_id: Url, + /// Redirect URI that was used to receive the grant. + redirect_uri: Url, + /// The PKCE code verifier that was used to create the code + /// challenge. + code_verifier: PKCEVerifier + }, + /// Use a refresh token to get a fresh access token and a new + /// matching refresh token. + RefreshToken { + /// The refresh token that was issued before. + refresh_token: String, + /// The client ID to which the token belongs to. + client_id: url::Url, + /// A list of scopes, not exceeding the already-granted scope, + /// that can be passed to further restrict the scopes on the + /// new token. + /// + /// This cannot be used to gain new scopes -- you need to + /// start over if you need new scopes from the user. + scope: Option + } +} + +/// Token type, as described in [RFC6749][]. +/// +/// [RFC6749]: https://www.rfc-editor.org/rfc/rfc6749#section-7.1 +#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum TokenType { + /// A Bearer token described in [RFC6750][]. As far as the author + /// of this library is concerned, this is the only type that + /// IndieAuth uses. + /// + /// [RFC6750]: https://www.rfc-editor.org/rfc/rfc6750 + Bearer +} + +/// The response to a successful [`GrantRequest`]. +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(untagged)] +pub enum GrantResponse { + /// An access token response, containing an access token, a refresh + /// token (if the identity provider supports them) and the profile + /// (if access was granted to the profile data). 
+ AccessToken { + /// The URL for the user this token corresponds to. + me: Url, + /// Token type. Required by OAuth2, not mentioned in + /// IndieAuth. Confirmed as erroneous. + token_type: TokenType, + /// Scopes. REQUIRED if different from what was + /// requested. Absence from IndieAuth spec confirmed as + /// erroneous. + scope: Option, + /// The user's profile information, if it was requested. + #[serde(skip_serializing_if = "Option::is_none")] + profile: Option, + /// The access token that can be used to access protected resources. + access_token: String, + /// The duration in which the access token expires, represented in seconds. + // TODO replace with std::time::Duration + #[serde(skip_serializing_if = "Option::is_none")] + expires_in: Option, + /// The refresh token, if it was issued. + #[serde(skip_serializing_if = "Option::is_none")] + refresh_token: Option + }, + /// A profile URL response, that only contains the profile URL and + /// the profile, if it was requested. + /// + /// This is suitable for confirming the identity of the user, but + /// no more than that. + ProfileUrl { + /// The authenticated user's URL. + me: Url, + /// The user's profile information, if it was requested. + #[serde(skip_serializing_if = "Option::is_none")] + profile: Option + } +} + +#[cfg(feature = "axum")] +impl axum_core::response::IntoResponse for GrantResponse { + fn into_response(self) -> axum_core::response::Response { + use http::StatusCode; + + (StatusCode::OK, + [("Content-Type", "application/json"), + ("Cache-Control", "no-store"), + ("Pragma", "no-cache") + ], + serde_json::to_vec(&self).unwrap()) + .into_response() + } +} + +/// Describes requests that the authorization endpoint might want to handle. +/// +/// This type mostly exists for ease-of-use with serde. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(untagged)] +#[allow(missing_docs)] +pub enum RequestMaybeAuthorizationEndpoint { + Authorization(AuthorizationRequest), + Grant(GrantRequest) +} + +/// A token introspection request that can be handled by the token +/// introspection endpoint. +/// +/// Note that this request doesn't contain authentication data, which +/// is commonly transmitted out-of-band (e.g. via the `Authorization` +/// header). +#[derive(Debug, Serialize, Deserialize)] +pub struct TokenIntrospectionRequest { + /// The token for which data was requested. + pub token: String +} + +/// Data for a token that will be returned by the introspection +/// endpoint (and can also be used internally by the resource server +/// if it is part of a monolith with the identity provider). +#[derive(Debug, Serialize, Deserialize)] +pub struct TokenData { + /// The user this token corresponds to. + pub me: Url, + /// The client ID for the client that this token was issued to. + pub client_id: Url, + /// Scope that was granted to this token. + pub scope: Scopes, + /// The expiration date for this token, measured in seconds from + /// the Unix time epoch (1970-01-01 00:00:00). + // TODO replace these two with std::time::SystemTime + #[serde(skip_serializing_if = "Option::is_none")] + pub exp: Option, + /// The issue date, represented in the same format as the + /// [`exp`][TokenData::exp] field. + #[serde(skip_serializing_if = "Option::is_none")] + pub iat: Option +} + +impl TokenData { + /// Check if the token in question expired. + pub fn expired(&self) -> bool { + use std::time::{Duration, SystemTime, UNIX_EPOCH}; + + self.exp + .map(|exp| SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or(Duration::ZERO) + .as_secs() >= exp) + .unwrap_or_default() + } + + /// Return a timestamp at which the token is not considered valid anymore. 
+ pub fn expires_at(&self) -> Option { + self.exp.map(|time| { + std::time::UNIX_EPOCH + std::time::Duration::from_secs(time) + }) + } + /// Return a timestamp describing when the token was issued. + pub fn issued_at(&self) -> Option { + self.iat.map(|time| { + std::time::UNIX_EPOCH + std::time::Duration::from_secs(time) + }) + } + + /// Check if a certain scope is allowed for this token. + pub fn check_scope(&self, scope: &Scope) -> bool { + self.scope.has(scope) + } +} + +// I don't like this type, because it could've been represented +// internally by Option. But the IndieAuth standard +// requires the "active" field to be present. I can't do anything +// about it. +/// The introspection response that the introspection endpoint must +/// return. +/// +/// It is recommended to use the [`From`][`std::convert::From`] trait +/// to convert from `Option` for ergonomics. +#[derive(Debug, Serialize, Deserialize)] +pub struct TokenIntrospectionResponse { + active: bool, + #[serde(flatten)] + #[serde(skip_serializing_if = "Option::is_none")] + data: Option +} +// These wrappers and impls should take care of making use of this +// type as painless as possible. +impl TokenIntrospectionResponse { + /// Indicate that this token is not valid. + pub fn inactive() -> Self { + Self { active: false, data: None } + } + /// Indicate that this token is valid, and provide data about it. + pub fn active(data: TokenData) -> Self { + Self { active: true, data: Some(data) } + } + /// Check if the endpoint reports this token as valid. + pub fn is_active(&self) -> bool { + self.active + } + + /// Get data contained in the response, if the token is valid. 
+ pub fn data(&self) -> Option<&TokenData> { + if !self.active { + return None + } + self.data.as_ref() + } +} +impl Default for TokenIntrospectionResponse { + fn default() -> Self { + Self::inactive() + } +} +impl From> for TokenIntrospectionResponse { + fn from(data: Option) -> Self { + Self { active: data.is_some(), data } + } +} +impl From for Option { + fn from(response: TokenIntrospectionResponse) -> Option { + response.data + } +} + +#[cfg(feature = "axum")] +impl axum_core::response::IntoResponse for TokenIntrospectionResponse { + fn into_response(self) -> axum_core::response::Response { + use http::StatusCode; + + (StatusCode::OK, + [("Content-Type", "application/json")], + serde_json::to_vec(&self).unwrap()) + .into_response() + } +} + +/// A request for revoking a token. There is no response beyond `HTTP +/// 200 OK`. +#[derive(Debug, Serialize, Deserialize)] +pub struct TokenRevocationRequest { + /// The token that needs to be revoked in case it is valid. + pub token: String +} + +/// Types of errors that a resource server (IndieAuth consumer) can +/// throw when authentication goes wrong. +#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum ResourceErrorKind { + /// The provided token was invalid. + InvalidToken, + /// The scope on the token was insufficient to perform the + /// requested operation. + InsufficientScope, +} + +/// Various kinds of errors that could occur when performing the +/// IndieAuth ceremony. +#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum ErrorKind { + /// The request is missing a required parameter, includes an + /// unsupported parameter value (other than grant type), repeats a + /// parameter, includes multiple credentials, utilizes more than + /// one mechanism for authenticating the client, or is otherwise + /// malformed. 
+ InvalidRequest, + /// Client authentication failed (e.g., unknown client, no client + /// authentication included, or unsupported authentication + /// method). The authorization server MAY return an HTTP 401 + /// (Unauthorized) status code to indicate which HTTP + /// authentication schemes are supported. If the client attempted + /// to authenticate via the "Authorization" request header field, + /// the authorization server MUST respond with an HTTP 401 + /// (Unauthorized) status code and include the "WWW-Authenticate" + /// response header field matching the authentication scheme used + /// by the client. + InvalidClient, + /// The provided authorization grant (e.g., authorization + /// code, resource owner credentials) or refresh token is + /// invalid, expired, revoked, does not match the redirection + /// URI used in the authorization request, or was issued to + /// another client. + InvalidGrant, + /// The authenticated client is not authorized to use this + /// authorization grant type. + UnauthorizedClient, + /// The authorization grant type is not supported by the + /// authorization server. + UnsupportedGrantType, + /// The requested scope is invalid, unknown, malformed, or + /// exceeds the scope granted by the resource owner. + InvalidScope +} +// TODO consider relying on serde_variant for these conversions +impl AsRef for ErrorKind { + fn as_ref(&self) -> &str { + match self { + ErrorKind::InvalidRequest => "invalid_request", + ErrorKind::InvalidClient => "invalid_client", + ErrorKind::InvalidGrant => "invalid_grant", + ErrorKind::UnauthorizedClient => "unauthorized_client", + ErrorKind::UnsupportedGrantType => "unsupported_grant_type", + ErrorKind::InvalidScope => "invalid_scope", + } + } +} +impl std::fmt::Display for ErrorKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_ref()) + } +} + +/// An error that can be returned when performing the IndieAuth ceremony. 
+#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Error { + /// Type of an error. + #[serde(rename = "error")] + pub kind: ErrorKind, + /// Human-friendly description of an error, suitable for a + /// developer to read while debugging. + #[serde(rename = "error_description")] + pub msg: Option, + /// An URL to documentation describing what went wrong and how to + /// fix it. + pub error_uri: Option +} + +impl From for Error { + fn from(kind: ErrorKind) -> Error { + Error { + kind, msg: None, error_uri: None + } + } +} + +impl std::error::Error for self::Error {} + +impl std::fmt::Display for self::Error { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "IndieAuth error ({})", self.kind)?; + if let Some(msg) = self.msg.as_deref() { + write!(f, ": {}", msg)?; + } + if let Some(error_uri) = &self.error_uri { + write!(f, " (see `{}` for more info)", error_uri)?; + } + + Ok(()) + } +} + +#[cfg(feature = "axum")] +impl axum_core::response::IntoResponse for self::Error { + fn into_response(self) -> axum_core::response::Response { + use http::StatusCode; + + (StatusCode::BAD_REQUEST, + [("Content-Type", "application/json")], + serde_json::to_vec(&self).unwrap()) + .into_response() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_serialize_deserialize_grant_request() { + let authorization_code: GrantRequest = GrantRequest::AuthorizationCode { + client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), + redirect_uri: "https://kittybox.fireburn.ru/.kittybox/login/redirect".parse().unwrap(), + code_verifier: PKCEVerifier("helloworld".to_string()), + code: "hithere".to_owned() + }; + let serialized = serde_urlencoded::to_string(&[ + ("grant_type", "authorization_code"), + ("code", "hithere"), + ("client_id", "https://kittybox.fireburn.ru/"), + ("redirect_uri", "https://kittybox.fireburn.ru/.kittybox/login/redirect"), + ("code_verifier", "helloworld"), + ]).unwrap(); + + let deserialized = 
serde_urlencoded::from_str(&serialized).unwrap(); + + assert_eq!(authorization_code, deserialized); + + assert_eq!( + serialized, + serde_urlencoded::to_string(authorization_code).unwrap() + ) + } +} diff --git a/indieauth/src/pkce.rs b/indieauth/src/pkce.rs new file mode 100644 index 0000000..bf8d1a0 --- /dev/null +++ b/indieauth/src/pkce.rs @@ -0,0 +1,132 @@ +use serde::{Serialize, Deserialize}; +use rand::{Rng, distributions::Alphanumeric}; +use sha2::{Sha256, Digest}; +use data_encoding::BASE64URL; + +/// Methods to use for PKCE challenges. +#[derive(PartialEq, Eq, Copy, Clone, Debug, Serialize, Deserialize, /*Default*/)] +pub enum PKCEMethod { + /// Base64-encoded SHA256 hash of an ASCII string. + //#[default] + S256, + /// Plain string by itself. Please don't use this. + #[serde(rename = "snake_case")] + Plain +} +// manual impl until Rust 1.62 hits nixos-unstable +impl Default for PKCEMethod { + fn default() -> Self { PKCEMethod::S256 } +} +impl PKCEMethod { + /// Return a string representing a PKCE method as it would be serialized. + pub fn as_str(&self) -> &'static str { + match self { + PKCEMethod::S256 => "S256", + PKCEMethod::Plain => "plain" + } + } +} +/// A PKCE verifier string that should be kept in secret until the end +/// of the authentication ceremony, where it is revealed to prove that +/// the one who uses the grant is the same entity who it was given to. +#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] +pub struct PKCEVerifier(pub(super) String); + +impl AsRef for PKCEVerifier { + fn as_ref(&self) -> &str { + self.0.as_str() + } +} +impl ToString for PKCEVerifier { + fn to_string(&self) -> String { + self.0.clone() + } +} + +impl PKCEVerifier { + /// Generate a new PKCE verifier string of 128 bytes in length. 
+ #[allow(clippy::new_without_default)] + pub fn new() -> Self { + let bytes = rand::thread_rng() + .sample_iter(&Alphanumeric) + .take(128) + .collect::>(); + Self(String::from_utf8(bytes).unwrap()) + } +} + +/// A PKCE challenge as described in [RFC7636]. +/// +/// [RFC7636]: https://tools.ietf.org/html/rfc7636 +#[derive(Eq, PartialEq, Debug, Clone, Serialize, Deserialize)] +pub struct PKCEChallenge { + code_challenge: String, + #[serde(rename = "code_challenge_method")] + method: PKCEMethod +} + +impl PKCEChallenge { + /// Create a new challenge from a [PKCEVerifier] using a certain + /// [PKCEMethod]. + pub fn new(code_verifier: &PKCEVerifier, method: PKCEMethod) -> Self { + Self { + code_challenge: match method { + PKCEMethod::S256 => { + let mut hasher = Sha256::new(); + hasher.update(code_verifier.as_ref()); + let mut challenge = BASE64URL.encode(&hasher.finalize()); + challenge.retain(|c| c != '='); + + challenge + }, + PKCEMethod::Plain => code_verifier.to_string(), + }, + method + } + } + + /// Verify that the [PKCEVerifier] corresponds to this challenge, + /// by creating a second challenge string and comparing it against + /// this challenge data. + /// + /// ```rust + /// use kittybox_indieauth::{PKCEVerifier, PKCEMethod, PKCEChallenge}; + /// + /// let verifier = PKCEVerifier::new(); + /// let challenge = PKCEChallenge::new(&verifier, PKCEMethod::default()); + /// // Meanwhile, at the token endpoint, in the end of the ceremony... + /// // ...the challenge gets retrieved from the stored data and verified + /// assert!(challenge.verify(verifier)) + /// ``` + #[must_use] + pub fn verify(&self, code_verifier: PKCEVerifier) -> bool { + Self::new(&code_verifier, self.method) == *self + } + + /// Return a reference to the code challenge string. + pub fn as_str(&self) -> &str { + self.code_challenge.as_str() + } + + /// Return the method used to create this challenge. 
+ pub fn method(&self) -> PKCEMethod { + self.method + } +} + +#[cfg(test)] +mod tests { + use super::{PKCEMethod, PKCEVerifier, PKCEChallenge}; + + #[test] + /// A snapshot test generated using [Aaron Parecki's PKCE + /// tools](https://example-app.com/pkce) that checks for a + /// conforming challenge. + fn test_pkce_challenge_verification() { + let verifier = PKCEVerifier("ec03310e4e90f7bc988af05384060c3c1afeae4bb4d0f648c5c06b63".to_owned()); + + let challenge = PKCEChallenge::new(&verifier, PKCEMethod::S256); + + assert_eq!(challenge.as_str(), "aB8OG20Rh8UoQ9gFhI0YvPkx4dDW2MBspBKGXL6j6Wg"); + } +} diff --git a/indieauth/src/scopes.rs b/indieauth/src/scopes.rs new file mode 100644 index 0000000..d74878e --- /dev/null +++ b/indieauth/src/scopes.rs @@ -0,0 +1,208 @@ +use std::str::FromStr; + +use serde::{ + Serialize, Serializer, + Deserialize, + de::{ + Deserializer, Visitor, + Error as DeserializeError + } +}; + +/// Various scopes that can be requested through IndieAuth. +#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum Scope { + /// Allows to create posts using Micropub. + Create, + /// Allows to edit posts using Micropub. + Update, + /// Allows to delete posts using Micropub. + Delete, + /// Allows to upload blobs to the media endpoint. + Media, + /// Allows to read feeds via Microsub. + Read, + /// Allows to manage follows via Microsub. + Follow, + /// Allows to mute and unmute users in feeds via Microsub. + Mute, + /// Allows to block and unblock users. + Block, + /// Allows to create and manage feeds via Microsub. + Channels, + /// Allows to request profile information (except email, see Email) + Profile, + /// Allows to receive email in the profile information. + Email, + /// Custom scope not included above. + Custom(String) +} +impl Scope { + /// Create a custom scope from a string slice. 
+ pub fn custom(scope: &str) -> Scope { + Scope::Custom(scope.to_string()) + } +} + +// TODO consider relying on serde_variant for these conversions +impl AsRef for Scope { + fn as_ref(&self) -> &str { + use Scope::*; + match self { + Create => "create", + Update => "update", + Delete => "delete", + Media => "media", + Read => "read", + Follow => "follow", + Mute => "mute", + Block => "block", + Channels => "channels", + Profile => "profile", + Email => "email", + Custom(s) => s.as_ref() + } + } +} +impl From<&str> for Scope { + fn from(scope: &str) -> Self { + match scope { + "create" => Scope::Create, + "update" => Scope::Update, + "delete" => Scope::Delete, + "media" => Scope::Media, + "read" => Scope::Read, + "follow" => Scope::Follow, + "mute" => Scope::Mute, + "block" => Scope::Block, + "channels" => Scope::Channels, + "profile" => Scope::Profile, + "email" => Scope::Email, + other => Scope::custom(other) + } + } +} +impl FromStr for Scope { + type Err = std::convert::Infallible; + fn from_str(s: &str) -> Result { + Ok(s.into()) + } +} + +/// A list of scopes that serializes to a space-separated string instead of a list. +/// +/// OAuth2 is weird, don't ask me why it's a thing. +#[derive(PartialEq, Eq, Debug, Clone)] +pub struct Scopes(Vec); +impl Scopes { + /// Create a list of scopes from a vector of scopes. + pub fn new(scopes: Vec) -> Self { + Self(scopes) + } + /// Ensure a certain scope is listed in the scope list. + pub fn has(&self, scope: &Scope) -> bool { + self.0.iter().any(|s| s == scope) + } + /// Ensure all of the requested scopes are in the list. + pub fn has_all(&self, scopes: &[Scope]) -> bool { + scopes.iter() + .map(|s1| self.iter().any(|s2| s1 == s2)) + .all(|s| s) + } + /// Transform this into an iterator over individual scopes. 
+ pub fn iter(&self) -> std::slice::Iter<'_, Scope> { + self.0.iter() + } +} +impl AsRef<[Scope]> for Scopes { + fn as_ref(&self) -> &[Scope] { + self.0.as_ref() + } +} +impl ToString for Scopes { + fn to_string(&self) -> String { + self.0.iter() + .map(|s| s.as_ref()) + .fold(String::new(), |a, s| if a.is_empty() { + s.to_string() + } else { + a + " " + s + }) + } +} +impl FromStr for Scopes { + type Err = std::convert::Infallible; + + fn from_str(value: &str) -> Result { + Ok(Self(value.split_ascii_whitespace() + .map(Scope::from) + .collect::>())) + } +} +impl Serialize for Scopes { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer + { + serializer.serialize_str(&self.to_string()) + } +} +struct ScopeVisitor; +impl<'de> Visitor<'de> for ScopeVisitor { + type Value = Scopes; + + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + formatter.write_str("a string of space-separated OAuth2 scopes") + } + + fn visit_str(self, value: &str) -> Result + where + E: DeserializeError + { + Ok(Scopes::from_str(value).unwrap()) + } +} +impl<'de> Deserialize<'de> for Scopes { + + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de> + { + deserializer.deserialize_str(ScopeVisitor) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_serde_vec_scope() { + let scopes = vec![ + Scope::Create, Scope::Update, Scope::Delete, + Scope::Media, + Scope::custom("kittybox_internal_access") + ]; + + let scope_serialized = serde_json::to_value( + Scopes::new(scopes.clone()) + ).unwrap(); + let scope_str = scope_serialized.as_str().unwrap(); + assert_eq!(scope_str, "create update delete media kittybox_internal_access"); + + assert!(serde_json::from_value::(scope_serialized).unwrap().has_all(&scopes)) + } + + #[test] + fn test_scope_has_all() { + let scopes = Scopes(vec![ + Scope::Create, Scope::Update, Scope::custom("draft") + ]); + + assert!(scopes.has_all(&[Scope::Create, Scope::custom("draft")])); + + 
assert!(!scopes.has_all(&[Scope::Read, Scope::custom("kittybox_internal_access")])); + } + +} diff --git a/kittybox-rs/Cargo.lock b/kittybox-rs/Cargo.lock deleted file mode 100644 index 39323f4..0000000 --- a/kittybox-rs/Cargo.lock +++ /dev/null @@ -1,4589 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "addr2line" -version = "0.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" -dependencies = [ - "gimli", -] - -[[package]] -name = "adler" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" - -[[package]] -name = "adler32" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" - -[[package]] -name = "ahash" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" -dependencies = [ - "cfg-if", - "getrandom 0.2.10", - "once_cell", - "version_check", -] - -[[package]] -name = "aho-corasick" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" -dependencies = [ - "memchr", -] - -[[package]] -name = "alloc-no-stdlib" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" - -[[package]] -name = "alloc-stdlib" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" -dependencies = [ - "alloc-no-stdlib", -] - -[[package]] -name = 
"allocator-api2" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56fc6cf8dc8c4158eed8649f9b8b0ea1518eb62b544fe9490d66fa0b349eafe9" - -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "anstream" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "is-terminal", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" - -[[package]] -name = "anstyle-parse" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" -dependencies = [ - "windows-sys 0.48.0", -] - -[[package]] -name = "anstyle-wincon" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" -dependencies = [ - "anstyle", - "windows-sys 0.48.0", -] - -[[package]] -name = "anyhow" -version = "1.0.71" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" - -[[package]] -name = "argon2" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95c2fcf79ad1932ac6269a738109997a83c227c09b75842ae564dc8ede6a861c" -dependencies = [ - "base64ct", - "blake2", - "password-hash", -] - -[[package]] -name = "asn1-rs" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ff05a702273012438132f449575dbc804e27b2f3cbe3069aa237d26c98fa33" -dependencies = [ - "asn1-rs-derive", - "asn1-rs-impl", - "displaydoc", - "nom", - "num-traits", - "rusticata-macros", - "thiserror", - "time 0.3.23", -] - -[[package]] -name = "asn1-rs-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db8b7511298d5b7784b40b092d9e9dcd3a627a5707e4b5e507931ab0d44eeebf" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 1.0.109", - "synstructure", -] - -[[package]] -name = "asn1-rs-impl" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 1.0.109", -] - -[[package]] -name = "assert-json-diff" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" -dependencies = [ - "serde", - "serde_json", -] - -[[package]] -name = "async-channel" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" -dependencies = [ - "concurrent-queue", - "event-listener", - "futures-core", -] - -[[package]] -name = "async-compression" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "5b0122885821398cc923ece939e24d1056a2384ee719432397fa9db87230ff11" -dependencies = [ - "brotli", - "flate2", - "futures-core", - "memchr", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "async-trait" -version = "0.1.71" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = "atoi" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" -dependencies = [ - "num-traits", -] - -[[package]] -name = "autocfg" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" -dependencies = [ - "autocfg 1.1.0", -] - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "axum" -version = "0.6.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8175979259124331c1d7bf6586ee7e0da434155e4b2d48ec2c8386281d8df39" -dependencies = [ - "async-trait", - "axum-core", - "axum-macros", - "bitflags 1.3.2", - "bytes", - "futures-util", - "headers", - "http", - "http-body", - "hyper", - "itoa 1.0.8", - "matchit", - "memchr", - "mime", - "multer", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "serde_json", - "serde_path_to_error", - "serde_urlencoded", - "sync_wrapper", - "tokio", - "tower", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-core" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" -dependencies = [ - 
"async-trait", - "bytes", - "futures-util", - "http", - "http-body", - "mime", - "rustversion", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-extra" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "febf23ab04509bd7672e6abe76bd8277af31b679e89fa5ffc6087dc289a448a3" -dependencies = [ - "axum", - "axum-core", - "bytes", - "cookie", - "futures-util", - "http", - "http-body", - "mime", - "pin-project-lite", - "serde", - "tokio", - "tower", - "tower-http 0.4.1", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-macros" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bb524613be645939e280b7279f7b017f98cf7f5ef084ec374df373530e73277" -dependencies = [ - "heck", - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = "backtrace" -version = "0.3.68" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" -dependencies = [ - "addr2line", - "cc", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", -] - -[[package]] -name = "base64" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - -[[package]] -name = "base64" -version = "0.21.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" - -[[package]] -name = "base64ct" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" - -[[package]] -name = "base64urlsafedata" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18b3d30abb74120a9d5267463b9e0045fdccc4dd152e7249d966612dc1721384" -dependencies = [ - 
"base64 0.21.2", - "serde", - "serde_json", -] - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitflags" -version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" -dependencies = [ - "serde", -] - -[[package]] -name = "blake2" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" -dependencies = [ - "digest", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "brotli" -version = "3.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", - "brotli-decompressor", -] - -[[package]] -name = "brotli-decompressor" -version = "2.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", -] - -[[package]] -name = "bumpalo" -version = "3.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" - -[[package]] -name = "byteorder" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" - -[[package]] -name = "bytes" -version = "1.4.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" - -[[package]] -name = "cc" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "chrono" -version = "0.4.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" -dependencies = [ - "android-tzdata", - "iana-time-zone", - "js-sys", - "num-traits", - "serde", - "time 0.1.45", - "wasm-bindgen", - "winapi", -] - -[[package]] -name = "clap" -version = "4.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1640e5cc7fb47dbb8338fd471b105e7ed6c3cb2aeb00c2e067127ffd3764a05d" -dependencies = [ - "clap_builder", - "clap_derive", - "once_cell", -] - -[[package]] -name = "clap_builder" -version = "4.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98c59138d527eeaf9b53f35a77fcc1fad9d883116070c63d5de1c7dc7b00c72b" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim", -] - -[[package]] -name = "clap_derive" -version = "4.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8cd2b2a819ad6eec39e8f1d6b53001af1e5469f8c177579cdaeb313115b825f" -dependencies = [ - "heck", - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = "clap_lex" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" - -[[package]] -name = "cloudabi" -version = "0.0.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" -dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "colorchoice" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" - -[[package]] -name = "combine" -version = "4.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" -dependencies = [ - "bytes", - "futures-core", - "memchr", - "pin-project-lite", - "tokio", - "tokio-util", -] - -[[package]] -name = "compact_jwt" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51f9032b96a89dd79ffc5f62523d5351ebb40680cbdfc4029393b511b9e971aa" -dependencies = [ - "base64 0.13.1", - "base64urlsafedata", - "hex", - "openssl", - "serde", - "serde_json", - "tracing", - "url", - "uuid 1.4.0", -] - -[[package]] -name = "concurrent-queue" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62ec6771ecfa0762d24683ee5a32ad78487a3d3afdc0fb8cae19d2c5deb50b7c" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "const-oid" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6340df57935414636969091153f35f68d9f00bbc8fb4a9c6054706c213e6c6bc" - -[[package]] -name = "convert_case" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" - -[[package]] -name = "cookie" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7efb37c3e1ccb1ff97164ad95ac1606e8ccd35b3fa0a7d99a304c7f4a428cc24" -dependencies = [ - "percent-encoding", - "time 0.3.23", - "version_check", -] - -[[package]] -name = 
"core-foundation" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" - -[[package]] -name = "core2" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" -dependencies = [ - "memchr", -] - -[[package]] -name = "cpufeatures" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" -dependencies = [ - "libc", -] - -[[package]] -name = "crc" -version = "3.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" - -[[package]] -name = "crc32fast" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crossbeam-queue" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" -dependencies = [ - "cfg-if", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "cssparser" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "754b69d351cdc2d8ee09ae203db831e005560fc6030da058f86ad60c92a9cb0a" -dependencies = [ - "cssparser-macros", - "dtoa-short", - "itoa 0.4.8", - "matches", - "phf 0.8.0", - "proc-macro2 1.0.64", - "quote 1.0.29", - "smallvec", - "syn 1.0.109", -] - -[[package]] -name = "cssparser-macros" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" -dependencies = [ - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = "dary_heap" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7762d17f1241643615821a8455a0b2c3e803784b058693d990b11f2dce25a0ca" - -[[package]] -name = "data-encoding" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" - -[[package]] -name = "deadpool" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "421fe0f90f2ab22016f32a9881be5134fdd71c65298917084b0c7477cbc3856e" -dependencies = [ - "async-trait", - "deadpool-runtime", - "num_cpus", - "retain_mut", - "tokio", -] - -[[package]] -name = "deadpool-runtime" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" - -[[package]] -name = "der" -version = "0.7.7" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7ed52955ce76b1554f509074bb357d3fb8ac9b51288a65a3fd480d1dfba946" -dependencies = [ - "const-oid", - "pem-rfc7468", - "zeroize", -] - -[[package]] -name = "der-parser" -version = "7.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe398ac75057914d7d07307bf67dc7f3f574a26783b4fc7805a20ffa9f506e82" -dependencies = [ - "asn1-rs", - "displaydoc", - "nom", - "num-bigint", - "num-traits", - "rusticata-macros", -] - -[[package]] -name = "derive_more" -version = "0.99.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" -dependencies = [ - "convert_case", - "proc-macro2 1.0.64", - "quote 1.0.29", - "rustc_version", - "syn 1.0.109", -] - -[[package]] -name = "deunicode" -version = "1.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c1bba4f227a4a53d12b653f50ca7bf10c9119ae2aba56aff9e0338b5c98f36a" - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "const-oid", - "crypto-common", - "subtle", -] - -[[package]] -name = "displaydoc" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = "dotenvy" -version = "0.15.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" - -[[package]] -name = "dtoa" -version = "1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "519b83cd10f5f6e969625a409f735182bea5558cd8b64c655806ceaae36f1999" - 
-[[package]] -name = "dtoa-short" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbaceec3c6e4211c79e7b1800fb9680527106beb2f9c51904a3210c03a448c74" -dependencies = [ - "dtoa", -] - -[[package]] -name = "easy-scraper" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18a857bc01b5ae04874234f6b5d16b8b8fa86910aa5777479c2669b5df607fce" -dependencies = [ - "html5ever 0.25.2", - "kuchiki", - "regex", -] - -[[package]] -name = "either" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" -dependencies = [ - "serde", -] - -[[package]] -name = "ellipse" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1835a82a08e5c9393639e7cf99786a65af71f7fa9df7c91a519f2d52e6fa052d" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "encoding_rs" -version = "0.8.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "equivalent" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1" - -[[package]] -name = "errno" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "etcetera" -version = "0.8.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" -dependencies = [ - "cfg-if", - "home", - "windows-sys 0.48.0", -] - -[[package]] -name = "event-listener" -version = "2.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" - -[[package]] -name = "faker_rand" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "300d2ddbf2245b5b5e723995e0961033121b4fc2be9045fb661af82bd739ffb6" -dependencies = [ - "deunicode", - "lazy_static", - "rand 0.8.5", -] - -[[package]] -name = "fastrand" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] - -[[package]] -name = "flate2" -version = "1.0.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - -[[package]] -name = "flume" -version = "0.10.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1657b4441c3403d9f7b3409e47575237dac27b1b5726df654a6ecbf92f0f7577" -dependencies = [ - "futures-core", - "futures-sink", - "pin-project", - "spin 0.9.8", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - -[[package]] -name = "form_urlencoded" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "fuchsia-cprng" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" - -[[package]] -name = "futf" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" -dependencies = [ - "mac", - "new_debug_unreachable", -] - -[[package]] -name = "futures" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" - -[[package]] -name = "futures-executor" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-intrusive" -version = "0.5.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" -dependencies = [ - "futures-core", - "lock_api", - "parking_lot", -] - -[[package]] -name = "futures-io" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" - -[[package]] -name = "futures-lite" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" -dependencies = [ - "fastrand", - "futures-core", - "futures-io", - "memchr", - "parking", - "pin-project-lite", - "waker-fn", -] - -[[package]] -name = "futures-macro" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = "futures-sink" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" - -[[package]] -name = "futures-task" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" - -[[package]] -name = "futures-timer" -version = "3.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" - -[[package]] -name = "futures-util" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - 
"pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "fxhash" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" -dependencies = [ - "byteorder", -] - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", -] - -[[package]] -name = "getrandom" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" -dependencies = [ - "cfg-if", - "libc", - "wasi 0.9.0+wasi-snapshot-preview1", -] - -[[package]] -name = "getrandom" -version = "0.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" -dependencies = [ - "cfg-if", - "libc", - "wasi 0.11.0+wasi-snapshot-preview1", -] - -[[package]] -name = "gimli" -version = "0.27.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" - -[[package]] -name = "h2" -version = "0.3.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http", - "indexmap 1.9.3", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "half" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" -dependencies = [ - "ahash", -] - -[[package]] -name = "hashbrown" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" -dependencies = [ - "ahash", - "allocator-api2", -] - -[[package]] -name = "hashlink" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312f66718a2d7789ffef4f4b7b213138ed9f1eb3aa1d0d82fc99f88fb3ffd26f" -dependencies = [ - "hashbrown 0.14.0", -] - -[[package]] -name = "headers" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" -dependencies = [ - "base64 0.13.1", - "bitflags 1.3.2", - "bytes", - "headers-core", - "http", - "httpdate", - "mime", - "sha1 0.10.5", -] - -[[package]] -name = "headers-core" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" -dependencies = [ - "http", -] - -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "hermit-abi" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - 
-[[package]] -name = "hkdf" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" -dependencies = [ - "hmac", -] - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest", -] - -[[package]] -name = "home" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" -dependencies = [ - "windows-sys 0.48.0", -] - -[[package]] -name = "html5ever" -version = "0.22.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c213fa6a618dc1da552f54f85cba74b05d8e883c92ec4e89067736938084c26e" -dependencies = [ - "log", - "mac", - "markup5ever 0.7.5", - "proc-macro2 0.4.30", - "quote 0.6.13", - "syn 0.15.44", -] - -[[package]] -name = "html5ever" -version = "0.25.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5c13fb08e5d4dfc151ee5e88bae63f7773d61852f3bdc73c9f4b9e1bde03148" -dependencies = [ - "log", - "mac", - "markup5ever 0.10.1", - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 1.0.109", -] - -[[package]] -name = "http" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" -dependencies = [ - "bytes", - "fnv", - "itoa 1.0.8", -] - -[[package]] -name = "http-body" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" -dependencies = [ - "bytes", - "http", - "pin-project-lite", -] - -[[package]] -name = "http-range-header" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0bfe8eed0a9285ef776bb792479ea3834e8b94e13d615c2f66d03dd50a435a29" - -[[package]] -name = "http-types" -version = "2.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e9b187a72d63adbfba487f48095306ac823049cb504ee195541e91c7775f5ad" -dependencies = [ - "anyhow", - "async-channel", - "base64 0.13.1", - "futures-lite", - "http", - "infer", - "pin-project-lite", - "rand 0.7.3", - "serde", - "serde_json", - "serde_qs", - "serde_urlencoded", - "url", -] - -[[package]] -name = "httparse" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" - -[[package]] -name = "httpdate" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" - -[[package]] -name = "hyper" -version = "0.14.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "h2", - "http", - "http-body", - "httparse", - "httpdate", - "itoa 1.0.8", - "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", - "want", -] - -[[package]] -name = "hyper-rustls" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" -dependencies = [ - "futures-util", - "http", - "hyper", - "rustls", - "tokio", - "tokio-rustls", -] - -[[package]] -name = "hyper-tls" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" -dependencies = [ - "bytes", - "hyper", - "native-tls", - "tokio", - "tokio-native-tls", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.57" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "wasm-bindgen", - "windows", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "idna" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" -dependencies = [ - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "include_dir" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18762faeff7122e89e0857b02f7ce6fcc0d101d5e9ad2ad7846cc01d61b7f19e" -dependencies = [ - "include_dir_macros", -] - -[[package]] -name = "include_dir_macros" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", -] - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg 1.1.0", - "hashbrown 0.12.3", -] - -[[package]] -name = "indexmap" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" -dependencies = [ - "equivalent", - "hashbrown 0.14.0", -] - -[[package]] -name = "infer" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac" - -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi", - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "ipnet" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" - -[[package]] -name = "is-terminal" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" -dependencies = [ - "hermit-abi", - "rustix 0.38.3", - "windows-sys 0.48.0", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" - -[[package]] -name = "itoa" -version = "1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b02a5381cc465bd3041d84623d0fa3b66738b52b8e2fc3bab8ad63ab032f4a" - -[[package]] -name = "js-sys" -version = "0.3.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "kittybox" -version = "0.1.0" -dependencies = [ - "anyhow", - "argon2", 
- "async-trait", - "axum", - "axum-extra", - "bytes", - "chrono", - "clap", - "data-encoding", - "easy-scraper", - "either", - "faker_rand", - "futures", - "futures-util", - "hex", - "hyper", - "kittybox-frontend-renderer", - "kittybox-indieauth", - "kittybox-util", - "lazy_static", - "listenfd", - "markdown", - "microformats", - "newbase60", - "prometheus", - "rand 0.8.5", - "redis", - "relative-path", - "reqwest", - "serde", - "serde_json", - "serde_urlencoded", - "serde_variant", - "sha2", - "sqlx", - "tempfile", - "thiserror", - "tokio", - "tokio-stream", - "tokio-util", - "tower", - "tower-http 0.3.5", - "tracing", - "tracing-log", - "tracing-subscriber", - "tracing-test", - "tracing-tree", - "url", - "uuid 1.4.0", - "webauthn-rs", - "wiremock", -] - -[[package]] -name = "kittybox-frontend-renderer" -version = "0.1.0" -dependencies = [ - "axum", - "chrono", - "ellipse", - "faker_rand", - "http", - "include_dir", - "kittybox-indieauth", - "kittybox-util", - "libflate", - "markup", - "microformats", - "rand 0.8.5", - "serde_json", - "walkdir", -] - -[[package]] -name = "kittybox-indieauth" -version = "0.1.0" -dependencies = [ - "axum-core", - "data-encoding", - "http", - "rand 0.8.5", - "serde", - "serde_json", - "serde_urlencoded", - "sha2", - "url", -] - -[[package]] -name = "kittybox-util" -version = "0.1.0" -dependencies = [ - "async-trait", - "axum-core", - "futures-util", - "http", - "rand 0.8.5", - "serde", - "serde_json", - "sqlx", - "tokio", - "uuid 1.4.0", -] - -[[package]] -name = "kuchiki" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ea8e9c6e031377cff82ee3001dc8026cdf431ed4e2e6b51f98ab8c73484a358" -dependencies = [ - "cssparser", - "html5ever 0.25.2", - "matches", - "selectors", -] - -[[package]] -name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" -dependencies = [ 
- "spin 0.5.2", -] - -[[package]] -name = "libc" -version = "0.2.147" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" - -[[package]] -name = "libflate" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7d5654ae1795afc7ff76f4365c2c8791b0feb18e8996a96adad8ffd7c3b2bf" -dependencies = [ - "adler32", - "core2", - "crc32fast", - "dary_heap", - "libflate_lz77", -] - -[[package]] -name = "libflate_lz77" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be5f52fb8c451576ec6b79d3f4deb327398bc05bbdbd99021a6e77a4c855d524" -dependencies = [ - "core2", - "hashbrown 0.13.2", - "rle-decode-fast", -] - -[[package]] -name = "libm" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" - -[[package]] -name = "libsqlite3-sys" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" -dependencies = [ - "cc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "linux-raw-sys" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" - -[[package]] -name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - -[[package]] -name = "linux-raw-sys" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" - -[[package]] -name = "listenfd" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c02b14f35d9f5f082fd0b1b34aa0ef32e3354c859c721d7f3325b3f79a42ba54" -dependencies = [ - "libc", - "uuid 0.8.2", - "winapi", -] - -[[package]] -name = "lock_api" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" -dependencies = [ - "autocfg 1.1.0", - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" - -[[package]] -name = "mac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" - -[[package]] -name = "markdown" -version = "1.0.0-alpha.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1bd98c3b68451b0390a289c58c856adb4e2b50cc40507ce2a105d5b00eafc80" -dependencies = [ - "unicode-id", -] - -[[package]] -name = "markup" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd9196a235d499738d04f6a2466ce2610bf6b84730610efea8bee1b90d028b0d" -dependencies = [ - "itoa 1.0.8", - "markup-proc-macro", -] - -[[package]] -name = "markup-proc-macro" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a927f0e237dcbdd8c1a8ab03c4e1e8b1999804c448ebf06ff3b5512506c8150" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 1.0.109", -] - -[[package]] -name = "markup5ever" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897636f9850c3eef4905a5540683ed53dc9393860f0846cab2c2ddf9939862ff" -dependencies = [ - "phf 0.7.24", - "phf_codegen 0.7.24", - "serde", - "serde_derive", - "serde_json", - "string_cache 0.7.5", - "string_cache_codegen 0.4.4", - "tendril", -] - -[[package]] -name = "markup5ever" -version = "0.10.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a24f40fb03852d1cdd84330cddcaf98e9ec08a7b7768e952fad3b4cf048ec8fd" -dependencies = [ - "log", - "phf 0.8.0", - "phf_codegen 0.8.0", - "string_cache 0.8.7", - "string_cache_codegen 0.5.2", - "tendril", -] - -[[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata 0.1.10", -] - -[[package]] -name = "matches" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" - -[[package]] -name = "matchit" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40" - -[[package]] -name = "md-5" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" -dependencies = [ - "digest", -] - -[[package]] -name = "memchr" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" - -[[package]] -name = "microformats" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18e7fccd15cba21880e824d71b8354e9e67561c9cd2bf3ec09b21dba26392ecb" -dependencies = [ - "chrono", - "html5ever 0.22.5", - "lazy_static", - "log", - "regex", - "serde", - "serde_json", - "thiserror", - "url", -] - -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - -[[package]] -name = "miniz_oxide" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" -dependencies = [ - "adler", -] - -[[package]] -name = "mio" -version = "0.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" -dependencies = [ - "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.48.0", -] - -[[package]] -name = "multer" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01acbdc23469fd8fe07ab135923371d5f5a422fbf9c522158677c8eb15bc51c2" -dependencies = [ - "bytes", - "encoding_rs", - "futures-util", - "http", - "httparse", - "log", - "memchr", - "mime", - "spin 0.9.8", - "version_check", -] - -[[package]] -name = "native-tls" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" -dependencies = [ - "lazy_static", - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - -[[package]] -name = "new_debug_unreachable" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" - -[[package]] -name = "newbase60" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f6fe19de628588cbd968ba26b247819c7b7a000b3b4b5bcbf69ea606a33b5ba" - -[[package]] -name = "nodrop" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" - -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - -[[package]] -name = "nu-ansi-term" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" -dependencies = [ - "overload", - "winapi", -] - -[[package]] -name = "num-bigint" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" -dependencies = [ - "autocfg 1.1.0", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-bigint-dig" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" -dependencies = [ - "byteorder", - "lazy_static", - "libm", - "num-integer", - "num-iter", - "num-traits", - "rand 0.8.5", - "smallvec", - "zeroize", -] - -[[package]] -name = "num-integer" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" -dependencies = [ - "autocfg 1.1.0", - "num-traits", -] - -[[package]] -name = "num-iter" -version = "0.1.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" -dependencies = [ - "autocfg 1.1.0", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" -dependencies = [ - "autocfg 1.1.0", - "libm", -] - -[[package]] -name = 
"num_cpus" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "object" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" -dependencies = [ - "memchr", -] - -[[package]] -name = "oid-registry" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38e20717fa0541f39bd146692035c37bedfa532b3e5071b35761082407546b2a" -dependencies = [ - "asn1-rs", -] - -[[package]] -name = "once_cell" -version = "1.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" - -[[package]] -name = "openssl" -version = "0.10.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d" -dependencies = [ - "bitflags 1.3.2", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = "openssl-probe" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" - -[[package]] -name = "openssl-sys" -version = "0.9.90" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = 
"overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - -[[package]] -name = "parking" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" - -[[package]] -name = "parking_lot" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-targets 0.48.1", -] - -[[package]] -name = "password-hash" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "346f04948ba92c43e8469c1ee6736c7563d71012b17d40745260fe106aac2166" -dependencies = [ - "base64ct", - "rand_core 0.6.4", - "subtle", -] - -[[package]] -name = "paste" -version = "1.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4b27ab7be369122c218afc2079489cdcb4b517c0a3fc386ff11e1fedfcc2b35" - -[[package]] -name = "pem-rfc7468" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" -dependencies = [ - "base64ct", -] - -[[package]] -name = "percent-encoding" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" - -[[package]] -name = "phf" -version = "0.7.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b3da44b85f8e8dfaec21adae67f95d93244b2ecf6ad2a692320598dcc8e6dd18" -dependencies = [ - "phf_shared 0.7.24", -] - -[[package]] -name = "phf" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" -dependencies = [ - "phf_macros", - "phf_shared 0.8.0", - "proc-macro-hack", -] - -[[package]] -name = "phf_codegen" -version = "0.7.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b03e85129e324ad4166b06b2c7491ae27fe3ec353af72e72cd1654c7225d517e" -dependencies = [ - "phf_generator 0.7.24", - "phf_shared 0.7.24", -] - -[[package]] -name = "phf_codegen" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815" -dependencies = [ - "phf_generator 0.8.0", - "phf_shared 0.8.0", -] - -[[package]] -name = "phf_generator" -version = "0.7.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09364cc93c159b8b06b1f4dd8a4398984503483891b0c26b867cf431fb132662" -dependencies = [ - "phf_shared 0.7.24", - "rand 0.6.5", -] - -[[package]] -name = "phf_generator" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526" -dependencies = [ - "phf_shared 0.8.0", - "rand 0.7.3", -] - -[[package]] -name = "phf_generator" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" -dependencies = [ - "phf_shared 0.10.0", - "rand 0.8.5", -] - -[[package]] -name = "phf_macros" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f6fde18ff429ffc8fe78e2bf7f8b7a5a5a6e2a8b58bc5a9ac69198bbda9189c" -dependencies = [ - "phf_generator 0.8.0", - "phf_shared 0.8.0", - 
"proc-macro-hack", - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 1.0.109", -] - -[[package]] -name = "phf_shared" -version = "0.7.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "234f71a15de2288bcb7e3b6515828d22af7ec8598ee6d24c3b526fa0a80b67a0" -dependencies = [ - "siphasher 0.2.3", -] - -[[package]] -name = "phf_shared" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" -dependencies = [ - "siphasher 0.3.10", -] - -[[package]] -name = "phf_shared" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" -dependencies = [ - "siphasher 0.3.10", -] - -[[package]] -name = "pin-project" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030ad2bc4db10a8944cb0d837f158bdfec4d4a4873ab701a95046770d11f8842" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "pkcs1" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" -dependencies = [ - "der", - "pkcs8", - "spki", -] 
- -[[package]] -name = "pkcs8" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - -[[package]] -name = "pkg-config" -version = "0.3.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" - -[[package]] -name = "ppv-lite86" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" - -[[package]] -name = "precomputed-hash" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" - -[[package]] -name = "proc-macro-hack" -version = "0.5.20+deprecated" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" - -[[package]] -name = "proc-macro2" -version = "0.4.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" -dependencies = [ - "unicode-xid 0.1.0", -] - -[[package]] -name = "proc-macro2" -version = "1.0.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78803b62cbf1f46fde80d7c0e803111524b9877184cfe7c3033659490ac7a7da" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "procfs" -version = "0.14.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1de8dacb0873f77e6aefc6d71e044761fcc68060290f5b1089fcdf84626bb69" -dependencies = [ - "bitflags 1.3.2", - "byteorder", - "hex", - "lazy_static", - "rustix 0.36.15", -] - -[[package]] -name = "prometheus" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"449811d15fbdf5ceb5c1144416066429cf82316e2ec8ce0c1f6f8a02e7bbcf8c" -dependencies = [ - "cfg-if", - "fnv", - "lazy_static", - "libc", - "memchr", - "parking_lot", - "procfs", - "protobuf", - "thiserror", -] - -[[package]] -name = "protobuf" -version = "2.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" - -[[package]] -name = "quote" -version = "0.6.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" -dependencies = [ - "proc-macro2 0.4.30", -] - -[[package]] -name = "quote" -version = "1.0.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" -dependencies = [ - "proc-macro2 1.0.64", -] - -[[package]] -name = "rand" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" -dependencies = [ - "autocfg 0.1.8", - "libc", - "rand_chacha 0.1.1", - "rand_core 0.4.2", - "rand_hc 0.1.0", - "rand_isaac", - "rand_jitter", - "rand_os", - "rand_pcg 0.1.2", - "rand_xorshift", - "winapi", -] - -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc 0.2.0", - "rand_pcg 0.2.1", -] - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" -dependencies = [ - "autocfg 0.1.8", - "rand_core 0.3.1", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -dependencies = [ - "rand_core 0.4.2", -] - -[[package]] -name = "rand_core" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" - -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" -dependencies = [ - "getrandom 0.1.16", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.10", -] - -[[package]] -name = "rand_hc" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" -dependencies = [ - "rand_core 0.3.1", -] - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", -] - -[[package]] -name = "rand_isaac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" -dependencies = [ - "rand_core 0.3.1", -] - -[[package]] -name = "rand_jitter" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" -dependencies = [ - "libc", - "rand_core 0.4.2", - "winapi", -] - -[[package]] -name = "rand_os" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" -dependencies = [ - "cloudabi", - "fuchsia-cprng", - "libc", - "rand_core 0.4.2", - "rdrand", - "winapi", -] - -[[package]] -name = "rand_pcg" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" -dependencies = [ - "autocfg 0.1.8", - "rand_core 0.4.2", -] - -[[package]] -name = "rand_pcg" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" -dependencies = [ - "rand_core 0.5.1", -] - -[[package]] -name = "rand_xorshift" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" -dependencies = [ - "rand_core 0.3.1", -] - -[[package]] -name = "rdrand" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" -dependencies = [ - "rand_core 0.3.1", -] - -[[package]] -name = "redis" -version = "0.21.7" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "152f3863635cbb76b73bc247845781098302c6c9ad2060e1a9a7de56840346b6" -dependencies = [ - "async-trait", - "bytes", - "combine", - "futures-util", - "itoa 1.0.8", - "percent-encoding", - "pin-project-lite", - "ryu", - "sha1 0.6.1", - "tokio", - "tokio-util", - "url", -] - -[[package]] -name = "redox_syscall" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" -dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "regex" -version = "1.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata 0.3.2", - "regex-syntax 0.7.3", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", -] - -[[package]] -name = "regex-automata" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83d3daa6976cffb758ec878f108ba0e062a45b2d6ca3a2cca965338855476caf" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax 0.7.3", -] - -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - -[[package]] -name = "regex-syntax" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab07dc67230e4a4718e70fd5c20055a4334b121f1f9db8fe63ef39ce9b8c846" - -[[package]] -name = "relative-path" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4bf2521270932c3c7bed1a59151222bd7643c79310f2916f01925e1e16255698" - -[[package]] -name = "reqwest" -version = "0.11.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cde824a14b7c14f85caff81225f411faacc04a2013f41670f41443742b1c1c55" -dependencies = [ - "async-compression", - "base64 0.21.2", - "bytes", - "encoding_rs", - "futures-core", - "futures-util", - "h2", - "http", - "http-body", - "hyper", - "hyper-rustls", - "hyper-tls", - "ipnet", - "js-sys", - "log", - "mime", - "native-tls", - "once_cell", - "percent-encoding", - "pin-project-lite", - "rustls", - "rustls-pemfile", - "serde", - "serde_json", - "serde_urlencoded", - "tokio", - "tokio-native-tls", - "tokio-rustls", - "tokio-util", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm-streams", - "web-sys", - "webpki-roots 0.22.6", - "winreg", -] - -[[package]] -name = "retain_mut" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0" - -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted", - "web-sys", - "winapi", -] - -[[package]] -name = "rle-decode-fast" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422" - -[[package]] -name = "rsa" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ab43bb47d23c1a631b4b680199a45255dce26fa9ab2fa902581f624ff13e6a8" -dependencies = [ - "byteorder", - "const-oid", - "digest", - "num-bigint-dig", - "num-integer", - "num-iter", - "num-traits", - "pkcs1", - "pkcs8", - "rand_core 0.6.4", - "signature", - "spki", - "subtle", - "zeroize", 
-] - -[[package]] -name = "rustc-demangle" -version = "0.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" - -[[package]] -name = "rustc_version" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" -dependencies = [ - "semver", -] - -[[package]] -name = "rusticata-macros" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" -dependencies = [ - "nom", -] - -[[package]] -name = "rustix" -version = "0.36.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c37f1bd5ef1b5422177b7646cba67430579cfe2ace80f284fee876bca52ad941" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.1.4", - "windows-sys 0.45.0", -] - -[[package]] -name = "rustix" -version = "0.37.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustix" -version = "0.38.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac5ffa1efe7548069688cd7028f32591853cd7b5b756d41bcffd2353e4fc75b4" -dependencies = [ - "bitflags 2.3.3", - "errno", - "libc", - "linux-raw-sys 0.4.3", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustls" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b19faa85ecb5197342b54f987b142fb3e30d0c90da40f80ef4fa9a726e6676ed" -dependencies = [ - "log", - "ring", - "rustls-webpki 0.101.1", - "sct", -] - -[[package]] -name = "rustls-pemfile" -version = "1.0.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" -dependencies = [ - "base64 0.21.2", -] - -[[package]] -name = "rustls-webpki" -version = "0.100.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6207cd5ed3d8dca7816f8f3725513a34609c0c765bf652b8c3cb4cfd87db46b" -dependencies = [ - "ring", - "untrusted", -] - -[[package]] -name = "rustls-webpki" -version = "0.101.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15f36a6828982f422756984e47912a7a51dcbc2a197aa791158f8ca61cd8204e" -dependencies = [ - "ring", - "untrusted", -] - -[[package]] -name = "rustversion" -version = "1.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc31bd9b61a32c31f9650d18add92aa83a49ba979c143eefd27fe7177b05bd5f" - -[[package]] -name = "ryu" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe232bdf6be8c8de797b22184ee71118d63780ea42ac85b61d1baa6d3b782ae9" - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "schannel" -version = "0.1.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" -dependencies = [ - "windows-sys 0.48.0", -] - -[[package]] -name = "scopeguard" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" - -[[package]] -name = "sct" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" -dependencies = [ - "ring", - "untrusted", -] - 
-[[package]] -name = "security-framework" -version = "2.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f51d0c0d83bec45f16480d0ce0058397a69e48fcdc52d1dc8855fb68acbd31a7" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "selectors" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df320f1889ac4ba6bc0cdc9c9af7af4bd64bb927bccdf32d81140dc1f9be12fe" -dependencies = [ - "bitflags 1.3.2", - "cssparser", - "derive_more", - "fxhash", - "log", - "matches", - "phf 0.8.0", - "phf_codegen 0.8.0", - "precomputed-hash", - "servo_arc", - "smallvec", - "thin-slice", -] - -[[package]] -name = "semver" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" - -[[package]] -name = "serde" -version = "1.0.170" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56657f512baabca8f840542f9ca8152aecf182c473c26e46e58d6aab4f6e439" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_cbor_2" -version = "0.12.0-dev" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b46d75f449e01f1eddbe9b00f432d616fbbd899b809c837d0fbc380496a0dd55" -dependencies = [ - "half", - "serde", -] - -[[package]] -name = "serde_derive" -version = "1.0.170" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77d477848e6b23adba0db397777d5aad864555bc17fd9c89abb3b8009788b7b8" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = 
"serde_json" -version = "1.0.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f1e14e89be7aa4c4b78bdbdc9eb5bf8517829a600ae8eaa39a6e1d960b5185c" -dependencies = [ - "itoa 1.0.8", - "ryu", - "serde", -] - -[[package]] -name = "serde_path_to_error" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc4422959dd87a76cb117c191dcbffc20467f06c9100b76721dab370f24d3a" -dependencies = [ - "itoa 1.0.8", - "serde", -] - -[[package]] -name = "serde_qs" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6" -dependencies = [ - "percent-encoding", - "serde", - "thiserror", -] - -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa 1.0.8", - "ryu", - "serde", -] - -[[package]] -name = "serde_variant" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47a8ec0b2fd0506290348d9699c0e3eb2e3e8c0498b5a9a6158b3bd4d6970076" -dependencies = [ - "serde", -] - -[[package]] -name = "servo_arc" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d98238b800e0d1576d8b6e3de32827c2d74bee68bb97748dcf5071fb53965432" -dependencies = [ - "nodrop", - "stable_deref_trait", -] - -[[package]] -name = "sha1" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1da05c97445caa12d05e848c4a4fcbbea29e748ac28f7e80e9b010392063770" -dependencies = [ - "sha1_smol", -] - -[[package]] -name = "sha1" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" -dependencies = [ - "cfg-if", - 
"cpufeatures", - "digest", -] - -[[package]] -name = "sha1_smol" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" - -[[package]] -name = "sha2" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest", -] - -[[package]] -name = "sharded-slab" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "signal-hook-registry" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" -dependencies = [ - "libc", -] - -[[package]] -name = "signature" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" -dependencies = [ - "digest", - "rand_core 0.6.4", -] - -[[package]] -name = "siphasher" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac" - -[[package]] -name = "siphasher" -version = "0.3.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" - -[[package]] -name = "slab" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" -dependencies = [ - "autocfg 1.1.0", -] - -[[package]] -name = "smallvec" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" - -[[package]] -name = "socket2" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" -dependencies = [ - "lock_api", -] - -[[package]] -name = "spki" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "sqlformat" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e" -dependencies = [ - "itertools", - "nom", - "unicode_categories", -] - -[[package]] -name = "sqlx" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ef53c86d2066e04f0ac6b1364f16d13d82388e2d07f11a5c71782345555761" -dependencies = [ - "sqlx-core", - "sqlx-macros", - "sqlx-mysql", - "sqlx-postgres", - "sqlx-sqlite", -] - -[[package]] -name = "sqlx-core" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a22fd81e9c1ad53c562edb869ff042b215d4eadefefc4784bacfbfd19835945" -dependencies = [ - "ahash", - "atoi", - "byteorder", - "bytes", - "chrono", - "crc", - "crossbeam-queue", - "dotenvy", - "either", - "event-listener", - "futures-channel", - "futures-core", - "futures-intrusive", - "futures-io", - "futures-util", - "hashlink", - 
"hex", - "indexmap 2.0.0", - "log", - "memchr", - "native-tls", - "once_cell", - "paste", - "percent-encoding", - "rustls", - "rustls-pemfile", - "serde", - "serde_json", - "sha2", - "smallvec", - "sqlformat", - "thiserror", - "tokio", - "tokio-stream", - "tracing", - "url", - "uuid 1.4.0", - "webpki-roots 0.23.1", -] - -[[package]] -name = "sqlx-macros" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00bb7c096a202b8164c175614cbfb79fe0e1e0a3d50e0374526183ef2974e4a2" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "sqlx-core", - "sqlx-macros-core", - "syn 1.0.109", -] - -[[package]] -name = "sqlx-macros-core" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d644623ab9699014e5b3cb61a040d16caa50fd477008f63f1399ae35498a58" -dependencies = [ - "dotenvy", - "either", - "heck", - "hex", - "once_cell", - "proc-macro2 1.0.64", - "quote 1.0.29", - "serde", - "serde_json", - "sha2", - "sqlx-core", - "sqlx-mysql", - "sqlx-postgres", - "sqlx-sqlite", - "syn 1.0.109", - "tempfile", - "tokio", - "url", -] - -[[package]] -name = "sqlx-mysql" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8264c59b28b6858796acfcedc660aa4c9075cc6e4ec8eb03cdca2a3e725726db" -dependencies = [ - "atoi", - "base64 0.21.2", - "bitflags 2.3.3", - "byteorder", - "bytes", - "chrono", - "crc", - "digest", - "dotenvy", - "either", - "futures-channel", - "futures-core", - "futures-io", - "futures-util", - "generic-array", - "hex", - "hkdf", - "hmac", - "itoa 1.0.8", - "log", - "md-5", - "memchr", - "once_cell", - "percent-encoding", - "rand 0.8.5", - "rsa", - "serde", - "sha1 0.10.5", - "sha2", - "smallvec", - "sqlx-core", - "stringprep", - "thiserror", - "tracing", - "uuid 1.4.0", - "whoami", -] - -[[package]] -name = "sqlx-postgres" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1cab6147b81ca9213a7578f1b4c9d24c449a53953cd2222a7b5d7cd29a5c3139" -dependencies = [ - "atoi", - "base64 0.21.2", - "bitflags 2.3.3", - "byteorder", - "chrono", - "crc", - "dotenvy", - "etcetera", - "futures-channel", - "futures-core", - "futures-io", - "futures-util", - "hex", - "hkdf", - "hmac", - "home", - "itoa 1.0.8", - "log", - "md-5", - "memchr", - "once_cell", - "rand 0.8.5", - "serde", - "serde_json", - "sha1 0.10.5", - "sha2", - "smallvec", - "sqlx-core", - "stringprep", - "thiserror", - "tracing", - "uuid 1.4.0", - "whoami", -] - -[[package]] -name = "sqlx-sqlite" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59fba60afa64718104b71eec6984f8779d4caffff3b30cde91a75843c7efc126" -dependencies = [ - "atoi", - "chrono", - "flume", - "futures-channel", - "futures-core", - "futures-executor", - "futures-intrusive", - "futures-util", - "libsqlite3-sys", - "log", - "percent-encoding", - "serde", - "sqlx-core", - "tracing", - "url", - "uuid 1.4.0", -] - -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - -[[package]] -name = "string_cache" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89c058a82f9fd69b1becf8c274f412281038877c553182f1d02eb027045a2d67" -dependencies = [ - "lazy_static", - "new_debug_unreachable", - "phf_shared 0.7.24", - "precomputed-hash", - "serde", - "string_cache_codegen 0.4.4", - "string_cache_shared", -] - -[[package]] -name = "string_cache" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" -dependencies = [ - "new_debug_unreachable", - "once_cell", - "parking_lot", - "phf_shared 0.10.0", - "precomputed-hash", - "serde", -] - -[[package]] -name = "string_cache_codegen" -version = 
"0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f45ed1b65bf9a4bf2f7b7dc59212d1926e9eaf00fa998988e420fd124467c6" -dependencies = [ - "phf_generator 0.7.24", - "phf_shared 0.7.24", - "proc-macro2 1.0.64", - "quote 1.0.29", - "string_cache_shared", -] - -[[package]] -name = "string_cache_codegen" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" -dependencies = [ - "phf_generator 0.10.0", - "phf_shared 0.10.0", - "proc-macro2 1.0.64", - "quote 1.0.29", -] - -[[package]] -name = "string_cache_shared" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc" - -[[package]] -name = "stringprep" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ee348cb74b87454fff4b551cbf727025810a004f88aeacae7f85b87f4e9a1c1" -dependencies = [ - "unicode-bidi", - "unicode-normalization", -] - -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - -[[package]] -name = "subtle" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" - -[[package]] -name = "syn" -version = "0.15.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" -dependencies = [ - "proc-macro2 0.4.30", - "quote 0.6.13", - "unicode-xid 0.1.0", -] - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2 1.0.64", - 
"quote 1.0.29", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15e3fc8c0c74267e2df136e5e5fb656a464158aa57624053375eb9c8c6e25ae2" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "unicode-ident", -] - -[[package]] -name = "sync_wrapper" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" - -[[package]] -name = "synstructure" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 1.0.109", - "unicode-xid 0.2.4", -] - -[[package]] -name = "tempfile" -version = "3.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" -dependencies = [ - "autocfg 1.1.0", - "cfg-if", - "fastrand", - "redox_syscall", - "rustix 0.37.23", - "windows-sys 0.48.0", -] - -[[package]] -name = "tendril" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" -dependencies = [ - "futf", - "mac", - "utf-8", -] - -[[package]] -name = "thin-slice" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eaa81235c7058867fa8c0e7314f33dcce9c215f535d1913822a2b3f5e289f3c" - -[[package]] -name = "thiserror" -version = "1.0.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = "thread_local" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" -dependencies = [ - "cfg-if", - "once_cell", -] - -[[package]] -name = "time" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", -] - -[[package]] -name = "time" -version = "0.3.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446" -dependencies = [ - "itoa 1.0.8", - "serde", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" - -[[package]] -name = "time-macros" -version = "0.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4" -dependencies = [ - "time-core", -] - -[[package]] -name = "tinyvec" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "tokio" -version = "1.29.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" -dependencies = [ - "autocfg 1.1.0", - "backtrace", - "bytes", - "libc", - "mio", - "num_cpus", - "parking_lot", - "pin-project-lite", - "signal-hook-registry", - "socket2", - "tokio-macros", - "tracing", - "windows-sys 0.48.0", -] - -[[package]] -name = "tokio-macros" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = "tokio-native-tls" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" -dependencies = [ - "native-tls", - "tokio", -] - -[[package]] -name = "tokio-rustls" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" -dependencies = [ - "rustls", - "tokio", -] - -[[package]] -name = "tokio-stream" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" -dependencies = [ - "futures-core", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "tokio-util" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" -dependencies = [ - "bytes", - "futures-core", - "futures-sink", - "pin-project-lite", - "tokio", - "tracing", -] - -[[package]] -name = "tower" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" -dependencies = [ - "futures-core", - "futures-util", - "pin-project", - "pin-project-lite", - "tokio", - "tower-layer", - 
"tower-service", - "tracing", -] - -[[package]] -name = "tower-http" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f873044bf02dd1e8239e9c1293ea39dad76dc594ec16185d0a1bf31d8dc8d858" -dependencies = [ - "bitflags 1.3.2", - "bytes", - "futures-core", - "futures-util", - "http", - "http-body", - "http-range-header", - "pin-project-lite", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-http" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8bd22a874a2d0b70452d5597b12c537331d49060824a95f49f108994f94aa4c" -dependencies = [ - "bitflags 2.3.3", - "bytes", - "futures-core", - "futures-util", - "http", - "http-body", - "http-range-header", - "pin-project-lite", - "tower-layer", - "tower-service", -] - -[[package]] -name = "tower-layer" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" - -[[package]] -name = "tower-service" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" - -[[package]] -name = "tracing" -version = "0.1.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" -dependencies = [ - "cfg-if", - "log", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", -] - -[[package]] -name = "tracing-core" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" -dependencies = [ - "once_cell", - "valuable", -] - -[[package]] -name = "tracing-log" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" -dependencies = [ - "lazy_static", - "log", - "tracing-core", -] - -[[package]] -name = "tracing-serde" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" -dependencies = [ - "serde", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" -dependencies = [ - "matchers", - "nu-ansi-term", - "once_cell", - "regex", - "serde", - "serde_json", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log", - "tracing-serde", -] - -[[package]] -name = "tracing-test" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a2c0ff408fe918a94c428a3f2ad04e4afd5c95bbc08fcf868eff750c15728a4" -dependencies = [ - "lazy_static", - "tracing-core", - "tracing-subscriber", - "tracing-test-macro", -] - -[[package]] -name = "tracing-test-macro" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "258bc1c4f8e2e73a977812ab339d503e6feeb92700f6d07a6de4d321522d5c08" -dependencies = [ - "lazy_static", - "quote 1.0.29", - "syn 1.0.109", -] - -[[package]] -name = "tracing-tree" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d6b63348fad3ae0439b8bebf8d38fb5bda0b115d7a8a7e6f165f12790c58c3" -dependencies = [ - "is-terminal", - "nu-ansi-term", - "tracing-core", - "tracing-log", - "tracing-subscriber", -] - -[[package]] -name = 
"try-lock" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" - -[[package]] -name = "typenum" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" - -[[package]] -name = "unicode-bidi" -version = "0.3.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" - -[[package]] -name = "unicode-id" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d70b6494226b36008c8366c288d77190b3fad2eb4c10533139c1c1f461127f1a" - -[[package]] -name = "unicode-ident" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22049a19f4a68748a168c0fc439f9516686aa045927ff767eca0a85101fb6e73" - -[[package]] -name = "unicode-normalization" -version = "0.1.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "unicode-segmentation" -version = "1.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" - -[[package]] -name = "unicode-xid" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" - -[[package]] -name = "unicode-xid" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" - -[[package]] -name = "unicode_categories" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" - -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - -[[package]] -name = "url" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", - "serde", -] - -[[package]] -name = "utf-8" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" - -[[package]] -name = "utf8parse" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" - -[[package]] -name = "uuid" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" - -[[package]] -name = "uuid" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d023da39d1fde5a8a3fe1f3e01ca9632ada0a63e9797de55a879d6e2236277be" -dependencies = [ - "getrandom 0.2.10", - "serde", -] - -[[package]] -name = "valuable" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" - -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - -[[package]] -name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = 
"waker-fn" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" - -[[package]] -name = "walkdir" -version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" -dependencies = [ - "same-file", - "winapi-util", -] - -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - -[[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" - -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" - -[[package]] -name = "wasm-bindgen" -version = "0.2.87" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" -dependencies = [ - "cfg-if", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.87" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" -dependencies = [ - "bumpalo", - "log", - "once_cell", - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.37" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" -dependencies = [ - "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.87" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" -dependencies = [ - "quote 1.0.29", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.87" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" -dependencies = [ - "proc-macro2 1.0.64", - "quote 1.0.29", - "syn 2.0.25", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.87" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" - -[[package]] -name = "wasm-streams" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bbae3363c08332cadccd13b67db371814cd214c2524020932f0804b8cf7c078" -dependencies = [ - "futures-util", - "js-sys", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "web-sys" -version = "0.3.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "webauthn-rs" -version = "0.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2db00711c712414e93b019c4596315085792215bc2ac2d5872f9e8913b0a6316" -dependencies = [ - "base64urlsafedata", - "serde", - "tracing", - "url", - "uuid 1.4.0", - "webauthn-rs-core", -] - -[[package]] -name = "webauthn-rs-core" -version = "0.4.9" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "294c78c83f12153a51e1cf1e6970b5da1397645dada39033a9c3173a8fc4fc2b" -dependencies = [ - "base64 0.13.1", - "base64urlsafedata", - "compact_jwt", - "der-parser", - "nom", - "openssl", - "rand 0.8.5", - "serde", - "serde_cbor_2", - "serde_json", - "thiserror", - "tracing", - "url", - "uuid 1.4.0", - "webauthn-rs-proto", - "x509-parser", -] - -[[package]] -name = "webauthn-rs-proto" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d24e638361a63ba5c0a0be6a60229490fcdf33740ed63df5bb6bdb627b52a138" -dependencies = [ - "base64urlsafedata", - "serde", - "serde_json", - "url", -] - -[[package]] -name = "webpki" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" -dependencies = [ - "ring", - "untrusted", -] - -[[package]] -name = "webpki-roots" -version = "0.22.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" -dependencies = [ - "webpki", -] - -[[package]] -name = "webpki-roots" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338" -dependencies = [ - "rustls-webpki 0.100.1", -] - -[[package]] -name = "whoami" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" -dependencies = [ - "winapi", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" -dependencies = [ - "windows-targets 0.48.1", -] - -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.1", -] - -[[package]] -name = "windows-targets" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", -] - -[[package]] -name = "windows-targets" -version = "0.48.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" 
-dependencies = [ - "windows_aarch64_gnullvm 0.48.0", - "windows_aarch64_msvc 0.48.0", - "windows_i686_gnu 0.48.0", - "windows_i686_msvc 0.48.0", - "windows_x86_64_gnu 0.48.0", - "windows_x86_64_gnullvm 0.48.0", - "windows_x86_64_msvc 0.48.0", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" - -[[package]] -name = "windows_i686_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" - -[[package]] -name = "windows_i686_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" - -[[package]] -name = "winreg" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" -dependencies = [ - "winapi", -] - -[[package]] -name = "wiremock" -version = "0.5.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6f71803d3a1c80377a06221e0530be02035d5b3e854af56c6ece7ac20ac441d" -dependencies = [ - "assert-json-diff", - "async-trait", - "base64 0.21.2", - "deadpool", - "futures", - "futures-timer", - "http-types", - "hyper", - "log", - "once_cell", - "regex", - "serde", - "serde_json", - "tokio", -] - -[[package]] -name = "x509-parser" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9fb9bace5b5589ffead1afb76e43e34cff39cd0f3ce7e170ae0c29e53b88eb1c" -dependencies = [ - "asn1-rs", - "base64 0.13.1", - "data-encoding", - "der-parser", - "lazy_static", - "nom", - "oid-registry", - "rusticata-macros", - "thiserror", - "time 0.3.23", -] - -[[package]] -name = "zeroize" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" diff --git a/kittybox-rs/Cargo.toml b/kittybox-rs/Cargo.toml deleted file mode 100644 index 71ebc66..0000000 --- a/kittybox-rs/Cargo.toml +++ /dev/null @@ -1,158 +0,0 @@ -[package] -name = "kittybox" -version = "0.1.0" -authors = ["Vika "] -edition = "2021" -default-run = "kittybox" -autobins = false - -[features] -default = ["rustls", "postgres"] -#util = ["anyhow"] -#migration = ["util"] -webauthn = ["openssl", "dep:webauthn"] -openssl = ["reqwest/native-tls-crate", "reqwest/native-tls-alpn", "sqlx/tls-native-tls"] -rustls = ["reqwest/rustls-tls-webpki-roots", "sqlx/tls-rustls"] -cli = ["clap"] -postgres = ["sqlx", "kittybox-util/sqlx"] - -[[bin]] -name = "kittybox" -path = "src/main.rs" -required-features = [] - -#[[bin]] -#name = "kittybox-bulk-import" -#path = "src/bin/kittybox_bulk_import.rs" -#required-features = ["migration"] - -#[[bin]] -#name = "kittybox-database-converter" -#path = "src/bin/kittybox_database_converter.rs" -#required-features = ["migration", "redis"] - -[[bin]] -name = "kittybox-indieauth-helper" -path = "src/bin/kittybox-indieauth-helper.rs" -required-features = ["cli"] - -[[bin]] -name = "kittybox-check-webmention" -path = "src/bin/kittybox-check-webmention.rs" -required-features = ["cli"] - -[[bin]] -name = "kittybox-mf2" -path = "src/bin/kittybox-mf2.rs" -required-features = ["cli"] - -[workspace] -members = [".", "./util", "./templates", "./indieauth"] -default-members = [".", "./util", "./templates", "./indieauth"] -[dependencies.kittybox-util] -version = "0.1.0" -path = "./util" -features = 
["fs"] -[dependencies.kittybox-frontend-renderer] -version = "0.1.0" -path = "./templates" -[dependencies.kittybox-indieauth] -version = "0.1.0" -path = "./indieauth" -features = ["axum"] - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dev-dependencies] -tempfile = "^3.4.0" # Temporary file managment -wiremock = "^0.5.14" -faker_rand = "^0.1.1" # Seedable, rand-compatible generators of fake data -rand = "^0.8.5" # Utilities for random number generation -tracing-test = "^0.2.2" - -[dependencies] -argon2 = { version = "^0.5.0", features = ["std"] } -async-trait = "^0.1.50" # Type erasure for async trait methods -bytes = "^1.1.0" -data-encoding = "^2.3.2" # Efficient and customizable data-encoding functions like base64, base32, and hex -easy-scraper = "^0.2.0" # HTML scraping library focused on ease of use -either = "^1.6.1" # A general purpose sum type with two cases -futures = "^0.3.14" # An implementation of futures and streams -futures-util = "^0.3.14" # Common utilities and extension traits for the futures-rs library -hex = "^0.4.3" -lazy_static = "^1.4.0" # A macro for declaring lazily evaluated statics in Rust -listenfd = "^0.5.0" # A simple library to work with listenfds passed from the outside (systemd/catflap socket activation) -#log = "^0.4.14" # A lightweight logging facade for Rust -markdown = "^1.0.0-alpha.7" # Native Rust library for parsing Markdown and (outputting HTML) -newbase60 = "^0.1.3" # A library that implements Tantek Γ‡elik's New Base 60 -rand = "^0.8.4" # Random number generators. 
-serde_json = "^1.0.64" # A JSON serialization file format -serde_urlencoded = "^0.7.0" # `x-www-form-urlencoded` meets Serde -serde_variant = "^0.1.1" # Retrieve serde provided variant names for enum objects -relative-path = "^1.5.0" # Portable relative paths for Rust -sha2 = "^0.10.7" # SHA-2 series of algorithms for Rust -uuid = "^1.3.3" -tracing = { version = "0.1.34", features = [] } -tracing-tree = "0.2.1" -tracing-log = "0.1.3" -tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json"] } -tower-http = { version = "0.3.3", features = ["trace", "cors", "catch-panic"] } -tower = { version = "0.4.12", features = ["tracing"] } -webauthn = { version = "0.4.5", package = "webauthn-rs", features = ["danger-allow-state-serialisation"], optional = true } -[dependencies.tokio] -version = "^1.29.1" -features = ["full", "tracing"] # TODO determine if my app doesn't need some features -#[dependencies.console-subscriber] -#version = "0.1.10" -[dependencies.tokio-stream] -version = "^0.1.8" -features = ["time", "net"] -[dependencies.tokio-util] -version = "^0.7.3" -features = ["io-util"] -[dependencies.anyhow] -version = "^1.0.42" -optional = true -[dependencies.axum] -version = "^0.6.18" -features = ["multipart", "json", "headers", "form", "macros"] -[dependencies.axum-extra] -version = "^0.7.4" -features = ["cookie"] -[dependencies.chrono] # Date and time library for Rust -version = "^0.4.19" -features = ["serde"] -[dependencies.redis] -version = "^0.21.3" -optional = true -features = ["aio", "tokio-comp"] -[dependencies.prometheus] # Prometheus instrumentation library for Rust applications -version = "^0.13.0" -features = ["process"] -[dependencies.serde] # A generic serialization/deserialization framework -version = "^1.0.170" -features = ["derive"] -[dependencies.url] # URL library for Rust, based on the WHATWG URL Standard -version = "^2.2.1" -features = ["serde"] -[dependencies.hyper] -version = "^0.14.17" -features = ["stream", "runtime"] 
-[dependencies.reqwest] -version = "^0.11.10" -default-features = false -features = ["gzip", "brotli", "json", "stream"] -[dependencies.microformats] -version = "^0.3.0" -#git = "https://gitlab.com/maxburon/microformats-parser" - -[dependencies.clap] -version = "4.3.11" -features = ["derive"] -optional = true -[dependencies.thiserror] -version = "1.0.35" -[dependencies.sqlx] -version = "^0.7" -features = ["uuid", "chrono", "json", "postgres", "runtime-tokio"] -optional = true \ No newline at end of file diff --git a/kittybox-rs/build.rs b/kittybox-rs/build.rs deleted file mode 100644 index 05eca7a..0000000 --- a/kittybox-rs/build.rs +++ /dev/null @@ -1,30 +0,0 @@ -use std::env; - -fn main() { - println!("cargo:rerun-if-changed=migrations"); - - let out_dir = env::var("OUT_DIR").unwrap(); - - println!("cargo:rerun-if-changed=companion-lite/"); - let companion_out = std::path::Path::new(&out_dir).join("companion"); - - if let Ok(exit) = std::process::Command::new("tsc") - .arg("--outDir") - .arg(companion_out.as_os_str()) - .current_dir("companion-lite") - .spawn() - .unwrap() - .wait() - { - if !exit.success() { - std::process::exit(exit.code().unwrap_or(1)) - } - } - let companion_in = std::path::Path::new("companion-lite"); - for file in ["index.html", "style.css"] { - std::fs::copy( - companion_in.join(file), - &companion_out.join(file) - ).unwrap(); - } -} diff --git a/kittybox-rs/companion-lite/index.html b/kittybox-rs/companion-lite/index.html deleted file mode 100644 index fc99c60..0000000 --- a/kittybox-rs/companion-lite/index.html +++ /dev/null @@ -1,127 +0,0 @@ - - - - - Kittybox-Micropub debug client - - - - - - - - - - - - - -
- JavaScript licensing information for this software - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
main.jsAGPL-3.0main.ts (Kittybox source code)
micropub_api.jsAGPL-3.0micropub_api.ts (Kittybox source code)
indieauth.jsAGPL-3.0indieauth.ts (Kittybox source code)
base64.jsCC0 (Public Domain)base64.ts, adapted from MDN page on Base64
ESM.sh entrypoint for microformats-parserMIT/Expatesm.sh source code -
microformats-parserMIT/ExpatGitHub repository -
parse5 - MIT/Expat -
- BSD-2-Clause -
- GitHub repository for parse5 -
- GitHub repository for entitites, a parse5 dependency -
-
- - diff --git a/kittybox-rs/companion-lite/src/base64.ts b/kittybox-rs/companion-lite/src/base64.ts deleted file mode 100644 index 2429894..0000000 --- a/kittybox-rs/companion-lite/src/base64.ts +++ /dev/null @@ -1,89 +0,0 @@ -// Array of bytes to Base64 string decoding -function b64ToUint6(nChr: number) { - return nChr > 64 && nChr < 91 - ? nChr - 65 - : nChr > 96 && nChr < 123 - ? nChr - 71 - : nChr > 47 && nChr < 58 - ? nChr + 4 - : nChr === 43 - ? 62 - : nChr === 47 - ? 63 - : 0; -} - -export function decode(sBase64: string, nBlocksSize?: number) { - const sB64Enc = sBase64.replace(/[^A-Za-z0-9+/]/g, ""); - const nInLen = sB64Enc.length; - const nOutLen = nBlocksSize - ? Math.ceil(((nInLen * 3 + 1) >> 2) / nBlocksSize) * nBlocksSize - : (nInLen * 3 + 1) >> 2; - const taBytes = new Uint8Array(nOutLen); - - let nMod3; - let nMod4; - let nUint24 = 0; - let nOutIdx = 0; - for (let nInIdx = 0; nInIdx < nInLen; nInIdx++) { - nMod4 = nInIdx & 3; - nUint24 |= b64ToUint6(sB64Enc.charCodeAt(nInIdx)) << (6 * (3 - nMod4)); - if (nMod4 === 3 || nInLen - nInIdx === 1) { - nMod3 = 0; - while (nMod3 < 3 && nOutIdx < nOutLen) { - taBytes[nOutIdx] = (nUint24 >>> ((16 >>> nMod3) & 24)) & 255; - nMod3++; - nOutIdx++; - } - nUint24 = 0; - } - } - - return taBytes; -} - -/* Base64 string to array encoding */ -function uint6ToB64(nUint6: number) { - return nUint6 < 26 - ? nUint6 + 65 - : nUint6 < 52 - ? nUint6 + 71 - : nUint6 < 62 - ? nUint6 - 4 - : nUint6 === 62 - ? 43 - : nUint6 === 63 - ? 
47 - : 65; -} - -export function encode(aBytes: Uint8Array) { - let nMod3 = 2; - let sB64Enc = ""; - - const nLen = aBytes.length; - let nUint24 = 0; - for (let nIdx = 0; nIdx < nLen; nIdx++) { - nMod3 = nIdx % 3; - if (nIdx > 0 && ((nIdx * 4) / 3) % 76 === 0) { - sB64Enc += "\r\n"; - } - - nUint24 |= aBytes[nIdx] << ((16 >>> nMod3) & 24); - if (nMod3 === 2 || aBytes.length - nIdx === 1) { - sB64Enc += String.fromCodePoint( - uint6ToB64((nUint24 >>> 18) & 63), - uint6ToB64((nUint24 >>> 12) & 63), - uint6ToB64((nUint24 >>> 6) & 63), - uint6ToB64(nUint24 & 63) - ); - nUint24 = 0; - } - } - return ( - sB64Enc.substr(0, sB64Enc.length - 2 + nMod3) + - (nMod3 === 2 ? "" : nMod3 === 1 ? "=" : "==") - ); -} - -export default { encode, decode } diff --git a/kittybox-rs/companion-lite/src/indieauth.ts b/kittybox-rs/companion-lite/src/indieauth.ts deleted file mode 100644 index 40facab..0000000 --- a/kittybox-rs/companion-lite/src/indieauth.ts +++ /dev/null @@ -1,113 +0,0 @@ -// @ts-ignore -import { mf2 } from "https://esm.sh/microformats-parser@1.4.1?pin=v96" -import { MF2 } from "./micropub_api.js" -import base64 from "./base64.js" - /* - const { mf2 }: { - mf2: (html: string, options: { - baseUrl: string, - experimental?: { lang?: boolean, textContent?: boolean } - }) => { - items: MF2[], - rels: {[key: string]: string[]}, - "rel-urls": {[key: string]: { rels: string[], text?: string }} - } - } = - // @ts-ignore - await import("https://esm.sh/microformats-parser@1.4.1?pin=v96"); - */ - -interface IndieauthMetadata { - authorization_endpoint: string, - token_endpoint: string, - issuer: string, - introspection_endpoint?: string, - introspection_endpoint_auth_methods_supported?: ("Bearer")[], - revocation_endpoint?: string, - revocation_endpoint_auth_methods_supported?: ["none"], - scopes_supported?: string[], - response_types_supported: ["code"], - grant_types_supported: ("authorization_code" | "refresh_token")[] - code_challenge_methods_supported: ("S256")[] - 
authorization_response_iss_parameter_supported: true, - userinfo_endpoint?: string -} - -interface MF2ParsedData { - items: MF2[], - rels: {[key: string]: string[]}, - "rel-urls": {[key: string]: { rels: string[], text?: string }} -} - -export interface IndiewebEndpoints { - authorization_endpoint: URL, - token_endpoint: URL, - userinfo_endpoint: URL | null, - revocation_endpoint: URL | null, - micropub: URL, - -} - -export function create_verifier() { - const array = new Uint8Array(64) - crypto.getRandomValues(array) - - return array.reduce((str, byte) => str + byte.toString(16).padStart(2, '0'), '') -} - -export async function create_challenge(verifier: string): Promise { - return await crypto.subtle.digest('SHA-256', Uint8Array.from(verifier, c => c.charCodeAt(0))) - .then((buf) => base64.encode(new Uint8Array(buf))) - .then(s => { - return s - .replaceAll("+", "-") - .replaceAll("/", "_") - .replaceAll(/=$/g, "") - }) -} - -export async function discover_endpoints(me: URL): Promise { - const response = await fetch(me); - const data: MF2ParsedData = mf2(await response.text(), { baseUrl: me.toString() }); - let endpoints: Partial = {}; - if ("micropub" in data.rels) { - endpoints.micropub = new URL(data.rels.micropub[0]) - } else { - return null - } - if ("indieauth_metadata" in data.rels) { - const metadata_response = await fetch(data.rels.indieauth_metadata[0], { - headers: { - "Accept": "application/json" - } - }); - - const metadata = await metadata_response.json() as IndieauthMetadata; - endpoints.authorization_endpoint = new URL(metadata.authorization_endpoint) - endpoints.token_endpoint = new URL(metadata.token_endpoint) - if (metadata.userinfo_endpoint != null) { - endpoints.userinfo_endpoint = new URL(metadata.userinfo_endpoint) - } else { - endpoints.userinfo_endpoint = null - } - if (metadata.revocation_endpoint != null) { - endpoints.revocation_endpoint = new URL(metadata.revocation_endpoint) - } else { - endpoints.revocation_endpoint = null - } - - 
return endpoints as IndiewebEndpoints - } else if ( - "authorization_endpoint" in data.rels - && "token_endpoint" in data.rels - ) { - endpoints.authorization_endpoint = new URL(data.rels.authorization_endpoint[0]) - endpoints.token_endpoint = new URL(data.rels.token_endpoint[0]) - endpoints.userinfo_endpoint = null - endpoints.revocation_endpoint = null - - return endpoints as IndiewebEndpoints - } else { - return null - } -} diff --git a/kittybox-rs/companion-lite/src/main.ts b/kittybox-rs/companion-lite/src/main.ts deleted file mode 100644 index f45cb95..0000000 --- a/kittybox-rs/companion-lite/src/main.ts +++ /dev/null @@ -1,178 +0,0 @@ -import { Micropub, MicropubChannel, MF2 } from "./micropub_api.js"; - -const channel_select_radio = document.getElementById("select_channels") as HTMLInputElement; -channel_select_radio.onclick = async () => { - function populate_channel_list(channels: MicropubChannel[]) { - (document.getElementById("channels") as HTMLElement).style.display = "block"; - const channel_list = document.getElementById("channels_target") as HTMLElement; - channel_list.innerHTML = ""; - channels.forEach((channel) => { - const template = (document.getElementById("channel_selector") as HTMLTemplateElement).content.cloneNode(true) as HTMLElement; - const input = template.querySelector("input") as HTMLInputElement; - const label = template.querySelector("label") as HTMLLabelElement; - input.id = `channel_selector_option_${channel.uid}` - input.value = channel.uid - label.htmlFor = input.id - label.innerHTML = `${channel.name}` - - channel_list.appendChild(template) - }) - } - - if (micropub == null) { - throw new Error("need to authenticate first"); - } - const config = await micropub.config(); - if (config.channels !== undefined) { - populate_channel_list(config.channels) - } -} - -const no_channel_radio = document.getElementById("no_channel") as HTMLInputElement; -no_channel_radio.onclick = () => { - (document.getElementById("channels") as 
HTMLElement).style.display = "none"; - const channel_list = document.getElementById("channels_target") as HTMLElement - channel_list.innerHTML = ""; -} - -const main_form = document.getElementById("micropub") as HTMLFormElement; -main_form.onsubmit = async (event) => { - function construct_body(form: HTMLFormElement): MF2 { - let content = (form.elements.namedItem("content") as HTMLInputElement).value; - let name: string | undefined = (form.elements.namedItem("name") as HTMLInputElement).value || undefined; - let category: string[] = (form.elements.namedItem("category") as HTMLInputElement).value - .split(",") - .map(val => val.trim()); - - let channel: string[] | undefined = undefined; - let channel_select = (form.elements.namedItem("channel_select") as HTMLInputElement).value; - if (channel_select) { - let channel_selector = form.elements.namedItem("channel"); - if (channel_selector instanceof RadioNodeList) { - channel = (Array.from(channel_selector) as HTMLInputElement[]) - .map(i => i.checked ? i.value : false) - .filter(i => i) as string[]; - } else if (channel_selector instanceof HTMLInputElement) { - channel = [channel_selector.value] - } - } - return { - type: ["h-entry"], - properties: { - content: [content], - name: name ? [name] : undefined, - category: category.length ? category : undefined, - channel: channel ? 
channel : undefined - } - } - } - - event.preventDefault() - const mf2 = construct_body(main_form); - console.log(JSON.stringify(mf2)); - if (micropub == null) { - throw new Error("need to authenticate first"); - } - try { - const location = await micropub.submit(mf2); - main_form.clear() - - window.open(location, "_blank") - } catch (e) { - console.error(e) - alert(`Error: ${e}`) - return - } - -} - -const indieauth_form = document.getElementById("indieauth") as HTMLFormElement; -indieauth_form.onsubmit = async (event) => { - event.preventDefault() - const form = event.target as HTMLFormElement; - const me = (form.elements.namedItem("me") as HTMLInputElement).value; - if (me != null) { - const { discover_endpoints, create_verifier, create_challenge } = await import("./indieauth.js"); - - const endpoints = await discover_endpoints(new URL(me)); - - if (endpoints != null) { - localStorage.setItem("micropub_endpoint", endpoints.micropub.toString()) - localStorage.setItem("token_endpoint", endpoints.token_endpoint.toString()) - if (endpoints.revocation_endpoint != null) { - localStorage.setItem("revocation_endpoint", endpoints.revocation_endpoint.toString()) - } - } else { - alert("Your website doesn't support Micropub.") - return - } - (document.getElementById("unauthorized") as HTMLElement).style.display = "none"; - (document.getElementById("authorizing") as HTMLElement).style.display = "block"; - const url = endpoints.authorization_endpoint; - let params = new URLSearchParams(); - for (const [key, val] of url.searchParams) { - params.append(key, val) - } - params.set("client_id", window.location.href) - params.set("redirect_uri", window.location.href) - params.set("response_type", "code") - params.set("scope", "profile create media") - params.set("state", "awoo") - const code_verifier = create_verifier() - localStorage.setItem("code_verifier", code_verifier) - params.set("code_challenge", await create_challenge(code_verifier)) - params.set("code_challenge_method", 
"S256") - - url.search = "?" + params.toString() - - console.log(url) - - window.location.href = url.toString() - } -} - -if (window.location.search != "") { - (document.getElementById("authorizing") as HTMLElement).style.display = "block"; - const params = new URLSearchParams(window.location.search) - if (params.has("code") && params.has("state")) { - const token_endpoint = new URL(localStorage.getItem("token_endpoint")!) - const state = params.get("state") - // XXX check state - - const client_id = new URL(window.location.href); - client_id.search = ""; - const form = new URLSearchParams(); - form.set("grant_type", "authorization_code") - form.set("code", params.get("code")!) - form.set("client_id", client_id.toString()) - form.set("redirect_uri", client_id.toString()) - form.set("code_verifier", localStorage.getItem("code_verifier")!) - - const response = await fetch(token_endpoint, { - method: "POST", - headers: { - "Accept": "application/json", - "Content-Type": "application/x-www-form-urlencoded" - }, - body: form.toString() - }); - - const grant = await response.json(); - - if ("access_token" in grant) { - localStorage.setItem("access_token", grant.access_token); - (document.getElementById("authorizing") as HTMLElement).style.display = "none"; - } - } -} - -let micropub: Micropub | null = null; -const token = localStorage.getItem("access_token") -const endpoint = localStorage.getItem("micropub_endpoint") -if (token == null || endpoint == null) { - (document.getElementById("unauthorized") as HTMLElement).style.display = "block"; -} else { - (document.getElementById("authorized") as HTMLElement).style.display = "block"; - - micropub = new Micropub({ endpoint: new URL(endpoint), token }); -} diff --git a/kittybox-rs/companion-lite/src/micropub_api.ts b/kittybox-rs/companion-lite/src/micropub_api.ts deleted file mode 100644 index fa1c431..0000000 --- a/kittybox-rs/companion-lite/src/micropub_api.ts +++ /dev/null @@ -1,125 +0,0 @@ -export interface 
MicropubChannel { - readonly uid: string, - readonly name: string -} - -export interface MF2 { - type: string[], - properties: { [key:string]: (string | MF2 | {[key:string]: string})[] | undefined } -} - -export interface MicropubConfig { - readonly channels?: MicropubChannel[], - readonly "media-endpoint"?: string -} - -export interface MicropubErrorMessage { - readonly error: string, - readonly error_description: string | undefined -} - -export class MicropubError extends Error { - readonly status: number | null - readonly response: MicropubErrorMessage | null - - constructor(status: number | null, response: MicropubErrorMessage | null, cause: Error | null = null) { - // Needs to pass both `message` and `options` to install the "cause" property. - if (status == null) { - super("Micropub endpoint didn't respond properly", { cause }); - } else if (response == null) { - super(`Micropub endpoint returned HTTP ${status}`, { cause }); - } else { - super( - `Micropub endpoint returned ${response.error}: ${response.error_description ?? "(no description was provided)"}`, - { cause } - ) - } - - this.status = status; - this.response = response; - } -} - -export class Micropub { - readonly token: string - readonly micropub_endpoint: URL - private config_response: MicropubConfig | null - - constructor({ endpoint, token }: { endpoint: URL, token: string }) { - this.micropub_endpoint = endpoint; - this.token = token; - this.config_response = null; - } - - async config(): Promise { - if (this.config_response != null) { - return this.config_response - } - let url = this.micropub_endpoint; - let params = new URLSearchParams(); - for (const [key, val] of url.searchParams) { - params.append(key, val) - } - params.set("q", "config") - - url.search = "?" 
+ params.toString(); - - const response = await fetch(url, { - headers: { - "Authorization": `Bearer ${this.token}` - } - }); - if (response.ok) { - const config = await response.json() as MicropubConfig; - this.config_response = config - - return config - } else { - throw new MicropubError(response.status, await response.json() as MicropubErrorMessage); - } - } - - async submit(mf2: MF2): Promise { - const response = await fetch(this.micropub_endpoint, { - method: "POST", - headers: { - "Authorization": `Bearer ${this.token}`, - "Content-Type": "application/json" - }, - body: JSON.stringify(mf2) - }) - - if (response.status != 201 && response.status != 202) { - let err = await response.json() as MicropubErrorMessage; - - throw new MicropubError(response.status, err) - } else { - return new URL(response.headers.get("Location") as string) - } - } - - async upload(file: File): Promise { - const config = await this.config(); - const media = config["media-endpoint"]; - if (media == null) { - throw new Error("Micropub endpoint doesn't support file uploads") - } - - const form = new FormData(); - form.set("file", file); - - const response = await fetch(media, { - method: "POST", - headers: { - "Authorization": `Bearer ${this.token}`, - }, - body: form - }) - - if (response.ok) { - return new URL(response.headers.get("Location") as string) - } else { - throw new MicropubError(response.status, await response.json()); - } - } -} diff --git a/kittybox-rs/companion-lite/style.css b/kittybox-rs/companion-lite/style.css deleted file mode 100644 index 09ed398..0000000 --- a/kittybox-rs/companion-lite/style.css +++ /dev/null @@ -1,47 +0,0 @@ -* { - box-sizing: border-box; -} - -:root { - font-family: sans-serif; -} - -body { - margin: 0; -} - -body > main { - margin: auto; - max-width: 1024px; -} - -h1.header { - margin-top: 0.75em; - text-align: center; -} - -fieldset + fieldset, -fieldset + input, -section + section, -section + fieldset -{ - margin-top: 0.75em; -} - 
-input[type="submit"] { - margin-left: auto; - display: block; -} - -form > fieldset > section > label { - width: 100%; - display: block; -} - -form > fieldset > section > input, form > fieldset > section > textarea { - width: 100%; -} - -textarea { - min-height: 10em; -} diff --git a/kittybox-rs/companion-lite/tsconfig.json b/kittybox-rs/companion-lite/tsconfig.json deleted file mode 100644 index 18b94c7..0000000 --- a/kittybox-rs/companion-lite/tsconfig.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "compilerOptions": { - /* Visit https://aka.ms/tsconfig to read more about this file */ - - /* Projects */ - // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ - // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ - // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ - // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ - // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ - // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ - - /* Language and Environment */ - "target": "es2022", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ - // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ - // "jsx": "preserve", /* Specify what JSX code is generated. */ - // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ - // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ - // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 
'React.createElement' or 'h'. */ - // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ - // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ - // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ - // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ - // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ - // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ - - /* Modules */ - "module": "es2022", /* Specify what module code is generated. */ - // "rootDir": "./", /* Specify the root folder within your source files. */ - // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ - // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ - // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ - // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ - // "types": [], /* Specify type package names to be included without being referenced in a source file. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ - // "resolveJsonModule": true, /* Enable importing .json files. */ - // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ - - /* JavaScript Support */ - // "allowJs": true, /* Allow JavaScript files to be a part of your program. 
Use the 'checkJS' option to get errors from these files. */ - // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ - // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ - - /* Emit */ - // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ - // "declarationMap": true, /* Create sourcemaps for d.ts files. */ - // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ - // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ - // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ - "outDir": "./dist", /* Specify an output folder for all emitted files. */ - // "removeComments": true, /* Disable emitting comments. */ - // "noEmit": true, /* Disable emitting files from a compilation. */ - // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ - // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ - // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ - // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ - // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ - // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. 
*/ - // "newLine": "crlf", /* Set the newline character for emitting files. */ - // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ - // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ - // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ - // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ - // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ - // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ - - /* Interop Constraints */ - // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ - // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ - "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ - // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ - "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ - - /* Type Checking */ - "strict": true, /* Enable all strict type-checking options. */ - // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ - // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ - // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ - // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. 
*/ - // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ - // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ - // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ - // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ - // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ - // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ - // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ - // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ - // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ - // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ - // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ - // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ - // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ - // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ - - /* Completeness */ - // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ - "skipLibCheck": true /* Skip type checking all .d.ts files. 
*/ - }, - "include": ["src/**/*"] -} diff --git a/kittybox-rs/dev.sh b/kittybox-rs/dev.sh deleted file mode 100755 index 979b85a..0000000 --- a/kittybox-rs/dev.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/sh -if [[ -z $KITTYBOX_LOG ]]; then - export KITTYBOX_LOG="kittybox=debug,retainer::cache=warn,h2=info,rustls=info,tokio=info,tower_http::trace=debug,sqlx=trace" -fi -#export BACKEND_URI=file://./test-dir -export BACKEND_URI="postgres://localhost?dbname=kittybox&host=/run/postgresql" -export JOB_QUEUE_URI="postgres://localhost?dbname=kittybox&host=/run/postgresql" -export BLOBSTORE_URI=file://./media-store -export AUTH_STORE_URI=file://./auth-store -export COOKIE_SECRET=1234567890abcdefghijklmnopqrstuvwxyz -#export COOKIE_SECRET_FILE=/dev/null -if [[ "$1" == "watch" ]]; then - shift - exec systemfd --no-pid -s http::8080 -- cargo watch \ - --ignore '.direnv' \ - --ignore '*~' \ - -L "$KITTYBOX_LOG" \ - -x run \ - "$@" -elif [[ "$1" == "run" ]]; then - shift 1 - export RUST_LOG="$KITTYBOX_LOG" - exec cargo run "$@" -elif [[ "$1" == "_run" ]]; then - shift 1 - release=$1 - shift 1 - exec target/${release}/kittybox "$@" -else - echo "Usage: ./dev.sh [watch|run]" - exit 1 -fi diff --git a/kittybox-rs/examples/password-hasher.rs b/kittybox-rs/examples/password-hasher.rs deleted file mode 100644 index f11c591..0000000 --- a/kittybox-rs/examples/password-hasher.rs +++ /dev/null @@ -1,33 +0,0 @@ -use std::io::Write; - -use argon2::{Argon2, password_hash::{rand_core::OsRng, PasswordHasher, PasswordHash, PasswordVerifier, SaltString}}; - -fn main() -> std::io::Result<()> { - eprint!("Type a password: "); - std::io::stderr().flush().unwrap(); - let password = { - let mut s = String::new(); - std::io::stdin().read_line(&mut s)?; - - s - }; - - let salt = SaltString::generate(&mut OsRng); - let argon2 = Argon2::default(); - //eprintln!("{}", password.trim()); - let password_hash = argon2.hash_password(password.trim().as_bytes(), &salt) - .expect("Hashing a password should not 
error out") - .serialize(); - - println!("{}", password_hash.as_str()); - - assert!(Argon2::default() - .verify_password( - password.trim().as_bytes(), - &PasswordHash::new(password_hash.as_str()) - .expect("Password hash should be valid") - ).is_ok() - ); - - Ok(()) -} diff --git a/kittybox-rs/indieauth/Cargo.toml b/kittybox-rs/indieauth/Cargo.toml deleted file mode 100644 index d6bc1fe..0000000 --- a/kittybox-rs/indieauth/Cargo.toml +++ /dev/null @@ -1,31 +0,0 @@ -[package] -name = "kittybox-indieauth" -version = "0.1.0" -edition = "2021" - -[features] -default = [] -axum = ["axum-core", "serde_json", "http"] - -[dev-dependencies] -serde_json = "^1.0.64" # A JSON serialization file format -serde_urlencoded = "^0.7.0" # `x-www-form-urlencoded` meets Serde -[dependencies] -rand = "^0.8.5" # Utilities for random number generation -data-encoding = "^2.3.2" # Efficient and customizable data-encoding functions like base64, base32, and hex -sha2 = "^0.10.7" # SHA-2 series of algorithms for Rust -[dependencies.url] # URL library for Rust, based on the WHATWG URL Standard -version = "^2.2.1" -features = ["serde"] -[dependencies.serde] # A generic serialization/deserialization framework -version = "^1.0.170" -features = ["derive"] -[dependencies.axum-core] -version = "^0.3.4" -optional = true -[dependencies.serde_json] -version = "^1.0.64" -optional = true -[dependencies.http] -version = "^0.2.7" -optional = true \ No newline at end of file diff --git a/kittybox-rs/indieauth/src/lib.rs b/kittybox-rs/indieauth/src/lib.rs deleted file mode 100644 index a60cc42..0000000 --- a/kittybox-rs/indieauth/src/lib.rs +++ /dev/null @@ -1,773 +0,0 @@ -#![deny(missing_docs)] -#![forbid(rustdoc::broken_intra_doc_links)] -//! A library of useful structs and helpers to implement [IndieAuth -//! version 20220212][indieauth]. -//! -//! This crate is completely network-agnostic, which means it can be -//! used with both sync and async web frameworks, and even on the -//! 
client side to implement identity consumers. -//! -//! ## Integration with web frameworks -//! -//! For convenience, helpers for [`axum`], the web framework Kittybox -//! happens to use, are provided. Enable the `axum` feature to use -//! them. -//! -//! The author is happy to accept patches to add more -//! framework-specific helpers. -//! -//! [indieauth]: https://indieauth.spec.indieweb.org/20220212/ -//! [`axum`]: https://github.com/tokio-rs/axum -use serde::{Serialize, Deserialize}; -use url::Url; - -mod scopes; -pub use self::scopes::{Scope, Scopes}; -mod pkce; -pub use self::pkce::{PKCEMethod, PKCEVerifier, PKCEChallenge}; - -/// Authentication methods supported by the introspection endpoint. -/// Note that authentication at the introspection endpoint is -/// mandatory. -#[derive(Copy, Clone, Debug, Serialize, Deserialize)] -pub enum IntrospectionEndpointAuthMethod { - /// `Authorization` header with a `Bearer` token. - Bearer, - /// A token passed as part of a POST request. - #[serde(rename = "snake_case")] - ClientSecretPost, - /// Username and password passed using HTTP Basic authentication. - #[serde(rename = "snake_case")] - ClientSecretBasic, - /// TLS client auth with a certificate signed by a valid CA. - #[serde(rename = "snake_case")] - TlsClientAuth, - /// TLS client auth with a self-signed certificate. - #[serde(rename = "snake_case")] - SelfSignedTlsClientAuth -} - -/// Authentication methods supported by the revocation endpoint. -/// -/// The intent of the IndieAuth revocation endpoints is to quickly -/// revoke leaked tokens. As it requires posession of a token, no -/// authentication is neccesary to protect tokens. A well-intentioned -/// person discovering a leaked token could quickly revoke it without -/// disturbing anyone. -#[derive(Copy, Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum RevocationEndpointAuthMethod { - /// No authentication is required to access an endpoint declaring - /// this value. 
- None -} - -/// The response types supported by the authorization endpoint. -#[derive(Copy, Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum ResponseType { - /// An authorization code will be issued if this response type is - /// requested. - Code -} -// TODO serde_variant -impl ResponseType { - /// Return the response type as it would appear in serialized form. - pub fn as_str(&self) -> &'static str { - match self { - ResponseType::Code => "code", - } - } -} - -/// Grant types that are described in the IndieAuth spec. -/// -/// This type is strictly for usage in the [`Metadata`] response. For -/// grant requests and responses, see [`GrantRequest`] and -/// [`GrantResponse`]. -#[derive(Copy, Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum GrantType { - /// The authorization code grant, allowing to exchange an - /// authorization code for a confirmation of identity or an access - /// token. - AuthorizationCode, - /// The refresh token grant, allowing to exchange a refresh token - /// for a fresh access token and a new refresh token, to - /// facilitate long-term access. - RefreshToken -} - -/// OAuth 2.0 Authorization Server Metadata in application to the IndieAuth protocol. -/// -/// Your metadata endpoint should return this as a response. 
-/// -/// ```rust -/// use kittybox_indieauth::{ -/// Metadata, IntrospectionEndpointAuthMethod, RevocationEndpointAuthMethod, -/// ResponseType, Scope, GrantType, PKCEMethod -/// }; -/// -/// let metadata = Metadata { -/// issuer: "https://indieauth.example.com/".parse().unwrap(), -/// authorization_endpoint: "https://indieauth.example.com/auth".parse().unwrap(), -/// token_endpoint: "https://indieauth.example.com/token".parse().unwrap(), -/// introspection_endpoint: "https://indieauth.example.com/introspection".parse().unwrap(), -/// introspection_endpoint_auth_methods_supported: Some(vec![IntrospectionEndpointAuthMethod::Bearer]), -/// revocation_endpoint: Some("https://indieauth.example.com/revoke".parse().unwrap()), -/// revocation_endpoint_auth_methods_supported: Some(vec![RevocationEndpointAuthMethod::None]), -/// scopes_supported: Some(vec![Scope::Create, Scope::Update, Scope::custom("manage_tokens")]), -/// response_types_supported: Some(vec![ResponseType::Code]), -/// grant_types_supported: Some(vec![GrantType::AuthorizationCode, GrantType::RefreshToken]), -/// service_documentation: Some("https://indieauth.spec.indieweb.org/".parse().unwrap()), -/// code_challenge_methods_supported: vec![PKCEMethod::S256], -/// authorization_response_iss_parameter_supported: Some(true), -/// userinfo_endpoint: Some("https://indieauth.example.com/userinfo".parse().unwrap()) -/// }; -/// ``` -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct Metadata { - /// The server's issuer identifier. The issuer identifier is a URL - /// that uses the "https" scheme and has no query or fragment - /// components. The identifier MUST be a prefix of the - /// `indieauth-metadata` URL. 
- pub issuer: Url, - /// The Authorization Endpoint - pub authorization_endpoint: Url, - /// The Token Endpoint - pub token_endpoint: Url, - /// The Introspection Endpoint - pub introspection_endpoint: Url, - /// JSON array containing a list of client authentication methods - /// supported by this introspection endpoint. - #[serde(skip_serializing_if = "Option::is_none")] - pub introspection_endpoint_auth_methods_supported: Option>, - /// The Revocation Endpoint - #[serde(skip_serializing_if = "Option::is_none")] - pub revocation_endpoint: Option, - /// JSON array containing the value - /// [`RevocationEndpointAuthMethod::None`]. If a revocation endpoint - /// is provided, this property should also be provided with the - /// value `vec![RevocationEndpointAuthMethod::None]`, since the - /// omission of this value defaults to `client_secret_basic` - /// according to [RFC8414]. - /// - /// [RFC8414]: https://www.rfc-editor.org/rfc/rfc8414 - #[serde(skip_serializing_if = "Option::is_none")] - pub revocation_endpoint_auth_methods_supported: Option>, - /// JSON array containing scope values supported by the IndieAuth - /// server. Servers MAY choose not to advertise some supported - /// scope values even when this parameter is used. - // Note: Scopes isn't used here because this field should be - // serialized as a list, not as a string - #[serde(skip_serializing_if = "Option::is_none")] - pub scopes_supported: Option>, - /// JSON array containing the response_type values supported. This - /// differs from [RFC8414] in that this parameter is OPTIONAL and - /// that, if omitted, the default is [`ResponseType::Code`]. - /// - /// [RFC8414]: https://www.rfc-editor.org/rfc/rfc8414 - #[serde(skip_serializing_if = "Option::is_none")] - pub response_types_supported: Option>, - /// JSON array containing grant type values supported. If omitted, - /// the default value differs from [RFC8414] and is - /// `authorization_code`. 
- /// - /// [RFC8414]: https://www.rfc-editor.org/rfc/rfc8414 - #[serde(skip_serializing_if = "Option::is_none")] - pub grant_types_supported: Option>, - /// URL of a page containing human-readable information that - /// developers might need to know when using the server. This - /// might be a link to the IndieAuth spec or something more - /// personal to your implementation. - #[serde(skip_serializing_if = "Option::is_none")] - pub service_documentation: Option, - /// JSON array containing the methods supported for PKCE. This - /// parameter differs from [RFC8414] in that it is not optional as - /// PKCE is *REQUIRED*. - /// - /// [RFC8414]: https://www.rfc-editor.org/rfc/rfc8414 - pub code_challenge_methods_supported: Vec, - /// Boolean parameter indicating whether the authorization server - /// provides the iss parameter. If omitted, the default value is - /// false. As the iss parameter is REQUIRED, this is provided for - /// compatibility with OAuth 2.0 servers implementing the - /// parameter. - #[serde(skip_serializing_if = "Option::is_none")] - pub authorization_response_iss_parameter_supported: Option, - /// The User Info Endpoint - #[serde(skip_serializing_if = "Option::is_none")] - pub userinfo_endpoint: Option -} - -#[cfg(feature = "axum")] -impl axum_core::response::IntoResponse for Metadata { - fn into_response(self) -> axum_core::response::Response { - use http::StatusCode; - - (StatusCode::OK, - [("Content-Type", "application/json")], - serde_json::to_vec(&self).unwrap()) - .into_response() - } -} - -/// User profile to be returned from the userinfo endpoint and when -/// the `profile` scope was requested. -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct Profile { - /// User's chosen name. - #[serde(skip_serializing_if = "Option::is_none")] - pub name: Option, - /// User's profile page. Fetching it may reveal an `h-card`. 
- #[serde(skip_serializing_if = "Option::is_none")] - pub url: Option, - /// User's profile picture suitable to represent them. - #[serde(skip_serializing_if = "Option::is_none")] - pub photo: Option, - /// User's email, if they've chosen to reveal it. This is guarded - /// by the `email` scope. - #[serde(skip_serializing_if = "Option::is_none")] - pub email: Option -} - -#[cfg(feature = "axum")] -impl axum_core::response::IntoResponse for Profile { - fn into_response(self) -> axum_core::response::Response { - use http::StatusCode; - - (StatusCode::OK, - [("Content-Type", "application/json")], - serde_json::to_vec(&self).unwrap()) - .into_response() - } -} - -/// A state string comprised of alphanumeric characters to protect -/// from CSRF attacks. -/// -/// There is no reason to inspect the string itself except to ensure -/// it hasn't been tampered with. -#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] -pub struct State(String); -impl State { - /// Generate a random state string of 128 bytes in length. - pub fn new() -> Self { - use rand::{Rng, distributions::Alphanumeric}; - let bytes = rand::thread_rng() - .sample_iter(&Alphanumeric) - .take(128) - .collect::>(); - Self(String::from_utf8(bytes).unwrap()) - } -} -impl AsRef for State { - fn as_ref(&self) -> &str { - self.0.as_str() - } -} - -/// The authorization request that should be affixed to the URL of an -/// authorization endpoint to start the IndieAuth ceremony. 
-/// -/// ```rust -/// use kittybox_indieauth::{ -/// AuthorizationRequest, ResponseType, State, -/// Scopes, Scope, -/// PKCEChallenge, PKCEVerifier, PKCEMethod -/// }; -/// -/// // Save that for later, it'll come in handy -/// let verifier = PKCEVerifier::new(); -/// -/// let request = AuthorizationRequest { -/// response_type: ResponseType::Code, -/// client_id: "https://kittybox.fireburn.ru/companion/native".parse().unwrap(), -/// redirect_uri: "https://kittybox.fireburn.ru/companion/native/redirect".parse().unwrap(), -/// state: State::new(), -/// code_challenge: PKCEChallenge::new(&verifier, PKCEMethod::default()), -/// scope: Some(Scopes::new(vec![Scope::Create, Scope::Update, Scope::Delete, Scope::Media])), -/// me: Some("https://fireburn.ru/".parse().unwrap()) -/// }; -/// -/// let mut url: url::Url = "https://fireburn.ru/.kittybox/indieauth/auth" -/// .parse() -/// .unwrap(); -/// -/// url.set_query(Some(&serde_urlencoded::to_string(request).unwrap())); -/// -/// // Open a user's browser to navigate to the authorization endpoint page... -/// ``` -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AuthorizationRequest { - /// The response type expected to this request. - pub response_type: ResponseType, - /// The homepage of the client. It must be fetched to show - /// metadata and check the redirect URI's authenticity. - pub client_id: Url, - /// The URI that the user will be redirected to in case they - /// approve the authentication request. A query string containing - /// the response is affixed to it. - pub redirect_uri: Url, - /// A random state to protect from CSRF attacks. The server should - /// return this string unmodified. - pub state: State, - /// A PKCE challenge neccesary to protect from authorization code - /// injection and CSRF attacks. - #[serde(flatten)] - pub code_challenge: PKCEChallenge, - /// An array of scopes that are requested for a token. If no - /// scopes are provided, a token will not be issued. 
- #[serde(skip_serializing_if = "Option::is_none")] - pub scope: Option, - /// The URL that user entered. The authorization endpoint MAY use - /// it as a hint of which user is attempting to sign in, and to - /// indicate which profile URL the client is expecting in the - /// resulting profile URL response or access token response. - #[serde(skip_serializing_if = "Option::is_none")] - pub me: Option -} - -/// The authorization response that must be appended to the -/// [`AuthorizationRequest::redirect_uri`]'s query string. -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AuthorizationResponse { - /// The authorization code generated by the authorization - /// endpoint. The code MUST expire shortly after it is issued to - /// mitigate the risk of leaks, and MUST be valid for only one - /// use. A maximum lifetime of 10 minutes is recommended. See - /// [OAuth 2.0 Section 4.1.2][oauth2-sec-4.1.2] for additional - /// requirements on the authorization code. - /// - /// [oauth2-sec-4.1.2]: https://tools.ietf.org/html/rfc6749#section-4.1.2 - pub code: String, - /// The state parameter from the [AuthorizationRequest], - /// unmodified. - pub state: State, - /// The issuer identifier for client validation. - /// - /// Clients MUST verify this matches the [`Metadata::issuer`] - /// parameter provided by the Server [Metadata] endpoint during - /// Discovery as outlined in [OAuth 2.0 Authorization Server - /// Issuer Identification][oauth2-iss]. If the value does not - /// match the expected issuer identifier, clients MUST reject the - /// authorization response and MUST NOT proceed with the - /// authorization grant. For error responses, clients MUST NOT - /// assume that the error originates from the intended - /// authorization server. - /// - /// [oauth2-iss]: https://www.ietf.org/archive/id/draft-ietf-oauth-iss-auth-resp-02.html - pub iss: Url -} - -/// A grant request that continues the IndieAuth ceremony. 
-#[derive(PartialEq, Debug, Clone, Serialize, Deserialize)] -#[serde(tag = "grant_type")] -#[serde(rename_all = "snake_case")] -pub enum GrantRequest { - /// Use an authorization code to receive identity verification - /// and/or an access token. - AuthorizationCode { - /// The code from [`AuthorizationResponse`]. - code: String, - /// Client ID that this grant belongs to. - client_id: Url, - /// Redirect URI that was used to receive the grant. - redirect_uri: Url, - /// The PKCE code verifier that was used to create the code - /// challenge. - code_verifier: PKCEVerifier - }, - /// Use a refresh token to get a fresh access token and a new - /// matching refresh token. - RefreshToken { - /// The refresh token that was issued before. - refresh_token: String, - /// The client ID to which the token belongs to. - client_id: url::Url, - /// A list of scopes, not exceeding the already-granted scope, - /// that can be passed to further restrict the scopes on the - /// new token. - /// - /// This cannot be used to gain new scopes -- you need to - /// start over if you need new scopes from the user. - scope: Option - } -} - -/// Token type, as described in [RFC6749][]. -/// -/// [RFC6749]: https://www.rfc-editor.org/rfc/rfc6749#section-7.1 -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum TokenType { - /// A Bearer token described in [RFC6750][]. As far as the author - /// of this library is concerned, this is the only type that - /// IndieAuth uses. - /// - /// [RFC6750]: https://www.rfc-editor.org/rfc/rfc6750 - Bearer -} - -/// The response to a successful [`GrantRequest`]. -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(untagged)] -pub enum GrantResponse { - /// An access token response, containing an access token, a refresh - /// token (if the identity provider supports them) and the profile - /// (if access was granted to the profile data). 
- AccessToken { - /// The URL for the user this token corresponds to. - me: Url, - /// Token type. Required by OAuth2, not mentioned in - /// IndieAuth. Confirmed as erroneous. - token_type: TokenType, - /// Scopes. REQUIRED if different from what was - /// requested. Absence from IndieAuth spec confirmed as - /// erroneous. - scope: Option, - /// The user's profile information, if it was requested. - #[serde(skip_serializing_if = "Option::is_none")] - profile: Option, - /// The access token that can be used to access protected resources. - access_token: String, - /// The duration in which the access token expires, represented in seconds. - // TODO replace with std::time::Duration - #[serde(skip_serializing_if = "Option::is_none")] - expires_in: Option, - /// The refresh token, if it was issued. - #[serde(skip_serializing_if = "Option::is_none")] - refresh_token: Option - }, - /// A profile URL response, that only contains the profile URL and - /// the profile, if it was requested. - /// - /// This is suitable for confirming the identity of the user, but - /// no more than that. - ProfileUrl { - /// The authenticated user's URL. - me: Url, - /// The user's profile information, if it was requested. - #[serde(skip_serializing_if = "Option::is_none")] - profile: Option - } -} - -#[cfg(feature = "axum")] -impl axum_core::response::IntoResponse for GrantResponse { - fn into_response(self) -> axum_core::response::Response { - use http::StatusCode; - - (StatusCode::OK, - [("Content-Type", "application/json"), - ("Cache-Control", "no-store"), - ("Pragma", "no-cache") - ], - serde_json::to_vec(&self).unwrap()) - .into_response() - } -} - -/// Describes requests that the authorization endpoint might want to handle. -/// -/// This type mostly exists for ease-of-use with serde. 
-#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(untagged)] -#[allow(missing_docs)] -pub enum RequestMaybeAuthorizationEndpoint { - Authorization(AuthorizationRequest), - Grant(GrantRequest) -} - -/// A token introspection request that can be handled by the token -/// introspection endpoint. -/// -/// Note that this request doesn't contain authentication data, which -/// is commonly transmitted out-of-band (e.g. via the `Authorization` -/// header). -#[derive(Debug, Serialize, Deserialize)] -pub struct TokenIntrospectionRequest { - /// The token for which data was requested. - pub token: String -} - -/// Data for a token that will be returned by the introspection -/// endpoint (and can also be used internally by the resource server -/// if it is part of a monolith with the identity provider). -#[derive(Debug, Serialize, Deserialize)] -pub struct TokenData { - /// The user this token corresponds to. - pub me: Url, - /// The client ID for the client that this token was issued to. - pub client_id: Url, - /// Scope that was granted to this token. - pub scope: Scopes, - /// The expiration date for this token, measured in seconds from - /// the Unix time epoch (1970-01-01 00:00:00). - // TODO replace these two with std::time::SystemTime - #[serde(skip_serializing_if = "Option::is_none")] - pub exp: Option, - /// The issue date, represented in the same format as the - /// [`exp`][TokenData::exp] field. - #[serde(skip_serializing_if = "Option::is_none")] - pub iat: Option -} - -impl TokenData { - /// Check if the token in question expired. - pub fn expired(&self) -> bool { - use std::time::{Duration, SystemTime, UNIX_EPOCH}; - - self.exp - .map(|exp| SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap_or(Duration::ZERO) - .as_secs() >= exp) - .unwrap_or_default() - } - - /// Return a timestamp at which the token is not considered valid anymore. 
- pub fn expires_at(&self) -> Option { - self.exp.map(|time| { - std::time::UNIX_EPOCH + std::time::Duration::from_secs(time) - }) - } - /// Return a timestamp describing when the token was issued. - pub fn issued_at(&self) -> Option { - self.iat.map(|time| { - std::time::UNIX_EPOCH + std::time::Duration::from_secs(time) - }) - } - - /// Check if a certain scope is allowed for this token. - pub fn check_scope(&self, scope: &Scope) -> bool { - self.scope.has(scope) - } -} - -// I don't like this type, because it could've been represented -// internally by Option. But the IndieAuth standard -// requires the "active" field to be present. I can't do anything -// about it. -/// The introspection response that the introspection endpoint must -/// return. -/// -/// It is recommended to use the [`From`][`std::convert::From`] trait -/// to convert from `Option` for ergonomics. -#[derive(Debug, Serialize, Deserialize)] -pub struct TokenIntrospectionResponse { - active: bool, - #[serde(flatten)] - #[serde(skip_serializing_if = "Option::is_none")] - data: Option -} -// These wrappers and impls should take care of making use of this -// type as painless as possible. -impl TokenIntrospectionResponse { - /// Indicate that this token is not valid. - pub fn inactive() -> Self { - Self { active: false, data: None } - } - /// Indicate that this token is valid, and provide data about it. - pub fn active(data: TokenData) -> Self { - Self { active: true, data: Some(data) } - } - /// Check if the endpoint reports this token as valid. - pub fn is_active(&self) -> bool { - self.active - } - - /// Get data contained in the response, if the token is valid. 
- pub fn data(&self) -> Option<&TokenData> { - if !self.active { - return None - } - self.data.as_ref() - } -} -impl Default for TokenIntrospectionResponse { - fn default() -> Self { - Self::inactive() - } -} -impl From> for TokenIntrospectionResponse { - fn from(data: Option) -> Self { - Self { active: data.is_some(), data } - } -} -impl From for Option { - fn from(response: TokenIntrospectionResponse) -> Option { - response.data - } -} - -#[cfg(feature = "axum")] -impl axum_core::response::IntoResponse for TokenIntrospectionResponse { - fn into_response(self) -> axum_core::response::Response { - use http::StatusCode; - - (StatusCode::OK, - [("Content-Type", "application/json")], - serde_json::to_vec(&self).unwrap()) - .into_response() - } -} - -/// A request for revoking a token. There is no response beyond `HTTP -/// 200 OK`. -#[derive(Debug, Serialize, Deserialize)] -pub struct TokenRevocationRequest { - /// The token that needs to be revoked in case it is valid. - pub token: String -} - -/// Types of errors that a resource server (IndieAuth consumer) can -/// throw when authentication goes wrong. -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum ResourceErrorKind { - /// The provided token was invalid. - InvalidToken, - /// The scope on the token was insufficient to perform the - /// requested operation. - InsufficientScope, -} - -/// Various kinds of errors that could occur when performing the -/// IndieAuth ceremony. -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum ErrorKind { - /// The request is missing a required parameter, includes an - /// unsupported parameter value (other than grant type), repeats a - /// parameter, includes multiple credentials, utilizes more than - /// one mechanism for authenticating the client, or is otherwise - /// malformed. 
- InvalidRequest, - /// Client authentication failed (e.g., unknown client, no client - /// authentication included, or unsupported authentication - /// method). The authorization server MAY return an HTTP 401 - /// (Unauthorized) status code to indicate which HTTP - /// authentication schemes are supported. If the client attempted - /// to authenticate via the "Authorization" request header field, - /// the authorization server MUST respond with an HTTP 401 - /// (Unauthorized) status code and include the "WWW-Authenticate" - /// response header field matching the authentication scheme used - /// by the client. - InvalidClient, - /// The provided authorization grant (e.g., authorization - /// code, resource owner credentials) or refresh token is - /// invalid, expired, revoked, does not match the redirection - /// URI used in the authorization request, or was issued to - /// another client. - InvalidGrant, - /// The authenticated client is not authorized to use this - /// authorization grant type. - UnauthorizedClient, - /// The authorization grant type is not supported by the - /// authorization server. - UnsupportedGrantType, - /// The requested scope is invalid, unknown, malformed, or - /// exceeds the scope granted by the resource owner. - InvalidScope -} -// TODO consider relying on serde_variant for these conversions -impl AsRef for ErrorKind { - fn as_ref(&self) -> &str { - match self { - ErrorKind::InvalidRequest => "invalid_request", - ErrorKind::InvalidClient => "invalid_client", - ErrorKind::InvalidGrant => "invalid_grant", - ErrorKind::UnauthorizedClient => "unauthorized_client", - ErrorKind::UnsupportedGrantType => "unsupported_grant_type", - ErrorKind::InvalidScope => "invalid_scope", - } - } -} -impl std::fmt::Display for ErrorKind { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.as_ref()) - } -} - -/// An error that can be returned when performing the IndieAuth ceremony. 
-#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct Error { - /// Type of an error. - #[serde(rename = "error")] - pub kind: ErrorKind, - /// Human-friendly description of an error, suitable for a - /// developer to read while debugging. - #[serde(rename = "error_description")] - pub msg: Option, - /// An URL to documentation describing what went wrong and how to - /// fix it. - pub error_uri: Option -} - -impl From for Error { - fn from(kind: ErrorKind) -> Error { - Error { - kind, msg: None, error_uri: None - } - } -} - -impl std::error::Error for self::Error {} - -impl std::fmt::Display for self::Error { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "IndieAuth error ({})", self.kind)?; - if let Some(msg) = self.msg.as_deref() { - write!(f, ": {}", msg)?; - } - if let Some(error_uri) = &self.error_uri { - write!(f, " (see `{}` for more info)", error_uri)?; - } - - Ok(()) - } -} - -#[cfg(feature = "axum")] -impl axum_core::response::IntoResponse for self::Error { - fn into_response(self) -> axum_core::response::Response { - use http::StatusCode; - - (StatusCode::BAD_REQUEST, - [("Content-Type", "application/json")], - serde_json::to_vec(&self).unwrap()) - .into_response() - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_serialize_deserialize_grant_request() { - let authorization_code: GrantRequest = GrantRequest::AuthorizationCode { - client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), - redirect_uri: "https://kittybox.fireburn.ru/.kittybox/login/redirect".parse().unwrap(), - code_verifier: PKCEVerifier("helloworld".to_string()), - code: "hithere".to_owned() - }; - let serialized = serde_urlencoded::to_string(&[ - ("grant_type", "authorization_code"), - ("code", "hithere"), - ("client_id", "https://kittybox.fireburn.ru/"), - ("redirect_uri", "https://kittybox.fireburn.ru/.kittybox/login/redirect"), - ("code_verifier", "helloworld"), - ]).unwrap(); - - let deserialized = 
serde_urlencoded::from_str(&serialized).unwrap(); - - assert_eq!(authorization_code, deserialized); - - assert_eq!( - serialized, - serde_urlencoded::to_string(authorization_code).unwrap() - ) - } -} diff --git a/kittybox-rs/indieauth/src/pkce.rs b/kittybox-rs/indieauth/src/pkce.rs deleted file mode 100644 index bf8d1a0..0000000 --- a/kittybox-rs/indieauth/src/pkce.rs +++ /dev/null @@ -1,132 +0,0 @@ -use serde::{Serialize, Deserialize}; -use rand::{Rng, distributions::Alphanumeric}; -use sha2::{Sha256, Digest}; -use data_encoding::BASE64URL; - -/// Methods to use for PKCE challenges. -#[derive(PartialEq, Eq, Copy, Clone, Debug, Serialize, Deserialize, /*Default*/)] -pub enum PKCEMethod { - /// Base64-encoded SHA256 hash of an ASCII string. - //#[default] - S256, - /// Plain string by itself. Please don't use this. - #[serde(rename = "snake_case")] - Plain -} -// manual impl until Rust 1.62 hits nixos-unstable -impl Default for PKCEMethod { - fn default() -> Self { PKCEMethod::S256 } -} -impl PKCEMethod { - /// Return a string representing a PKCE method as it would be serialized. - pub fn as_str(&self) -> &'static str { - match self { - PKCEMethod::S256 => "S256", - PKCEMethod::Plain => "plain" - } - } -} -/// A PKCE verifier string that should be kept in secret until the end -/// of the authentication ceremony, where it is revealed to prove that -/// the one who uses the grant is the same entity who it was given to. -#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] -pub struct PKCEVerifier(pub(super) String); - -impl AsRef for PKCEVerifier { - fn as_ref(&self) -> &str { - self.0.as_str() - } -} -impl ToString for PKCEVerifier { - fn to_string(&self) -> String { - self.0.clone() - } -} - -impl PKCEVerifier { - /// Generate a new PKCE verifier string of 128 bytes in length. 
- #[allow(clippy::new_without_default)] - pub fn new() -> Self { - let bytes = rand::thread_rng() - .sample_iter(&Alphanumeric) - .take(128) - .collect::>(); - Self(String::from_utf8(bytes).unwrap()) - } -} - -/// A PKCE challenge as described in [RFC7636]. -/// -/// [RFC7636]: https://tools.ietf.org/html/rfc7636 -#[derive(Eq, PartialEq, Debug, Clone, Serialize, Deserialize)] -pub struct PKCEChallenge { - code_challenge: String, - #[serde(rename = "code_challenge_method")] - method: PKCEMethod -} - -impl PKCEChallenge { - /// Create a new challenge from a [PKCEVerifier] using a certain - /// [PKCEMethod]. - pub fn new(code_verifier: &PKCEVerifier, method: PKCEMethod) -> Self { - Self { - code_challenge: match method { - PKCEMethod::S256 => { - let mut hasher = Sha256::new(); - hasher.update(code_verifier.as_ref()); - let mut challenge = BASE64URL.encode(&hasher.finalize()); - challenge.retain(|c| c != '='); - - challenge - }, - PKCEMethod::Plain => code_verifier.to_string(), - }, - method - } - } - - /// Verify that the [PKCEVerifier] corresponds to this challenge, - /// by creating a second challenge string and comparing it against - /// this challenge data. - /// - /// ```rust - /// use kittybox_indieauth::{PKCEVerifier, PKCEMethod, PKCEChallenge}; - /// - /// let verifier = PKCEVerifier::new(); - /// let challenge = PKCEChallenge::new(&verifier, PKCEMethod::default()); - /// // Meanwhile, at the token endpoint, in the end of the ceremony... - /// // ...the challenge gets retrieved from the stored data and verified - /// assert!(challenge.verify(verifier)) - /// ``` - #[must_use] - pub fn verify(&self, code_verifier: PKCEVerifier) -> bool { - Self::new(&code_verifier, self.method) == *self - } - - /// Return a reference to the code challenge string. - pub fn as_str(&self) -> &str { - self.code_challenge.as_str() - } - - /// Return the method used to create this challenge. 
- pub fn method(&self) -> PKCEMethod { - self.method - } -} - -#[cfg(test)] -mod tests { - use super::{PKCEMethod, PKCEVerifier, PKCEChallenge}; - - #[test] - /// A snapshot test generated using [Aaron Parecki's PKCE - /// tools](https://example-app.com/pkce) that checks for a - /// conforming challenge. - fn test_pkce_challenge_verification() { - let verifier = PKCEVerifier("ec03310e4e90f7bc988af05384060c3c1afeae4bb4d0f648c5c06b63".to_owned()); - - let challenge = PKCEChallenge::new(&verifier, PKCEMethod::S256); - - assert_eq!(challenge.as_str(), "aB8OG20Rh8UoQ9gFhI0YvPkx4dDW2MBspBKGXL6j6Wg"); - } -} diff --git a/kittybox-rs/indieauth/src/scopes.rs b/kittybox-rs/indieauth/src/scopes.rs deleted file mode 100644 index d74878e..0000000 --- a/kittybox-rs/indieauth/src/scopes.rs +++ /dev/null @@ -1,208 +0,0 @@ -use std::str::FromStr; - -use serde::{ - Serialize, Serializer, - Deserialize, - de::{ - Deserializer, Visitor, - Error as DeserializeError - } -}; - -/// Various scopes that can be requested through IndieAuth. -#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)] -#[serde(rename_all = "snake_case")] -pub enum Scope { - /// Allows to create posts using Micropub. - Create, - /// Allows to edit posts using Micropub. - Update, - /// Allows to delete posts using Micropub. - Delete, - /// Allows to upload blobs to the media endpoint. - Media, - /// Allows to read feeds via Microsub. - Read, - /// Allows to manage follows via Microsub. - Follow, - /// Allows to mute and unmute users in feeds via Microsub. - Mute, - /// Allows to block and unblock users. - Block, - /// Allows to create and manage feeds via Microsub. - Channels, - /// Allows to request profile information (except email, see Email) - Profile, - /// Allows to receive email in the profile information. - Email, - /// Custom scope not included above. - Custom(String) -} -impl Scope { - /// Create a custom scope from a string slice. 
- pub fn custom(scope: &str) -> Scope { - Scope::Custom(scope.to_string()) - } -} - -// TODO consider relying on serde_variant for these conversions -impl AsRef for Scope { - fn as_ref(&self) -> &str { - use Scope::*; - match self { - Create => "create", - Update => "update", - Delete => "delete", - Media => "media", - Read => "read", - Follow => "follow", - Mute => "mute", - Block => "block", - Channels => "channels", - Profile => "profile", - Email => "email", - Custom(s) => s.as_ref() - } - } -} -impl From<&str> for Scope { - fn from(scope: &str) -> Self { - match scope { - "create" => Scope::Create, - "update" => Scope::Update, - "delete" => Scope::Delete, - "media" => Scope::Media, - "read" => Scope::Read, - "follow" => Scope::Follow, - "mute" => Scope::Mute, - "block" => Scope::Block, - "channels" => Scope::Channels, - "profile" => Scope::Profile, - "email" => Scope::Email, - other => Scope::custom(other) - } - } -} -impl FromStr for Scope { - type Err = std::convert::Infallible; - fn from_str(s: &str) -> Result { - Ok(s.into()) - } -} - -/// A list of scopes that serializes to a space-separated string instead of a list. -/// -/// OAuth2 is weird, don't ask me why it's a thing. -#[derive(PartialEq, Eq, Debug, Clone)] -pub struct Scopes(Vec); -impl Scopes { - /// Create a list of scopes from a vector of scopes. - pub fn new(scopes: Vec) -> Self { - Self(scopes) - } - /// Ensure a certain scope is listed in the scope list. - pub fn has(&self, scope: &Scope) -> bool { - self.0.iter().any(|s| s == scope) - } - /// Ensure all of the requested scopes are in the list. - pub fn has_all(&self, scopes: &[Scope]) -> bool { - scopes.iter() - .map(|s1| self.iter().any(|s2| s1 == s2)) - .all(|s| s) - } - /// Transform this into an iterator over individual scopes. 
- pub fn iter(&self) -> std::slice::Iter<'_, Scope> { - self.0.iter() - } -} -impl AsRef<[Scope]> for Scopes { - fn as_ref(&self) -> &[Scope] { - self.0.as_ref() - } -} -impl ToString for Scopes { - fn to_string(&self) -> String { - self.0.iter() - .map(|s| s.as_ref()) - .fold(String::new(), |a, s| if a.is_empty() { - s.to_string() - } else { - a + " " + s - }) - } -} -impl FromStr for Scopes { - type Err = std::convert::Infallible; - - fn from_str(value: &str) -> Result { - Ok(Self(value.split_ascii_whitespace() - .map(Scope::from) - .collect::>())) - } -} -impl Serialize for Scopes { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer - { - serializer.serialize_str(&self.to_string()) - } -} -struct ScopeVisitor; -impl<'de> Visitor<'de> for ScopeVisitor { - type Value = Scopes; - - fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { - formatter.write_str("a string of space-separated OAuth2 scopes") - } - - fn visit_str(self, value: &str) -> Result - where - E: DeserializeError - { - Ok(Scopes::from_str(value).unwrap()) - } -} -impl<'de> Deserialize<'de> for Scopes { - - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de> - { - deserializer.deserialize_str(ScopeVisitor) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_serde_vec_scope() { - let scopes = vec![ - Scope::Create, Scope::Update, Scope::Delete, - Scope::Media, - Scope::custom("kittybox_internal_access") - ]; - - let scope_serialized = serde_json::to_value( - Scopes::new(scopes.clone()) - ).unwrap(); - let scope_str = scope_serialized.as_str().unwrap(); - assert_eq!(scope_str, "create update delete media kittybox_internal_access"); - - assert!(serde_json::from_value::(scope_serialized).unwrap().has_all(&scopes)) - } - - #[test] - fn test_scope_has_all() { - let scopes = Scopes(vec![ - Scope::Create, Scope::Update, Scope::custom("draft") - ]); - - assert!(scopes.has_all(&[Scope::Create, Scope::custom("draft")])); - - 
assert!(!scopes.has_all(&[Scope::Read, Scope::custom("kittybox_internal_access")])); - } - -} diff --git a/kittybox-rs/migrations/0001_init.sql b/kittybox-rs/migrations/0001_init.sql deleted file mode 100644 index c9915eb..0000000 --- a/kittybox-rs/migrations/0001_init.sql +++ /dev/null @@ -1,62 +0,0 @@ -CREATE SCHEMA IF NOT EXISTS kittybox; - -CREATE TABLE kittybox.users ( - user_domain TEXT NOT NULL PRIMARY KEY, - site_name JSONB NOT NULL DEFAULT '"Kittybox"'::jsonb, - webring JSONB NOT NULL DEFAULT 'false'::jsonb -); - -CREATE TABLE kittybox.mf2_json ( - uid TEXT NOT NULL PRIMARY KEY, - mf2 JSONB NOT NULL, - owner TEXT NOT NULL -- REFERENCES kittybox.users(user_domain) -); - -CREATE INDEX mf2props ON kittybox.mf2_json USING GIN (mf2); -CREATE INDEX published_date ON kittybox.mf2_json ((mf2 #>> '{properties,published,0}')); - -CREATE TABLE kittybox.children ( - parent TEXT NOT NULL REFERENCES kittybox.mf2_json(uid) ON DELETE CASCADE, - child TEXT NOT NULL REFERENCES kittybox.mf2_json(uid) ON DELETE CASCADE, - UNIQUE(parent, child) -); - -CREATE INDEX fulltext ON kittybox.mf2_json USING GIN ( - to_tsvector('english', mf2['properties']['content']) -); - -CREATE FUNCTION kittybox.set_setting(user_domain text, setting text, val anyelement) RETURNS void AS $$ -BEGIN -EXECUTE format('INSERT INTO kittybox.users (user_domain, %I) VALUES ($1, $2) ON CONFLICT (user_domain) DO UPDATE SET %I = $2', setting, setting) - USING user_domain, val; - RETURN; -END; -$$ LANGUAGE plpgsql; - -CREATE FUNCTION kittybox.get_setting(user_domain text, setting text) RETURNS jsonb AS $$ -DECLARE - val jsonb; -BEGIN -EXECUTE format('SELECT %I FROM kittybox.users WHERE user_domain = $1', setting) USING user_domain INTO val; - -RETURN val; -END; -$$ LANGUAGE plpgsql; - -CREATE FUNCTION kittybox.hydrate_author(mf2 jsonb) RETURNS jsonb AS $$ -DECLARE - author jsonb; - author_uid text; -BEGIN - -author_uid := mf2 #>> '{properties,author,0}'; -IF NOT (author_uid IS NULL) THEN - SELECT mf2_json.mf2 
INTO author FROM kittybox.mf2_json WHERE uid = author_uid; -END IF; -IF NOT FOUND THEN - RETURN mf2; -ELSE - RETURN jsonb_set(mf2, '{properties,author,0}', author); -END IF; -END; -$$ LANGUAGE plpgsql; diff --git a/kittybox-rs/migrations/webmention/0001_init.sql b/kittybox-rs/migrations/webmention/0001_init.sql deleted file mode 100644 index 9e7a192..0000000 --- a/kittybox-rs/migrations/webmention/0001_init.sql +++ /dev/null @@ -1,15 +0,0 @@ -CREATE SCHEMA IF NOT EXISTS kittybox_webmention; - -CREATE TABLE kittybox_webmention.incoming_webmention_queue ( - id UUID PRIMARY KEY DEFAULT gen_random_uuid(), - source TEXT NOT NULL, - target TEXT NOT NULL, - recv_timestamp TIMESTAMPTZ NOT NULL DEFAULT now(), - attempts INTEGER NOT NULL DEFAULT 0 -); - -CREATE INDEX webmention_jobs_by_attempts ON kittybox_webmention.incoming_webmention_queue (attempts); - -CREATE RULE notify_incoming_webmention AS -ON INSERT TO kittybox_webmention.incoming_webmention_queue -DO ALSO NOTIFY incoming_webmention; diff --git a/kittybox-rs/src/bin/kittybox-check-webmention.rs b/kittybox-rs/src/bin/kittybox-check-webmention.rs deleted file mode 100644 index f02032c..0000000 --- a/kittybox-rs/src/bin/kittybox-check-webmention.rs +++ /dev/null @@ -1,152 +0,0 @@ -use std::cell::{RefCell, Ref}; -use std::rc::Rc; - -use clap::Parser; -use microformats::types::PropertyValue; -use microformats::html5ever; -use microformats::html5ever::tendril::TendrilSink; - -#[derive(thiserror::Error, Debug)] -enum Error { - #[error("http request error: {0}")] - Http(#[from] reqwest::Error), - #[error("microformats error: {0}")] - Microformats(#[from] microformats::Error), - #[error("json error: {0}")] - Json(#[from] serde_json::Error), - #[error("url parse error: {0}")] - UrlParse(#[from] url::ParseError), -} - -use kittybox_util::MentionType; - -fn check_mention(document: impl AsRef, base_url: &url::Url, link: &url::Url) -> Result, Error> { - // First, check the document for MF2 markup - let document = 
microformats::from_html(document.as_ref(), base_url.clone())?; - - // Get an iterator of all items - let items_iter = document.items.iter() - .map(AsRef::as_ref) - .map(RefCell::borrow); - - for item in items_iter { - let props = item.properties.borrow(); - for (prop, interaction_type) in [ - ("in-reply-to", MentionType::Reply), ("like-of", MentionType::Like), - ("bookmark-of", MentionType::Bookmark), ("repost-of", MentionType::Repost) - ] { - if let Some(propvals) = props.get(prop) { - for val in propvals { - if let PropertyValue::Url(url) = val { - if url == link { - return Ok(Some(interaction_type)) - } - } - } - } - } - // Process `content` - if let Some(PropertyValue::Fragment(content)) = props.get("content") - .map(Vec::as_slice) - .unwrap_or_default() - .first() - { - let root = html5ever::parse_document(html5ever::rcdom::RcDom::default(), Default::default()) - .from_utf8() - .one(content.html.to_owned().as_bytes()) - .document; - - // This is a trick to unwrap recursion into a loop - // - // A list of unprocessed node is made. Then, in each - // iteration, the list is "taken" and replaced with an - // empty list, which is populated with nodes for the next - // iteration of the loop. - // - // Empty list means all nodes were processed. - let mut unprocessed_nodes: Vec> = root.children.borrow().iter().cloned().collect(); - while unprocessed_nodes.len() > 0 { - // "Take" the list out of its memory slot, replace it with an empty list - let nodes = std::mem::take(&mut unprocessed_nodes); - 'nodes_loop: for node in nodes.into_iter() { - // Add children nodes to the list for the next iteration - unprocessed_nodes.extend(node.children.borrow().iter().cloned()); - - if let html5ever::rcdom::NodeData::Element { ref name, ref attrs, .. 
} = node.data { - // If it's not ``, skip it - if name.local != *"a" { continue; } - let mut is_mention: bool = false; - for attr in attrs.borrow().iter() { - if attr.name.local == *"rel" { - // Don't count `rel="nofollow"` links β€” a web crawler should ignore them - // and so for purposes of driving visitors they are useless - if attr.value - .as_ref() - .split([',', ' ']) - .any(|v| v == "nofollow") - { - // Skip the entire node. - continue 'nodes_loop; - } - } - // if it's not ``, skip it - if attr.name.local != *"href" { continue; } - // Be forgiving in parsing URLs, and resolve them against the base URL - if let Ok(url) = base_url.join(attr.value.as_ref()) { - if &url == link { - is_mention = true; - } - } - } - if is_mention { - return Ok(Some(MentionType::Mention)); - } - } - } - } - - } - } - - Ok(None) -} - -#[derive(Parser, Debug)] -#[clap( - name = "kittybox-check-webmention", - author = "Vika ", - version = env!("CARGO_PKG_VERSION"), - about = "Verify an incoming webmention" -)] -struct Args { - #[clap(value_parser)] - url: url::Url, - #[clap(value_parser)] - link: url::Url -} - -#[tokio::main] -async fn main() -> Result<(), self::Error> { - let args = Args::parse(); - - let http: reqwest::Client = { - #[allow(unused_mut)] - let mut builder = reqwest::Client::builder() - .user_agent(concat!( - env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION") - )); - - builder.build().unwrap() - }; - - let response = http.get(args.url.clone()).send().await?; - let text = response.text().await?; - - if let Some(mention_type) = check_mention(text, &args.url, &args.link)? 
{ - println!("{:?}", mention_type); - - Ok(()) - } else { - std::process::exit(1) - } -} diff --git a/kittybox-rs/src/bin/kittybox-indieauth-helper.rs b/kittybox-rs/src/bin/kittybox-indieauth-helper.rs deleted file mode 100644 index 3377ec3..0000000 --- a/kittybox-rs/src/bin/kittybox-indieauth-helper.rs +++ /dev/null @@ -1,233 +0,0 @@ -use kittybox_indieauth::{ - AuthorizationRequest, PKCEVerifier, - PKCEChallenge, PKCEMethod, GrantRequest, Scope, - AuthorizationResponse, TokenData, GrantResponse -}; -use clap::Parser; -use std::{borrow::Cow, io::Write}; - -const DEFAULT_CLIENT_ID: &str = "https://kittybox.fireburn.ru/indieauth-helper.html"; -const DEFAULT_REDIRECT_URI: &str = "http://localhost:60000/callback"; - -#[derive(Debug, thiserror::Error)] -enum Error { - #[error("i/o error: {0}")] - IO(#[from] std::io::Error), - #[error("http request error: {0}")] - HTTP(#[from] reqwest::Error), - #[error("urlencoded encoding error: {0}")] - UrlencodedEncoding(#[from] serde_urlencoded::ser::Error), - #[error("url parsing error: {0}")] - UrlParse(#[from] url::ParseError), - #[error("indieauth flow error: {0}")] - IndieAuth(Cow<'static, str>) -} - -#[derive(Parser, Debug)] -#[clap( - name = "kittybox-indieauth-helper", - author = "Vika ", - version = env!("CARGO_PKG_VERSION"), - about = "Retrieve an IndieAuth token for debugging", - long_about = None -)] -struct Args { - /// Profile URL to use for initiating IndieAuth metadata discovery. - #[clap(value_parser)] - me: url::Url, - /// Scopes to request for the token. - /// - /// All IndieAuth scopes are supported, including arbitrary custom scopes. - #[clap(short, long)] - scope: Vec, - /// Client ID to use when requesting a token. - #[clap(short, long, value_parser, default_value = DEFAULT_CLIENT_ID)] - client_id: url::Url, - /// Redirect URI to declare. Note: This will break the flow, use only for testing UI. 
- #[clap(long, value_parser)] - redirect_uri: Option -} - -fn append_query_string( - url: &url::Url, - query: T -) -> Result { - let mut new_url = url.clone(); - let mut query = serde_urlencoded::to_string(query)?; - if let Some(old_query) = url.query() { - query.push('&'); - query.push_str(old_query); - } - new_url.set_query(Some(&query)); - - Ok(new_url) -} - -#[tokio::main] -async fn main() -> Result<(), Error> { - let args = Args::parse(); - - let http: reqwest::Client = { - #[allow(unused_mut)] - let mut builder = reqwest::Client::builder() - .user_agent(concat!( - env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION") - )); - - builder.build().unwrap() - }; - - let redirect_uri: url::Url = args.redirect_uri - .clone() - .unwrap_or_else(|| DEFAULT_REDIRECT_URI.parse().unwrap()); - - eprintln!("Checking .well-known for metadata..."); - let metadata = http.get(args.me.join("/.well-known/oauth-authorization-server")?) - .header("Accept", "application/json") - .send() - .await? - .json::() - .await?; - - let verifier = PKCEVerifier::new(); - - let authorization_request = AuthorizationRequest { - response_type: kittybox_indieauth::ResponseType::Code, - client_id: args.client_id.clone(), - redirect_uri: redirect_uri.clone(), - state: kittybox_indieauth::State::new(), - code_challenge: PKCEChallenge::new(&verifier, PKCEMethod::default()), - scope: Some(kittybox_indieauth::Scopes::new(args.scope)), - me: Some(args.me) - }; - - let indieauth_url = append_query_string( - &metadata.authorization_endpoint, - authorization_request - )?; - - eprintln!("Please visit the following URL in your browser:\n\n {}\n", indieauth_url.as_str()); - - if args.redirect_uri.is_some() { - eprintln!("Custom redirect URI specified, won't be able to catch authorization response."); - std::process::exit(0); - } - - // Prepare a callback - let (tx, rx) = tokio::sync::oneshot::channel::(); - let server = { - use axum::{routing::get, extract::Query, response::IntoResponse}; - - let tx = 
std::sync::Arc::new(tokio::sync::Mutex::new(Some(tx))); - - let router = axum::Router::new() - .route("/callback", axum::routing::get( - move |query: Option>| async move { - if let Some(Query(response)) = query { - if let Some(tx) = tx.lock_owned().await.take() { - tx.send(response).unwrap(); - - (axum::http::StatusCode::OK, - [("Content-Type", "text/plain")], - "Thank you! This window can now be closed.") - .into_response() - } else { - (axum::http::StatusCode::BAD_REQUEST, - [("Content-Type", "text/plain")], - "Oops. The callback was already received. Did you click twice?") - .into_response() - } - } else { - axum::http::StatusCode::BAD_REQUEST.into_response() - } - } - )); - - use std::net::{SocketAddr, IpAddr, Ipv4Addr}; - - let server = hyper::server::Server::bind( - &SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST),60000) - ) - .serve(router.into_make_service()); - - tokio::task::spawn(server) - }; - - let authorization_response = rx.await.unwrap(); - - // Clean up after the server - tokio::task::spawn(async move { - // Wait for the server to settle -- it might need to send its response - tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; - // Abort the future -- this should kill the server - server.abort(); - }); - - eprintln!("Got authorization response: {:#?}", authorization_response); - eprint!("Checking issuer field..."); - std::io::stderr().lock().flush()?; - - if dbg!(authorization_response.iss.as_str()) == dbg!(metadata.issuer.as_str()) { - eprintln!(" Done"); - } else { - eprintln!(" Failed"); - #[cfg(not(debug_assertions))] - std::process::exit(1); - } - let grant_response: GrantResponse = http.post(metadata.token_endpoint) - .form(&GrantRequest::AuthorizationCode { - code: authorization_response.code, - client_id: args.client_id, - redirect_uri, - code_verifier: verifier - }) - .header("Accept", "application/json") - .send() - .await? 
- .json() - .await?; - - if let GrantResponse::AccessToken { - me, - profile, - access_token, - expires_in, - refresh_token, - token_type, - scope - } = grant_response { - eprintln!("Congratulations, {}, access token is ready! {}", - me.as_str(), - if let Some(exp) = expires_in { - format!("It expires in {exp} seconds.") - } else { - format!("It seems to have unlimited duration.") - } - ); - println!("{}", access_token); - if let Some(refresh_token) = refresh_token { - eprintln!("Save this refresh token, it will come in handy:"); - println!("{}", refresh_token); - }; - - if let Some(profile) = profile { - eprintln!("\nThe token endpoint returned some profile information:"); - if let Some(name) = profile.name { - eprintln!(" - Name: {name}") - } - if let Some(url) = profile.url { - eprintln!(" - URL: {url}") - } - if let Some(photo) = profile.photo { - eprintln!(" - Photo: {photo}") - } - if let Some(email) = profile.email { - eprintln!(" - Email: {email}") - } - } - - Ok(()) - } else { - return Err(Error::IndieAuth(Cow::Borrowed("IndieAuth token endpoint did not return an access token grant."))); - } -} diff --git a/kittybox-rs/src/bin/kittybox-mf2.rs b/kittybox-rs/src/bin/kittybox-mf2.rs deleted file mode 100644 index 4366cb8..0000000 --- a/kittybox-rs/src/bin/kittybox-mf2.rs +++ /dev/null @@ -1,49 +0,0 @@ -use clap::Parser; - -#[derive(Parser, Debug)] -#[clap( - name = "kittybox-mf2", - author = "Vika ", - version = env!("CARGO_PKG_VERSION"), - about = "Fetch HTML and turn it into MF2-JSON" -)] -struct Args { - #[clap(value_parser)] - url: url::Url, -} - -#[derive(thiserror::Error, Debug)] -enum Error { - #[error("http request error: {0}")] - Http(#[from] reqwest::Error), - #[error("microformats error: {0}")] - Microformats(#[from] microformats::Error), - #[error("json error: {0}")] - Json(#[from] serde_json::Error), - #[error("url parse error: {0}")] - UrlParse(#[from] url::ParseError), -} - -#[tokio::main] -async fn main() -> Result<(), Error> { - let args = 
Args::parse(); - - let http: reqwest::Client = { - #[allow(unused_mut)] - let mut builder = reqwest::Client::builder() - .user_agent(concat!( - env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION") - )); - - builder.build().unwrap() - }; - - let response = http.get(args.url.clone()).send().await?; - let text = response.text().await?; - - let mf2 = microformats::from_html(text.as_ref(), args.url)?; - - println!("{}", serde_json::to_string_pretty(&mf2)?); - - Ok(()) -} diff --git a/kittybox-rs/src/bin/kittybox_bulk_import.rs b/kittybox-rs/src/bin/kittybox_bulk_import.rs deleted file mode 100644 index 7e1f6af..0000000 --- a/kittybox-rs/src/bin/kittybox_bulk_import.rs +++ /dev/null @@ -1,66 +0,0 @@ -use anyhow::{anyhow, bail, Context, Result}; -use std::fs::File; -use std::io; - -#[async_std::main] -async fn main() -> Result<()> { - let args = std::env::args().collect::>(); - if args.iter().skip(1).any(|s| s == "--help") { - println!("Usage: {} [file]", args[0]); - println!("\nIf launched with no arguments, reads from stdin."); - println!( - "\nUse KITTYBOX_AUTH_TOKEN environment variable to authorize to the Micropub endpoint." - ); - std::process::exit(0); - } - - let token = std::env::var("KITTYBOX_AUTH_TOKEN") - .map_err(|_| anyhow!("No auth token found! Use KITTYBOX_AUTH_TOKEN env variable."))?; - let data: Vec = (if args.len() == 2 || (args.len() == 3 && args[2] == "-") { - serde_json::from_reader(io::stdin()) - } else if args.len() == 3 { - serde_json::from_reader(File::open(&args[2]).with_context(|| "Error opening input file")?) 
- } else { - bail!("See `{} --help` for usage.", args[0]); - }) - .with_context(|| "Error while loading the input file")?; - - let url = surf::Url::parse(&args[1])?; - let client = surf::Client::new(); - - let iter = data.into_iter(); - - for post in iter { - println!( - "Processing {}...", - post["properties"]["url"][0] - .as_str() - .or_else(|| post["properties"]["published"][0] - .as_str() - .or_else(|| post["properties"]["name"][0] - .as_str() - .or(Some("")))) - .unwrap() - ); - match client - .post(&url) - .body(surf::http::Body::from_string(serde_json::to_string(&post)?)) - .header("Content-Type", "application/json") - .header("Authorization", format!("Bearer {}", &token)) - .send() - .await - { - Ok(mut response) => { - if response.status() == 201 || response.status() == 202 { - println!("Posted at {}", response.header("location").unwrap().last()); - } else { - println!("Error: {:?}", response.body_string().await); - } - } - Err(err) => { - println!("{}", err); - } - } - } - Ok(()) -} diff --git a/kittybox-rs/src/bin/kittybox_database_converter.rs b/kittybox-rs/src/bin/kittybox_database_converter.rs deleted file mode 100644 index bc355c9..0000000 --- a/kittybox-rs/src/bin/kittybox_database_converter.rs +++ /dev/null @@ -1,106 +0,0 @@ -use anyhow::{anyhow, Context}; -use kittybox::database::FileStorage; -use kittybox::database::Storage; -use redis::{self, AsyncCommands}; -use std::collections::HashMap; - -/// Convert from a Redis storage to a new storage new_storage. 
-async fn convert_from_redis(from: String, new_storage: S) -> anyhow::Result<()> { - let db = redis::Client::open(from).context("Failed to open the Redis connection")?; - - let mut conn = db - .get_async_std_connection() - .await - .context("Failed to connect to Redis")?; - - // Rebinding to convince the borrow checker we're not smuggling stuff outta scope - let storage = &new_storage; - - let mut stream = conn.hscan::<_, String>("posts").await?; - - while let Some(key) = stream.next_item().await { - let value = serde_json::from_str::( - &stream - .next_item() - .await - .ok_or(anyhow!("Failed to find a corresponding value for the key"))?, - )?; - - println!("{}, {:?}", key, value); - - if value["see_other"].is_string() { - continue; - } - - let user = &(url::Url::parse(value["properties"]["uid"][0].as_str().unwrap()) - .unwrap() - .origin() - .ascii_serialization() - .clone() - + "/"); - if let Err(err) = storage.clone().put_post(&value, user).await { - eprintln!("Error saving post: {}", err); - } - } - - let mut stream: redis::AsyncIter = conn.scan_match("settings_*").await?; - while let Some(key) = stream.next_item().await { - let mut conn = db - .get_async_std_connection() - .await - .context("Failed to connect to Redis")?; - let user = key.strip_prefix("settings_").unwrap(); - match conn - .hgetall::<&str, HashMap>(&key) - .await - .context(format!("Failed getting settings from key {}", key)) - { - Ok(settings) => { - for (k, v) in settings.iter() { - if let Err(e) = storage - .set_setting(k, user, v) - .await - .with_context(|| format!("Failed setting {} for {}", k, user)) - { - eprintln!("{}", e); - } - } - } - Err(e) => { - eprintln!("{}", e); - } - } - } - - Ok(()) -} - -#[async_std::main] -async fn main() -> anyhow::Result<()> { - let mut args = std::env::args(); - args.next(); // skip argv[0] - let old_uri = args - .next() - .ok_or_else(|| anyhow!("No import source is provided."))?; - let new_uri = args - .next() - .ok_or_else(|| anyhow!("No import 
destination is provided."))?; - - let storage = if new_uri.starts_with("file:") { - let folder = new_uri.strip_prefix("file://").unwrap(); - let path = std::path::PathBuf::from(folder); - Box::new( - FileStorage::new(path) - .await - .context("Failed to construct the file storage")?, - ) - } else { - anyhow::bail!("Cannot construct the storage abstraction for destination storage. Check the storage type?"); - }; - - if old_uri.starts_with("redis") { - convert_from_redis(old_uri, *storage).await? - } - - Ok(()) -} diff --git a/kittybox-rs/src/database/file/mod.rs b/kittybox-rs/src/database/file/mod.rs deleted file mode 100644 index 27d3da1..0000000 --- a/kittybox-rs/src/database/file/mod.rs +++ /dev/null @@ -1,733 +0,0 @@ -//#![warn(clippy::unwrap_used)] -use crate::database::{ErrorKind, Result, settings, Storage, StorageError}; -use crate::micropub::{MicropubUpdate, MicropubPropertyDeletion}; -use async_trait::async_trait; -use futures::{stream, StreamExt, TryStreamExt}; -use kittybox_util::MentionType; -use serde_json::json; -use std::borrow::Cow; -use std::collections::HashMap; -use std::io::ErrorKind as IOErrorKind; -use std::path::{Path, PathBuf}; -use tokio::fs::{File, OpenOptions}; -use tokio::io::{AsyncReadExt, AsyncWriteExt}; -use tokio::task::spawn_blocking; -use tracing::{debug, error}; - -impl From for StorageError { - fn from(source: std::io::Error) -> Self { - Self::with_source( - match source.kind() { - IOErrorKind::NotFound => ErrorKind::NotFound, - IOErrorKind::AlreadyExists => ErrorKind::Conflict, - _ => ErrorKind::Backend, - }, - Cow::Owned(format!("file I/O error: {}", &source)), - Box::new(source), - ) - } -} - -impl From for StorageError { - fn from(source: tokio::time::error::Elapsed) -> Self { - Self::with_source( - ErrorKind::Backend, - Cow::Borrowed("timeout on I/O operation"), - Box::new(source), - ) - } -} - -// Copied from https://stackoverflow.com/questions/39340924 -// This routine is adapted from the *old* Path's `path_relative_from` 
-// function, which works differently from the new `relative_from` function. -// In particular, this handles the case on unix where both paths are -// absolute but with only the root as the common directory. -fn path_relative_from(path: &Path, base: &Path) -> Option { - use std::path::Component; - - if path.is_absolute() != base.is_absolute() { - if path.is_absolute() { - Some(PathBuf::from(path)) - } else { - None - } - } else { - let mut ita = path.components(); - let mut itb = base.components(); - let mut comps: Vec = vec![]; - loop { - match (ita.next(), itb.next()) { - (None, None) => break, - (Some(a), None) => { - comps.push(a); - comps.extend(ita.by_ref()); - break; - } - (None, _) => comps.push(Component::ParentDir), - (Some(a), Some(b)) if comps.is_empty() && a == b => (), - (Some(a), Some(b)) if b == Component::CurDir => comps.push(a), - (Some(_), Some(b)) if b == Component::ParentDir => return None, - (Some(a), Some(_)) => { - comps.push(Component::ParentDir); - for _ in itb { - comps.push(Component::ParentDir); - } - comps.push(a); - comps.extend(ita.by_ref()); - break; - } - } - } - Some(comps.iter().map(|c| c.as_os_str()).collect()) - } -} - -#[allow(clippy::unwrap_used, clippy::expect_used)] -#[cfg(test)] -mod tests { - #[test] - fn test_relative_path_resolving() { - let path1 = std::path::Path::new("/home/vika/Projects/kittybox"); - let path2 = std::path::Path::new("/home/vika/Projects/nixpkgs"); - let relative_path = super::path_relative_from(path2, path1).unwrap(); - - assert_eq!(relative_path, std::path::Path::new("../nixpkgs")) - } -} - -// TODO: Check that the path ACTUALLY IS INSIDE THE ROOT FOLDER -// This could be checked by completely resolving the path -// and checking if it has a common prefix -fn url_to_path(root: &Path, url: &str) -> PathBuf { - let path = url_to_relative_path(url).to_logical_path(root); - if !path.starts_with(root) { - // TODO: handle more gracefully - panic!("Security error: {:?} is not a prefix of {:?}", path, root) 
- } else { - path - } -} - -fn url_to_relative_path(url: &str) -> relative_path::RelativePathBuf { - let url = url::Url::try_from(url).expect("Couldn't parse a URL"); - let mut path = relative_path::RelativePathBuf::new(); - let user_domain = format!( - "{}{}", - url.host_str().unwrap(), - url.port() - .map(|port| format!(":{}", port)) - .unwrap_or_default() - ); - path.push(user_domain + url.path() + ".json"); - - path -} - -fn modify_post(post: &serde_json::Value, update: MicropubUpdate) -> Result { - let mut post = post.clone(); - - let mut add_keys: HashMap> = HashMap::new(); - let mut remove_keys: Vec = vec![]; - let mut remove_values: HashMap> = HashMap::new(); - - if let Some(MicropubPropertyDeletion::Properties(delete)) = update.delete { - remove_keys.extend(delete.iter().cloned()); - } else if let Some(MicropubPropertyDeletion::Values(delete)) = update.delete { - for (k, v) in delete { - remove_values - .entry(k.to_string()) - .or_default() - .extend(v.clone()); - } - } - if let Some(add) = update.add { - for (k, v) in add { - add_keys.insert(k.to_string(), v.clone()); - } - } - if let Some(replace) = update.replace { - for (k, v) in replace { - remove_keys.push(k.to_string()); - add_keys.insert(k.to_string(), v.clone()); - } - } - - if let Some(props) = post["properties"].as_object_mut() { - for k in remove_keys { - props.remove(&k); - } - } - for (k, v) in remove_values { - let k = &k; - let props = if k == "children" { - &mut post - } else { - &mut post["properties"] - }; - v.iter().for_each(|v| { - if let Some(vec) = props[k].as_array_mut() { - if let Some(index) = vec.iter().position(|w| w == v) { - vec.remove(index); - } - } - }); - } - for (k, v) in add_keys { - tracing::debug!("Adding k/v to post: {} => {:?}", k, v); - let props = if k == "children" { - &mut post - } else { - &mut post["properties"] - }; - if let Some(prop) = props[&k].as_array_mut() { - if k == "children" { - v.into_iter().rev().for_each(|v| prop.insert(0, v)); - } else { - 
prop.extend(v.into_iter()); - } - } else { - props[&k] = serde_json::Value::Array(v) - } - } - Ok(post) -} - -#[derive(Clone, Debug)] -/// A backend using a folder with JSON files as a backing store. -/// Uses symbolic links to represent a many-to-one mapping of URLs to a post. -pub struct FileStorage { - root_dir: PathBuf, -} - -impl FileStorage { - /// Create a new storage wrapping a folder specified by root_dir. - pub async fn new(root_dir: PathBuf) -> Result { - // TODO check if the dir is writable - Ok(Self { root_dir }) - } -} - -async fn hydrate_author( - feed: &mut serde_json::Value, - user: &'_ Option, - storage: &S, -) { - let url = feed["properties"]["uid"][0] - .as_str() - .expect("MF2 value should have a UID set! Check if you used normalize_mf2 before recording the post!"); - if let Some(author) = feed["properties"]["author"].as_array().cloned() { - if !feed["type"] - .as_array() - .expect("MF2 value should have a type set!") - .iter() - .any(|i| i == "h-card") - { - let author_list: Vec = stream::iter(author.iter()) - .then(|i| async move { - if let Some(i) = i.as_str() { - match storage.get_post(i).await { - Ok(post) => match post { - Some(post) => post, - None => json!(i), - }, - Err(e) => { - error!("Error while hydrating post {}: {}", url, e); - json!(i) - } - } - } else { - i.clone() - } - }) - .collect::>() - .await; - if let Some(props) = feed["properties"].as_object_mut() { - props["author"] = json!(author_list); - } else { - feed["properties"] = json!({ "author": author_list }); - } - } - } -} - -#[async_trait] -impl Storage for FileStorage { - #[tracing::instrument(skip(self))] - async fn post_exists(&self, url: &str) -> Result { - let path = url_to_path(&self.root_dir, url); - debug!("Checking if {:?} exists...", path); - /*let result = match tokio::fs::metadata(path).await { - Ok(metadata) => { - Ok(true) - }, - Err(err) => { - if err.kind() == IOErrorKind::NotFound { - Ok(false) - } else { - Err(err.into()) - } - } - };*/ - 
#[allow(clippy::unwrap_used)] // JoinHandle captures panics, this closure shouldn't panic - Ok(spawn_blocking(move || path.is_file()).await.unwrap()) - } - - #[tracing::instrument(skip(self))] - async fn get_post(&self, url: &str) -> Result> { - let path = url_to_path(&self.root_dir, url); - // TODO: check that the path actually belongs to the dir of user who requested it - // it's not like you CAN access someone else's private posts with it - // so it's not exactly a security issue, but it's still not good - debug!("Opening {:?}", path); - - match File::open(&path).await { - Ok(mut file) => { - let mut content = String::new(); - // Typechecks because OS magic acts on references - // to FDs as if they were behind a mutex - AsyncReadExt::read_to_string(&mut file, &mut content).await?; - debug!( - "Read {} bytes successfully from {:?}", - content.as_bytes().len(), - &path - ); - Ok(Some(serde_json::from_str(&content)?)) - } - Err(err) => { - if err.kind() == IOErrorKind::NotFound { - Ok(None) - } else { - Err(err.into()) - } - } - } - } - - #[tracing::instrument(skip(self))] - async fn put_post(&self, post: &'_ serde_json::Value, user: &'_ str) -> Result<()> { - let key = post["properties"]["uid"][0] - .as_str() - .expect("Tried to save a post without UID"); - let path = url_to_path(&self.root_dir, key); - let tempfile = (&path).with_extension("tmp"); - debug!("Creating {:?}", path); - - let parent = path - .parent() - .expect("Parent for this directory should always exist") - .to_owned(); - tokio::fs::create_dir_all(&parent).await?; - - let mut file = tokio::fs::OpenOptions::new() - .write(true) - .create_new(true) - .open(&tempfile) - .await?; - - file.write_all(post.to_string().as_bytes()).await?; - file.flush().await?; - file.sync_all().await?; - drop(file); - tokio::fs::rename(&tempfile, &path).await?; - tokio::fs::File::open(path.parent().unwrap()).await?.sync_all().await?; - - if let Some(urls) = post["properties"]["url"].as_array() { - for url in 
urls.iter().map(|i| i.as_str().unwrap()) { - let url_domain = { - let url = url::Url::parse(url).unwrap(); - format!( - "{}{}", - url.host_str().unwrap(), - url.port() - .map(|port| format!(":{}", port)) - .unwrap_or_default() - ) - }; - if url != key && url_domain == user { - let link = url_to_path(&self.root_dir, url); - debug!("Creating a symlink at {:?}", link); - let orig = path.clone(); - // We're supposed to have a parent here. - let basedir = link.parent().ok_or_else(|| { - StorageError::from_static( - ErrorKind::Backend, - "Failed to calculate parent directory when creating a symlink", - ) - })?; - let relative = path_relative_from(&orig, basedir).unwrap(); - println!("{:?} - {:?} = {:?}", &orig, &basedir, &relative); - tokio::fs::symlink(relative, link).await?; - } - } - } - - if post["type"] - .as_array() - .unwrap() - .iter() - .any(|s| s.as_str() == Some("h-feed")) - { - tracing::debug!("Adding to channel list..."); - // Add the h-feed to the channel list - let path = { - let mut path = relative_path::RelativePathBuf::new(); - path.push(user); - path.push("channels"); - - path.to_path(&self.root_dir) - }; - tokio::fs::create_dir_all(path.parent().unwrap()).await?; - tracing::debug!("Channels file path: {}", path.display()); - let tempfilename = path.with_extension("tmp"); - let channel_name = post["properties"]["name"][0] - .as_str() - .map(|s| s.to_string()) - .unwrap_or_else(String::default); - let key = key.to_string(); - tracing::debug!("Opening temporary file to modify chnanels..."); - let mut tempfile = OpenOptions::new() - .write(true) - .create_new(true) - .open(&tempfilename) - .await?; - tracing::debug!("Opening real channel file..."); - let mut channels: Vec = { - match OpenOptions::new() - .read(true) - .write(false) - .truncate(false) - .create(false) - .open(&path) - .await - { - Err(err) if err.kind() == std::io::ErrorKind::NotFound => { - Vec::default() - } - Err(err) => { - // Propagate the error upwards - return Err(err.into()); - } - 
Ok(mut file) => { - let mut content = String::new(); - file.read_to_string(&mut content).await?; - drop(file); - - if !content.is_empty() { - serde_json::from_str(&content)? - } else { - Vec::default() - } - } - } - }; - - channels.push(super::MicropubChannel { - uid: key.to_string(), - name: channel_name, - }); - - tempfile - .write_all(serde_json::to_string(&channels)?.as_bytes()) - .await?; - tempfile.flush().await?; - tempfile.sync_all().await?; - drop(tempfile); - tokio::fs::rename(tempfilename, &path).await?; - tokio::fs::File::open(path.parent().unwrap()).await?.sync_all().await?; - } - Ok(()) - } - - #[tracing::instrument(skip(self))] - async fn update_post(&self, url: &str, update: MicropubUpdate) -> Result<()> { - let path = url_to_path(&self.root_dir, url); - let tempfilename = path.with_extension("tmp"); - #[allow(unused_variables)] - let (old_json, new_json) = { - let mut temp = OpenOptions::new() - .write(true) - .create_new(true) - .open(&tempfilename) - .await?; - let mut file = OpenOptions::new().read(true).open(&path).await?; - - let mut content = String::new(); - file.read_to_string(&mut content).await?; - let json: serde_json::Value = serde_json::from_str(&content)?; - drop(file); - // Apply the editing algorithms - let new_json = modify_post(&json, update)?; - - temp.write_all(new_json.to_string().as_bytes()).await?; - temp.flush().await?; - temp.sync_all().await?; - drop(temp); - tokio::fs::rename(tempfilename, &path).await?; - tokio::fs::File::open(path.parent().unwrap()).await?.sync_all().await?; - - (json, new_json) - }; - // TODO check if URLs changed between old and new JSON - Ok(()) - } - - #[tracing::instrument(skip(self))] - async fn get_channels(&self, user: &'_ str) -> Result> { - let mut path = relative_path::RelativePathBuf::new(); - path.push(user); - path.push("channels"); - - let path = path.to_path(&self.root_dir); - tracing::debug!("Channels file path: {}", path.display()); - - match File::open(&path).await { - Ok(mut f) => { 
- let mut content = String::new(); - f.read_to_string(&mut content).await?; - // This should not happen, but if it does, handle it gracefully - if content.is_empty() { - return Ok(vec![]); - } - let channels: Vec = serde_json::from_str(&content)?; - Ok(channels) - } - Err(e) => { - if e.kind() == IOErrorKind::NotFound { - Ok(vec![]) - } else { - Err(e.into()) - } - } - } - } - - async fn read_feed_with_cursor( - &self, - url: &'_ str, - cursor: Option<&'_ str>, - limit: usize, - user: Option<&'_ str> - ) -> Result)>> { - Ok(self.read_feed_with_limit( - url, - &cursor.map(|v| v.to_owned()), - limit, - &user.map(|v| v.to_owned()) - ).await? - .map(|feed| { - tracing::debug!("Feed: {:#}", serde_json::Value::Array( - feed["children"] - .as_array() - .map(|v| v.as_slice()) - .unwrap_or_default() - .iter() - .map(|mf2| mf2["properties"]["uid"][0].clone()) - .collect::>() - )); - let cursor: Option = feed["children"] - .as_array() - .map(|v| v.as_slice()) - .unwrap_or_default() - .last() - .map(|v| v["properties"]["uid"][0].as_str().unwrap().to_owned()); - tracing::debug!("Extracted the cursor: {:?}", cursor); - (feed, cursor) - }) - ) - } - - #[tracing::instrument(skip(self))] - async fn read_feed_with_limit( - &self, - url: &'_ str, - after: &'_ Option, - limit: usize, - user: &'_ Option, - ) -> Result> { - if let Some(mut feed) = self.get_post(url).await? 
{ - if feed["children"].is_array() { - // Take this out of the MF2-JSON document to save memory - // - // This uses a clever match with enum destructuring - // to extract the underlying Vec without cloning it - let children: Vec = match feed["children"].take() { - serde_json::Value::Array(children) => children, - // We've already checked it's an array - _ => unreachable!() - }; - tracing::debug!("Full children array: {:#}", serde_json::Value::Array(children.clone())); - let mut posts_iter = children - .into_iter() - .map(|s: serde_json::Value| s.as_str().unwrap().to_string()); - // Note: we can't actually use `skip_while` here because we end up emitting `after`. - // This imperative snippet consumes after instead of emitting it, allowing the - // stream of posts to return only those items that truly come *after* that one. - // If I would implement an Iter combinator like this, I would call it `skip_until` - if let Some(after) = after { - for s in posts_iter.by_ref() { - if &s == after { - break; - } - } - }; - let posts = stream::iter(posts_iter) - .map(|url: String| async move { self.get_post(&url).await }) - .buffered(std::cmp::min(3, limit)) - // Hack to unwrap the Option and sieve out broken links - // Broken links return None, and Stream::filter_map skips Nones. 
- .try_filter_map(|post: Option| async move { Ok(post) }) - .and_then(|mut post| async move { - hydrate_author(&mut post, user, self).await; - Ok(post) - }) - .take(limit); - - match posts.try_collect::>().await { - Ok(posts) => feed["children"] = serde_json::json!(posts), - Err(err) => { - return Err(StorageError::with_source( - ErrorKind::Other, - Cow::Owned(format!("Feed assembly error: {}", &err)), - Box::new(err), - )); - } - } - } - hydrate_author(&mut feed, user, self).await; - Ok(Some(feed)) - } else { - Ok(None) - } - } - - #[tracing::instrument(skip(self))] - async fn delete_post(&self, url: &'_ str) -> Result<()> { - let path = url_to_path(&self.root_dir, url); - if let Err(e) = tokio::fs::remove_file(path).await { - Err(e.into()) - } else { - // TODO check for dangling references in the channel list - Ok(()) - } - } - - #[tracing::instrument(skip(self))] - async fn get_setting, 'a>(&self, user: &'_ str) -> Result { - debug!("User for getting settings: {}", user); - let mut path = relative_path::RelativePathBuf::new(); - path.push(user); - path.push("settings"); - - let path = path.to_path(&self.root_dir); - debug!("Getting settings from {:?}", &path); - - let mut file = File::open(path).await?; - let mut content = String::new(); - file.read_to_string(&mut content).await?; - - let settings: HashMap<&str, serde_json::Value> = serde_json::from_str(&content)?; - match settings.get(S::ID) { - Some(value) => Ok(serde_json::from_value::(value.clone())?), - None => Err(StorageError::from_static(ErrorKind::Backend, "Setting not set")) - } - } - - #[tracing::instrument(skip(self))] - async fn set_setting + 'a, 'a>(&self, user: &'a str, value: S::Data) -> Result<()> { - let mut path = relative_path::RelativePathBuf::new(); - path.push(user); - path.push("settings"); - - let path = path.to_path(&self.root_dir); - let temppath = path.with_extension("tmp"); - - let parent = path.parent().unwrap().to_owned(); - tokio::fs::create_dir_all(&parent).await?; - - let mut 
tempfile = OpenOptions::new() - .write(true) - .create_new(true) - .open(&temppath) - .await?; - - let mut settings: HashMap = match File::open(&path).await { - Ok(mut f) => { - let mut content = String::new(); - f.read_to_string(&mut content).await?; - if content.is_empty() { - Default::default() - } else { - serde_json::from_str(&content)? - } - } - Err(err) => { - if err.kind() == IOErrorKind::NotFound { - Default::default() - } else { - return Err(err.into()); - } - } - }; - settings.insert(S::ID.to_owned(), serde_json::to_value(S::new(value))?); - - tempfile - .write_all(serde_json::to_string(&settings)?.as_bytes()) - .await?; - tempfile.flush().await?; - tempfile.sync_all().await?; - drop(tempfile); - tokio::fs::rename(temppath, &path).await?; - tokio::fs::File::open(path.parent().unwrap()).await?.sync_all().await?; - Ok(()) - } - - #[tracing::instrument(skip(self))] - async fn add_or_update_webmention(&self, target: &str, mention_type: MentionType, mention: serde_json::Value) -> Result<()> { - let path = url_to_path(&self.root_dir, target); - let tempfilename = path.with_extension("tmp"); - - let mut temp = OpenOptions::new() - .write(true) - .create_new(true) - .open(&tempfilename) - .await?; - let mut file = OpenOptions::new().read(true).open(&path).await?; - - let mut post: serde_json::Value = { - let mut content = String::new(); - file.read_to_string(&mut content).await?; - drop(file); - - serde_json::from_str(&content)? 
- }; - - let key: &'static str = match mention_type { - MentionType::Reply => "comment", - MentionType::Like => "like", - MentionType::Repost => "repost", - MentionType::Bookmark => "bookmark", - MentionType::Mention => "mention", - }; - let mention_uid = mention["properties"]["uid"][0].clone(); - if let Some(values) = post["properties"][key].as_array_mut() { - for value in values.iter_mut() { - if value["properties"]["uid"][0] == mention_uid { - *value = mention; - break; - } - } - } else { - post["properties"][key] = serde_json::Value::Array(vec![mention]); - } - - temp.write_all(post.to_string().as_bytes()).await?; - temp.flush().await?; - temp.sync_all().await?; - drop(temp); - tokio::fs::rename(tempfilename, &path).await?; - tokio::fs::File::open(path.parent().unwrap()).await?.sync_all().await?; - - Ok(()) - } -} diff --git a/kittybox-rs/src/database/memory.rs b/kittybox-rs/src/database/memory.rs deleted file mode 100644 index 6339e7a..0000000 --- a/kittybox-rs/src/database/memory.rs +++ /dev/null @@ -1,249 +0,0 @@ -#![allow(clippy::todo)] -use async_trait::async_trait; -use futures_util::FutureExt; -use serde_json::json; -use std::collections::HashMap; -use std::sync::Arc; -use tokio::sync::RwLock; - -use crate::database::{ErrorKind, MicropubChannel, Result, settings, Storage, StorageError}; - -#[derive(Clone, Debug)] -pub struct MemoryStorage { - pub mapping: Arc>>, - pub channels: Arc>>>, -} - -#[async_trait] -impl Storage for MemoryStorage { - async fn post_exists(&self, url: &str) -> Result { - return Ok(self.mapping.read().await.contains_key(url)); - } - - async fn get_post(&self, url: &str) -> Result> { - let mapping = self.mapping.read().await; - match mapping.get(url) { - Some(val) => { - if let Some(new_url) = val["see_other"].as_str() { - match mapping.get(new_url) { - Some(val) => Ok(Some(val.clone())), - None => { - drop(mapping); - self.mapping.write().await.remove(url); - Ok(None) - } - } - } else { - Ok(Some(val.clone())) - } - } - _ => 
Ok(None), - } - } - - async fn put_post(&self, post: &'_ serde_json::Value, _user: &'_ str) -> Result<()> { - let mapping = &mut self.mapping.write().await; - let key: &str = match post["properties"]["uid"][0].as_str() { - Some(uid) => uid, - None => { - return Err(StorageError::from_static( - ErrorKind::Other, - "post doesn't have a UID", - )) - } - }; - mapping.insert(key.to_string(), post.clone()); - if post["properties"]["url"].is_array() { - for url in post["properties"]["url"] - .as_array() - .unwrap() - .iter() - .map(|i| i.as_str().unwrap().to_string()) - { - if url != key { - mapping.insert(url, json!({ "see_other": key })); - } - } - } - if post["type"] - .as_array() - .unwrap() - .iter() - .any(|i| i == "h-feed") - { - // This is a feed. Add it to the channels array if it's not already there. - println!("{:#}", post); - self.channels - .write() - .await - .entry( - post["properties"]["author"][0] - .as_str() - .unwrap() - .to_string(), - ) - .or_insert_with(Vec::new) - .push(key.to_string()) - } - Ok(()) - } - - async fn update_post(&self, url: &'_ str, update: crate::micropub::MicropubUpdate) -> Result<()> { - let mut guard = self.mapping.write().await; - let mut post = guard.get_mut(url).ok_or(StorageError::from_static(ErrorKind::NotFound, "The specified post wasn't found in the database."))?; - - use crate::micropub::MicropubPropertyDeletion; - - let mut add_keys: HashMap> = HashMap::new(); - let mut remove_keys: Vec = vec![]; - let mut remove_values: HashMap> = HashMap::new(); - - if let Some(MicropubPropertyDeletion::Properties(delete)) = update.delete { - remove_keys.extend(delete.iter().cloned()); - } else if let Some(MicropubPropertyDeletion::Values(delete)) = update.delete { - for (k, v) in delete { - remove_values - .entry(k.to_string()) - .or_default() - .extend(v.clone()); - } - } - if let Some(add) = update.add { - for (k, v) in add { - add_keys.insert(k.to_string(), v.clone()); - } - } - if let Some(replace) = update.replace { - for (k, v) 
in replace { - remove_keys.push(k.to_string()); - add_keys.insert(k.to_string(), v.clone()); - } - } - - if let Some(props) = post["properties"].as_object_mut() { - for k in remove_keys { - props.remove(&k); - } - } - for (k, v) in remove_values { - let k = &k; - let props = if k == "children" { - &mut post - } else { - &mut post["properties"] - }; - v.iter().for_each(|v| { - if let Some(vec) = props[k].as_array_mut() { - if let Some(index) = vec.iter().position(|w| w == v) { - vec.remove(index); - } - } - }); - } - for (k, v) in add_keys { - tracing::debug!("Adding k/v to post: {} => {:?}", k, v); - let props = if k == "children" { - &mut post - } else { - &mut post["properties"] - }; - if let Some(prop) = props[&k].as_array_mut() { - if k == "children" { - v.into_iter().rev().for_each(|v| prop.insert(0, v)); - } else { - prop.extend(v.into_iter()); - } - } else { - props[&k] = serde_json::Value::Array(v) - } - } - - Ok(()) - } - - async fn get_channels(&self, user: &'_ str) -> Result> { - match self.channels.read().await.get(user) { - Some(channels) => Ok(futures_util::future::join_all( - channels - .iter() - .map(|channel| { - self.get_post(channel).map(|result| result.unwrap()).map( - |post: Option| { - post.map(|post| MicropubChannel { - uid: post["properties"]["uid"][0].as_str().unwrap().to_string(), - name: post["properties"]["name"][0] - .as_str() - .unwrap() - .to_string(), - }) - }, - ) - }) - .collect::>(), - ) - .await - .into_iter() - .flatten() - .collect::>()), - None => Ok(vec![]), - } - } - - #[allow(unused_variables)] - async fn read_feed_with_limit( - &self, - url: &'_ str, - after: &'_ Option, - limit: usize, - user: &'_ Option, - ) -> Result> { - todo!() - } - - #[allow(unused_variables)] - async fn read_feed_with_cursor( - &self, - url: &'_ str, - cursor: Option<&'_ str>, - limit: usize, - user: Option<&'_ str> - ) -> Result)>> { - todo!() - } - - async fn delete_post(&self, url: &'_ str) -> Result<()> { - 
self.mapping.write().await.remove(url); - Ok(()) - } - - #[allow(unused_variables)] - async fn get_setting, 'a>(&'_ self, user: &'_ str) -> Result { - todo!() - } - - #[allow(unused_variables)] - async fn set_setting + 'a, 'a>(&self, user: &'a str, value: S::Data) -> Result<()> { - todo!() - } - - #[allow(unused_variables)] - async fn add_or_update_webmention(&self, target: &str, mention_type: kittybox_util::MentionType, mention: serde_json::Value) -> Result<()> { - todo!() - } - -} - -impl Default for MemoryStorage { - fn default() -> Self { - Self::new() - } -} - -impl MemoryStorage { - pub fn new() -> Self { - Self { - mapping: Arc::new(RwLock::new(HashMap::new())), - channels: Arc::new(RwLock::new(HashMap::new())), - } - } -} diff --git a/kittybox-rs/src/database/mod.rs b/kittybox-rs/src/database/mod.rs deleted file mode 100644 index b4b70b2..0000000 --- a/kittybox-rs/src/database/mod.rs +++ /dev/null @@ -1,793 +0,0 @@ -#![warn(missing_docs)] -use std::borrow::Cow; - -use async_trait::async_trait; -use kittybox_util::MentionType; - -mod file; -pub use crate::database::file::FileStorage; -use crate::micropub::MicropubUpdate; -#[cfg(feature = "postgres")] -mod postgres; -#[cfg(feature = "postgres")] -pub use postgres::PostgresStorage; - -#[cfg(test)] -mod memory; -#[cfg(test)] -pub use crate::database::memory::MemoryStorage; - -pub use kittybox_util::MicropubChannel; - -use self::settings::Setting; - -/// Enum representing different errors that might occur during the database query. -#[derive(Debug, Clone, Copy)] -pub enum ErrorKind { - /// Backend error (e.g. database connection error) - Backend, - /// Error due to insufficient contextual permissions for the query - PermissionDenied, - /// Error due to the database being unable to parse JSON returned from the backing storage. - /// Usually indicative of someone fiddling with the database manually instead of using proper tools. - JsonParsing, - /// - ErrorKind::NotFound - equivalent to a 404 error. 
Note, some requests return an Option, - /// in which case None is also equivalent to a 404. - NotFound, - /// The user's query or request to the database was malformed. Used whenever the database processes - /// the user's query directly, such as when editing posts inside of the database (e.g. Redis backend) - BadRequest, - /// the user's query collided with an in-flight request and needs to be retried - Conflict, - /// - ErrorKind::Other - when something so weird happens that it becomes undescribable. - Other, -} - -/// Settings that can be stored in the database. -pub mod settings { - mod private { - pub trait Sealed {} - } - - /// A trait for various settings that should be contained here. - /// - /// **Note**: this trait is sealed to prevent external - /// implementations, as it wouldn't make sense to add new settings - /// that aren't used by Kittybox itself. - pub trait Setting<'de>: private::Sealed + std::fmt::Debug + Default + Clone + serde::Serialize + serde::de::DeserializeOwned + /*From +*/ Send + Sync { - type Data: std::fmt::Debug + Send + Sync; - const ID: &'static str; - - /// Unwrap the setting type, returning owned data contained within. - fn into_inner(self) -> Self::Data; - /// Create a new instance of this type containing certain data. - fn new(data: Self::Data) -> Self; - } - - /// A website's title, shown in the header. 
- #[derive(Debug, serde::Deserialize, serde::Serialize, Clone, PartialEq, Eq)] - pub struct SiteName(String); - impl Default for SiteName { - fn default() -> Self { - Self("Kittybox".to_string()) - } - } - impl AsRef for SiteName { - fn as_ref(&self) -> &str { - self.0.as_str() - } - } - impl private::Sealed for SiteName {} - impl Setting<'_> for SiteName { - type Data = String; - const ID: &'static str = "site_name"; - - fn into_inner(self) -> String { - self.0 - } - fn new(data: Self::Data) -> Self { - Self(data) - } - } - impl SiteName { - fn from_str(data: &str) -> Self { - Self(data.to_owned()) - } - } - - /// Participation status in the IndieWeb Webring: https://πŸ•ΈπŸ’.ws/dashboard - #[derive(Debug, Default, serde::Deserialize, serde::Serialize, Clone, Copy, PartialEq, Eq)] - pub struct Webring(bool); - impl private::Sealed for Webring {} - impl Setting<'_> for Webring { - type Data = bool; - const ID: &'static str = "webring"; - - fn into_inner(self) -> Self::Data { - self.0 - } - - fn new(data: Self::Data) -> Self { - Self(data) - } - } -} - -/// Error signalled from the database. 
-#[derive(Debug)] -pub struct StorageError { - msg: std::borrow::Cow<'static, str>, - source: Option>, - kind: ErrorKind, -} - -impl std::error::Error for StorageError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - self.source - .as_ref() - .map(|e| e.as_ref() as &dyn std::error::Error) - } -} -impl From for StorageError { - fn from(err: serde_json::Error) -> Self { - Self { - msg: std::borrow::Cow::Owned(format!("{}", err)), - source: Some(Box::new(err)), - kind: ErrorKind::JsonParsing, - } - } -} -impl std::fmt::Display for StorageError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{}: {}", - match self.kind { - ErrorKind::Backend => "backend error", - ErrorKind::JsonParsing => "JSON parsing error", - ErrorKind::PermissionDenied => "permission denied", - ErrorKind::NotFound => "not found", - ErrorKind::BadRequest => "bad request", - ErrorKind::Conflict => "conflict with an in-flight request or existing data", - ErrorKind::Other => "generic storage layer error", - }, - self.msg - ) - } -} -impl serde::Serialize for StorageError { - fn serialize( - &self, - serializer: S, - ) -> std::result::Result { - serializer.serialize_str(&self.to_string()) - } -} -impl StorageError { - /// Create a new StorageError of an ErrorKind with a message. - pub fn new(kind: ErrorKind, msg: String) -> Self { - Self { - msg: Cow::Owned(msg), - source: None, - kind, - } - } - /// Create a new StorageError of an ErrorKind with a message from - /// a static string. - /// - /// This saves an allocation for a new string and is the preferred - /// way in case the error message doesn't change. - pub fn from_static(kind: ErrorKind, msg: &'static str) -> Self { - Self { - msg: Cow::Borrowed(msg), - source: None, - kind - } - } - /// Create a StorageError using another arbitrary Error as a source. 
- pub fn with_source( - kind: ErrorKind, - msg: std::borrow::Cow<'static, str>, - source: Box, - ) -> Self { - Self { - msg, - source: Some(source), - kind, - } - } - /// Get the kind of an error. - pub fn kind(&self) -> ErrorKind { - self.kind - } - /// Get the message as a string slice. - pub fn msg(&self) -> &str { - &self.msg - } -} - -/// A special Result type for the Micropub backing storage. -pub type Result = std::result::Result; - -/// A storage backend for the Micropub server. -/// -/// Implementations should note that all methods listed on this trait MUST be fully atomic -/// or lock the database so that write conflicts or reading half-written data should not occur. -#[async_trait] -pub trait Storage: std::fmt::Debug + Clone + Send + Sync { - /// Check if a post exists in the database. - async fn post_exists(&self, url: &str) -> Result; - - /// Load a post from the database in MF2-JSON format, deserialized from JSON. - async fn get_post(&self, url: &str) -> Result>; - - /// Save a post to the database as an MF2-JSON structure. - /// - /// Note that the `post` object MUST have `post["properties"]["uid"][0]` defined. - async fn put_post(&self, post: &'_ serde_json::Value, user: &'_ str) -> Result<()>; - - /// Add post to feed. Some database implementations might have optimized ways to do this. - #[tracing::instrument(skip(self))] - async fn add_to_feed(&self, feed: &'_ str, post: &'_ str) -> Result<()> { - tracing::debug!("Inserting {} into {} using `update_post`", post, feed); - self.update_post(feed, serde_json::from_value( - serde_json::json!({"add": {"children": [post]}})).unwrap() - ).await - } - /// Remove post from feed. Some database implementations might have optimized ways to do this. 
- #[tracing::instrument(skip(self))] - async fn remove_from_feed(&self, feed: &'_ str, post: &'_ str) -> Result<()> { - tracing::debug!("Removing {} into {} using `update_post`", post, feed); - self.update_post(feed, serde_json::from_value( - serde_json::json!({"delete": {"children": [post]}})).unwrap() - ).await - } - - /// Modify a post using an update object as defined in the - /// Micropub spec. - /// - /// Note to implementors: the update operation MUST be atomic and - /// SHOULD lock the database to prevent two clients overwriting - /// each other's changes or simply corrupting something. Rejecting - /// is allowed in case of concurrent updates if waiting for a lock - /// cannot be done. - async fn update_post(&self, url: &str, update: MicropubUpdate) -> Result<()>; - - /// Get a list of channels available for the user represented by - /// the `user` domain to write to. - async fn get_channels(&self, user: &'_ str) -> Result>; - - /// Fetch a feed at `url` and return an h-feed object containing - /// `limit` posts after a post by url `after`, filtering the content - /// in context of a user specified by `user` (or an anonymous user). - /// - /// This method MUST hydrate the `author` property with an h-card - /// from the database by replacing URLs with corresponding h-cards. - /// - /// When encountering posts which the `user` is not authorized to - /// access, this method MUST elide such posts (as an optimization - /// for the frontend) and not return them, but still return up to - /// `limit` posts (to not reveal the hidden posts' presence). - /// - /// Note for implementors: if you use streams to fetch posts in - /// parallel from the database, preferably make this method use a - /// connection pool to reduce overhead of creating a database - /// connection per post for parallel fetching. 
- async fn read_feed_with_limit( - &self, - url: &'_ str, - after: &'_ Option, - limit: usize, - user: &'_ Option, - ) -> Result>; - - /// Fetch a feed at `url` and return an h-feed object containing - /// `limit` posts after a `cursor` (filtering the content in - /// context of a user specified by `user`, or an anonymous user), - /// as well as a new cursor to paginate with. - /// - /// This method MUST hydrate the `author` property with an h-card - /// from the database by replacing URLs with corresponding h-cards. - /// - /// When encountering posts which the `user` is not authorized to - /// access, this method MUST elide such posts (as an optimization - /// for the frontend) and not return them, but still return an - /// amount of posts as close to `limit` as possible (to avoid - /// revealing the existence of the hidden post). - /// - /// Note for implementors: if you use streams to fetch posts in - /// parallel from the database, preferably make this method use a - /// connection pool to reduce overhead of creating a database - /// connection per post for parallel fetching. - async fn read_feed_with_cursor( - &self, - url: &'_ str, - cursor: Option<&'_ str>, - limit: usize, - user: Option<&'_ str> - ) -> Result)>>; - - /// Deletes a post from the database irreversibly. Must be idempotent. - async fn delete_post(&self, url: &'_ str) -> Result<()>; - - /// Gets a setting from the setting store and passes the result. - async fn get_setting, 'a>(&'_ self, user: &'_ str) -> Result; - - /// Commits a setting to the setting store. - async fn set_setting + 'a, 'a>(&self, user: &'a str, value: S::Data) -> Result<()>; - - /// Add (or update) a webmention on a certian post. - /// - /// The MF2 object describing the webmention content will always - /// be of type `h-cite`, and the `uid` property on the object will - /// always be set. - /// - /// The rationale for this function is as follows: webmentions - /// might be duplicated, and we need to deduplicate them first. 
As - /// we lack support for transactions and locking posts on the - /// database, the only way is to implement the operation on the - /// database itself. - /// - /// Besides, it may even allow for nice tricks like storing the - /// webmentions separately and rehydrating them on feed reads. - async fn add_or_update_webmention(&self, target: &str, mention_type: MentionType, mention: serde_json::Value) -> Result<()>; -} - -#[cfg(test)] -mod tests { - use super::settings; - - use super::{MicropubChannel, Storage}; - use kittybox_util::MentionType; - use serde_json::json; - - async fn test_basic_operations(backend: Backend) { - let post: serde_json::Value = json!({ - "type": ["h-entry"], - "properties": { - "content": ["Test content"], - "author": ["https://fireburn.ru/"], - "uid": ["https://fireburn.ru/posts/hello"], - "url": ["https://fireburn.ru/posts/hello", "https://fireburn.ru/posts/test"] - } - }); - let key = post["properties"]["uid"][0].as_str().unwrap().to_string(); - let alt_url = post["properties"]["url"][1].as_str().unwrap().to_string(); - - // Reading and writing - backend - .put_post(&post, "fireburn.ru") - .await - .unwrap(); - if let Some(returned_post) = backend.get_post(&key).await.unwrap() { - assert!(returned_post.is_object()); - assert_eq!( - returned_post["type"].as_array().unwrap().len(), - post["type"].as_array().unwrap().len() - ); - assert_eq!( - returned_post["type"].as_array().unwrap(), - post["type"].as_array().unwrap() - ); - let props: &serde_json::Map = - post["properties"].as_object().unwrap(); - for key in props.keys() { - assert_eq!( - returned_post["properties"][key].as_array().unwrap(), - post["properties"][key].as_array().unwrap() - ) - } - } else { - panic!("For some reason the backend did not return the post.") - } - // Check the alternative URL - it should return the same post - if let Ok(Some(returned_post)) = backend.get_post(&alt_url).await { - assert!(returned_post.is_object()); - assert_eq!( - 
returned_post["type"].as_array().unwrap().len(), - post["type"].as_array().unwrap().len() - ); - assert_eq!( - returned_post["type"].as_array().unwrap(), - post["type"].as_array().unwrap() - ); - let props: &serde_json::Map = - post["properties"].as_object().unwrap(); - for key in props.keys() { - assert_eq!( - returned_post["properties"][key].as_array().unwrap(), - post["properties"][key].as_array().unwrap() - ) - } - } else { - panic!("For some reason the backend did not return the post.") - } - } - - /// Note: this is merely a smoke check and is in no way comprehensive. - // TODO updates for feeds must update children using special logic - async fn test_update(backend: Backend) { - let post: serde_json::Value = json!({ - "type": ["h-entry"], - "properties": { - "content": ["Test content"], - "author": ["https://fireburn.ru/"], - "uid": ["https://fireburn.ru/posts/hello"], - "url": ["https://fireburn.ru/posts/hello", "https://fireburn.ru/posts/test"] - } - }); - let key = post["properties"]["uid"][0].as_str().unwrap().to_string(); - - // Reading and writing - backend - .put_post(&post, "fireburn.ru") - .await - .unwrap(); - - backend - .update_post( - &key, - serde_json::from_value(json!({ - "url": &key, - "add": { - "category": ["testing"], - }, - "replace": { - "content": ["Different test content"] - } - })).unwrap(), - ) - .await - .unwrap(); - - match backend.get_post(&key).await { - Ok(Some(returned_post)) => { - assert!(returned_post.is_object()); - assert_eq!( - returned_post["type"].as_array().unwrap().len(), - post["type"].as_array().unwrap().len() - ); - assert_eq!( - returned_post["type"].as_array().unwrap(), - post["type"].as_array().unwrap() - ); - assert_eq!( - returned_post["properties"]["content"][0].as_str().unwrap(), - "Different test content" - ); - assert_eq!( - returned_post["properties"]["category"].as_array().unwrap(), - &vec![json!("testing")] - ); - } - something_else => { - something_else - .expect("Shouldn't error") - .expect("Should 
have the post"); - } - } - } - - async fn test_get_channel_list(backend: Backend) { - let feed = json!({ - "type": ["h-feed"], - "properties": { - "name": ["Main Page"], - "author": ["https://fireburn.ru/"], - "uid": ["https://fireburn.ru/feeds/main"] - }, - "children": [] - }); - backend - .put_post(&feed, "fireburn.ru") - .await - .unwrap(); - let chans = backend.get_channels("fireburn.ru").await.unwrap(); - assert_eq!(chans.len(), 1); - assert_eq!( - chans[0], - MicropubChannel { - uid: "https://fireburn.ru/feeds/main".to_string(), - name: "Main Page".to_string() - } - ); - } - - async fn test_settings(backend: Backend) { - backend - .set_setting::( - "https://fireburn.ru/", - "Vika's Hideout".to_owned() - ) - .await - .unwrap(); - assert_eq!( - backend - .get_setting::("https://fireburn.ru/") - .await - .unwrap() - .as_ref(), - "Vika's Hideout" - ); - } - - fn gen_random_post(domain: &str) -> serde_json::Value { - use faker_rand::lorem::{Paragraphs, Word}; - - let uid = format!( - "https://{domain}/posts/{}-{}-{}", - rand::random::(), - rand::random::(), - rand::random::() - ); - - let time = chrono::Local::now().to_rfc3339(); - let post = json!({ - "type": ["h-entry"], - "properties": { - "content": [rand::random::().to_string()], - "uid": [&uid], - "url": [&uid], - "published": [&time] - } - }); - - post - } - - fn gen_random_mention(domain: &str, mention_type: MentionType, url: &str) -> serde_json::Value { - use faker_rand::lorem::{Paragraphs, Word}; - - let uid = format!( - "https://{domain}/posts/{}-{}-{}", - rand::random::(), - rand::random::(), - rand::random::() - ); - - let time = chrono::Local::now().to_rfc3339(); - let post = json!({ - "type": ["h-cite"], - "properties": { - "content": [rand::random::().to_string()], - "uid": [&uid], - "url": [&uid], - "published": [&time], - (match mention_type { - MentionType::Reply => "in-reply-to", - MentionType::Like => "like-of", - MentionType::Repost => "repost-of", - MentionType::Bookmark => "bookmark-of", - 
MentionType::Mention => unimplemented!(), - }): [url] - } - }); - - post - } - - async fn test_feed_pagination(backend: Backend) { - let posts = { - let mut posts = std::iter::from_fn( - || Some(gen_random_post("fireburn.ru")) - ) - .take(40) - .collect::>(); - - // Reverse the array so it's in reverse-chronological order - posts.reverse(); - - posts - }; - - let feed = json!({ - "type": ["h-feed"], - "properties": { - "name": ["Main Page"], - "author": ["https://fireburn.ru/"], - "uid": ["https://fireburn.ru/feeds/main"] - }, - }); - let key = feed["properties"]["uid"][0].as_str().unwrap(); - - backend - .put_post(&feed, "fireburn.ru") - .await - .unwrap(); - - for (i, post) in posts.iter().rev().enumerate() { - backend - .put_post(post, "fireburn.ru") - .await - .unwrap(); - backend.add_to_feed(key, post["properties"]["uid"][0].as_str().unwrap()).await.unwrap(); - } - - let limit: usize = 10; - - tracing::debug!("Starting feed reading..."); - let (result, cursor) = backend - .read_feed_with_cursor(key, None, limit, None) - .await - .unwrap() - .unwrap(); - - assert_eq!(result["children"].as_array().unwrap().len(), limit); - assert_eq!( - result["children"] - .as_array() - .unwrap() - .iter() - .map(|post| post["properties"]["uid"][0].as_str().unwrap()) - .collect::>() - [0..10], - posts - .iter() - .map(|post| post["properties"]["uid"][0].as_str().unwrap()) - .collect::>() - [0..10] - ); - - tracing::debug!("Continuing with cursor: {:?}", cursor); - let (result2, cursor2) = backend - .read_feed_with_cursor( - key, - cursor.as_deref(), - limit, - None, - ) - .await - .unwrap() - .unwrap(); - - assert_eq!( - result2["children"].as_array().unwrap()[0..10], - posts[10..20] - ); - - tracing::debug!("Continuing with cursor: {:?}", cursor); - let (result3, cursor3) = backend - .read_feed_with_cursor( - key, - cursor2.as_deref(), - limit, - None, - ) - .await - .unwrap() - .unwrap(); - - assert_eq!( - result3["children"].as_array().unwrap()[0..10], - posts[20..30] - ); - 
- tracing::debug!("Continuing with cursor: {:?}", cursor); - let (result4, _) = backend - .read_feed_with_cursor( - key, - cursor3.as_deref(), - limit, - None, - ) - .await - .unwrap() - .unwrap(); - - assert_eq!( - result4["children"].as_array().unwrap()[0..10], - posts[30..40] - ); - - // Regression test for #4 - // - // Results for a bogus cursor are undefined, so we aren't - // checking them. But the function at least shouldn't hang. - let nonsense_after = Some("1010101010"); - let _ = tokio::time::timeout(tokio::time::Duration::from_secs(10), async move { - backend - .read_feed_with_cursor(key, nonsense_after, limit, None) - .await - }) - .await - .expect("Operation should not hang: see https://gitlab.com/kittybox/kittybox/-/issues/4"); - } - - async fn test_webmention_addition(db: Backend) { - let post = gen_random_post("fireburn.ru"); - - db.put_post(&post, "fireburn.ru").await.unwrap(); - const TYPE: MentionType = MentionType::Reply; - - let target = post["properties"]["uid"][0].as_str().unwrap(); - let mut reply = gen_random_mention("aaronparecki.com", TYPE, target); - - let (read_post, _) = db.read_feed_with_cursor(target, None, 20, None).await.unwrap().unwrap(); - assert_eq!(post, read_post); - - db.add_or_update_webmention(target, TYPE, reply.clone()).await.unwrap(); - - let (read_post, _) = db.read_feed_with_cursor(target, None, 20, None).await.unwrap().unwrap(); - assert_eq!(read_post["properties"]["comment"][0], reply); - - reply["properties"]["content"][0] = json!(rand::random::().to_string()); - - db.add_or_update_webmention(target, TYPE, reply.clone()).await.unwrap(); - let (read_post, _) = db.read_feed_with_cursor(target, None, 20, None).await.unwrap().unwrap(); - assert_eq!(read_post["properties"]["comment"][0], reply); - } - - async fn test_pretty_permalinks(db: Backend) { - const PERMALINK: &str = "https://fireburn.ru/posts/pretty-permalink"; - - let post = { - let mut post = gen_random_post("fireburn.ru"); - let urls = 
post["properties"]["url"].as_array_mut().unwrap(); - urls.push(serde_json::Value::String( - PERMALINK.to_owned() - )); - - post - }; - db.put_post(&post, "fireburn.ru").await.unwrap(); - - for i in post["properties"]["url"].as_array().unwrap() { - let (read_post, _) = db.read_feed_with_cursor(i.as_str().unwrap(), None, 20, None).await.unwrap().unwrap(); - assert_eq!(read_post, post); - } - } - /// Automatically generates a test suite for - macro_rules! test_all { - ($func_name:ident, $mod_name:ident) => { - mod $mod_name { - $func_name!(test_basic_operations); - $func_name!(test_get_channel_list); - $func_name!(test_settings); - $func_name!(test_update); - $func_name!(test_feed_pagination); - $func_name!(test_webmention_addition); - $func_name!(test_pretty_permalinks); - } - }; - } - macro_rules! file_test { - ($func_name:ident) => { - #[tokio::test] - #[tracing_test::traced_test] - async fn $func_name() { - let tempdir = tempfile::tempdir().expect("Failed to create tempdir"); - let backend = super::super::FileStorage::new( - tempdir.path().to_path_buf() - ) - .await - .unwrap(); - super::$func_name(backend).await - } - }; - } - - macro_rules! postgres_test { - ($func_name:ident) => { - #[cfg(feature = "sqlx")] - #[sqlx::test] - #[tracing_test::traced_test] - async fn $func_name( - pool_opts: sqlx::postgres::PgPoolOptions, - connect_opts: sqlx::postgres::PgConnectOptions - ) -> Result<(), sqlx::Error> { - let db = { - //use sqlx::ConnectOptions; - //connect_opts.log_statements(log::LevelFilter::Debug); - - pool_opts.connect_with(connect_opts).await? 
- }; - let backend = super::super::PostgresStorage::from_pool(db).await.unwrap(); - - Ok(super::$func_name(backend).await) - } - }; - } - - test_all!(file_test, file); - test_all!(postgres_test, postgres); -} diff --git a/kittybox-rs/src/database/postgres/mod.rs b/kittybox-rs/src/database/postgres/mod.rs deleted file mode 100644 index 9176d12..0000000 --- a/kittybox-rs/src/database/postgres/mod.rs +++ /dev/null @@ -1,416 +0,0 @@ -#![allow(unused_variables)] -use std::borrow::Cow; -use std::str::FromStr; - -use kittybox_util::{MicropubChannel, MentionType}; -use sqlx::{PgPool, Executor}; -use crate::micropub::{MicropubUpdate, MicropubPropertyDeletion}; - -use super::settings::Setting; -use super::{Storage, Result, StorageError, ErrorKind}; - -static MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!(); - -impl From for StorageError { - fn from(value: sqlx::Error) -> Self { - Self::with_source( - super::ErrorKind::Backend, - Cow::Owned(format!("sqlx error: {}", &value)), - Box::new(value) - ) - } -} - -impl From for StorageError { - fn from(value: sqlx::migrate::MigrateError) -> Self { - Self::with_source( - super::ErrorKind::Backend, - Cow::Owned(format!("sqlx migration error: {}", &value)), - Box::new(value) - ) - } -} - -#[derive(Debug, Clone)] -pub struct PostgresStorage { - db: PgPool -} - -impl PostgresStorage { - /// Construct a new [`PostgresStorage`] from an URI string and run - /// migrations on the database. - /// - /// If `PGPASS_FILE` environment variable is defined, read the - /// password from the file at the specified path. If, instead, - /// the `PGPASS` environment variable is present, read the - /// password from it. - pub async fn new(uri: &str) -> Result { - tracing::debug!("Postgres URL: {uri}"); - let mut options = sqlx::postgres::PgConnectOptions::from_str(uri)? 
- .options([("search_path", "kittybox")]); - if let Ok(password_file) = std::env::var("PGPASS_FILE") { - let password = tokio::fs::read_to_string(password_file).await.unwrap(); - options = options.password(&password); - } else if let Ok(password) = std::env::var("PGPASS") { - options = options.password(&password) - } - Self::from_pool( - sqlx::postgres::PgPoolOptions::new() - .max_connections(50) - .connect_with(options) - .await? - ).await - - } - - /// Construct a [`PostgresStorage`] from a [`sqlx::PgPool`], - /// running appropriate migrations. - pub async fn from_pool(db: sqlx::PgPool) -> Result { - db.execute(sqlx::query("CREATE SCHEMA IF NOT EXISTS kittybox")).await?; - MIGRATOR.run(&db).await?; - Ok(Self { db }) - } -} - -#[async_trait::async_trait] -impl Storage for PostgresStorage { - #[tracing::instrument(skip(self))] - async fn post_exists(&self, url: &str) -> Result { - sqlx::query_as::<_, (bool,)>("SELECT exists(SELECT 1 FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? $1)") - .bind(url) - .fetch_one(&self.db) - .await - .map(|v| v.0) - .map_err(|err| err.into()) - } - - #[tracing::instrument(skip(self))] - async fn get_post(&self, url: &str) -> Result> { - sqlx::query_as::<_, (serde_json::Value,)>("SELECT mf2 FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? 
$1") - .bind(url) - .fetch_optional(&self.db) - .await - .map(|v| v.map(|v| v.0)) - .map_err(|err| err.into()) - - } - - #[tracing::instrument(skip(self))] - async fn put_post(&self, post: &'_ serde_json::Value, user: &'_ str) -> Result<()> { - tracing::debug!("New post: {}", post); - sqlx::query("INSERT INTO kittybox.mf2_json (uid, mf2, owner) VALUES ($1 #>> '{properties,uid,0}', $1, $2)") - .bind(post) - .bind(user) - .execute(&self.db) - .await - .map(|_| ()) - .map_err(Into::into) - } - - #[tracing::instrument(skip(self))] - async fn add_to_feed(&self, feed: &'_ str, post: &'_ str) -> Result<()> { - tracing::debug!("Inserting {} into {}", post, feed); - sqlx::query("INSERT INTO kittybox.children (parent, child) VALUES ($1, $2) ON CONFLICT DO NOTHING") - .bind(feed) - .bind(post) - .execute(&self.db) - .await - .map(|_| ()) - .map_err(Into::into) - } - - #[tracing::instrument(skip(self))] - async fn remove_from_feed(&self, feed: &'_ str, post: &'_ str) -> Result<()> { - sqlx::query("DELETE FROM kittybox.children WHERE parent = $1 AND child = $2") - .bind(feed) - .bind(post) - .execute(&self.db) - .await - .map_err(Into::into) - .map(|_| ()) - } - - #[tracing::instrument(skip(self))] - async fn add_or_update_webmention(&self, target: &str, mention_type: MentionType, mention: serde_json::Value) -> Result<()> { - let mut txn = self.db.begin().await?; - - let (uid, mut post) = sqlx::query_as::<_, (String, serde_json::Value)>("SELECT uid, mf2 FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? $1 FOR UPDATE") - .bind(target) - .fetch_optional(&mut *txn) - .await? - .ok_or(StorageError::from_static( - ErrorKind::NotFound, - "The specified post wasn't found in the database." 
- ))?; - - tracing::debug!("Loaded post for target {} with uid {}", target, uid); - - let key: &'static str = match mention_type { - MentionType::Reply => "comment", - MentionType::Like => "like", - MentionType::Repost => "repost", - MentionType::Bookmark => "bookmark", - MentionType::Mention => "mention", - }; - - tracing::debug!("Mention type -> key: {}", key); - - let mention_uid = mention["properties"]["uid"][0].clone(); - if let Some(values) = post["properties"][key].as_array_mut() { - for value in values.iter_mut() { - if value["properties"]["uid"][0] == mention_uid { - *value = mention; - break; - } - } - } else { - post["properties"][key] = serde_json::Value::Array(vec![mention]); - } - - sqlx::query("UPDATE kittybox.mf2_json SET mf2 = $2 WHERE uid = $1") - .bind(uid) - .bind(post) - .execute(&mut *txn) - .await?; - - txn.commit().await.map_err(Into::into) - } - #[tracing::instrument(skip(self))] - async fn update_post(&self, url: &'_ str, update: MicropubUpdate) -> Result<()> { - tracing::debug!("Updating post {}", url); - let mut txn = self.db.begin().await?; - let (uid, mut post) = sqlx::query_as::<_, (String, serde_json::Value)>("SELECT uid, mf2 FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? $1 FOR UPDATE") - .bind(url) - .fetch_optional(&mut *txn) - .await? - .ok_or(StorageError::from_static( - ErrorKind::NotFound, - "The specified post wasn't found in the database." 
- ))?; - - if let Some(MicropubPropertyDeletion::Properties(ref delete)) = update.delete { - if let Some(props) = post["properties"].as_object_mut() { - for key in delete { - props.remove(key); - } - } - } else if let Some(MicropubPropertyDeletion::Values(ref delete)) = update.delete { - if let Some(props) = post["properties"].as_object_mut() { - for (key, values) in delete { - if let Some(prop) = props.get_mut(key).and_then(serde_json::Value::as_array_mut) { - prop.retain(|v| { values.iter().all(|i| i != v) }) - } - } - } - } - if let Some(replace) = update.replace { - if let Some(props) = post["properties"].as_object_mut() { - for (key, value) in replace { - props.insert(key, serde_json::Value::Array(value)); - } - } - } - if let Some(add) = update.add { - if let Some(props) = post["properties"].as_object_mut() { - for (key, value) in add { - if let Some(prop) = props.get_mut(&key).and_then(serde_json::Value::as_array_mut) { - prop.extend_from_slice(value.as_slice()); - } else { - props.insert(key, serde_json::Value::Array(value)); - } - } - } - } - - sqlx::query("UPDATE kittybox.mf2_json SET mf2 = $2 WHERE uid = $1") - .bind(uid) - .bind(post) - .execute(&mut *txn) - .await?; - - txn.commit().await.map_err(Into::into) - } - - #[tracing::instrument(skip(self))] - async fn get_channels(&self, user: &'_ str) -> Result> { - /*sqlx::query_as::<_, MicropubChannel>("SELECT name, uid FROM kittybox.channels WHERE owner = $1") - .bind(user) - .fetch_all(&self.db) - .await - .map_err(|err| err.into())*/ - sqlx::query_as::<_, MicropubChannel>(r#"SELECT mf2 #>> '{properties,name,0}' as name, uid FROM kittybox.mf2_json WHERE '["h-feed"]'::jsonb @> mf2['type'] AND owner = $1"#) - .bind(user) - .fetch_all(&self.db) - .await - .map_err(|err| err.into()) - } - - #[tracing::instrument(skip(self))] - async fn read_feed_with_limit( - &self, - url: &'_ str, - after: &'_ Option, - limit: usize, - user: &'_ Option, - ) -> Result> { - let mut feed = match sqlx::query_as::<_, 
(serde_json::Value,)>(" -SELECT jsonb_set( - mf2, - '{properties,author,0}', - (SELECT mf2 FROM kittybox.mf2_json - WHERE uid = mf2 #>> '{properties,author,0}') -) FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? $1 -") - .bind(url) - .fetch_optional(&self.db) - .await? - .map(|v| v.0) - { - Some(feed) => feed, - None => return Ok(None) - }; - - let posts: Vec = { - let mut posts_iter = feed["children"] - .as_array() - .cloned() - .unwrap_or_default() - .into_iter() - .map(|s| s.as_str().unwrap().to_string()); - if let Some(after) = after { - for s in posts_iter.by_ref() { - if &s == after { - break; - } - } - }; - - posts_iter.take(limit).collect::>() - }; - feed["children"] = serde_json::Value::Array( - sqlx::query_as::<_, (serde_json::Value,)>(" -SELECT jsonb_set( - mf2, - '{properties,author,0}', - (SELECT mf2 FROM kittybox.mf2_json - WHERE uid = mf2 #>> '{properties,author,0}') -) FROM kittybox.mf2_json -WHERE uid = ANY($1) -ORDER BY mf2 #>> '{properties,published,0}' DESC -") - .bind(&posts[..]) - .fetch_all(&self.db) - .await? - .into_iter() - .map(|v| v.0) - .collect::>() - ); - - Ok(Some(feed)) - - } - - #[tracing::instrument(skip(self))] - async fn read_feed_with_cursor( - &self, - url: &'_ str, - cursor: Option<&'_ str>, - limit: usize, - user: Option<&'_ str> - ) -> Result)>> { - let mut txn = self.db.begin().await?; - sqlx::query("SET TRANSACTION ISOLATION LEVEL REPEATABLE READ, READ ONLY") - .execute(&mut *txn) - .await?; - tracing::debug!("Started txn: {:?}", txn); - let mut feed = match sqlx::query_scalar::<_, serde_json::Value>(" -SELECT kittybox.hydrate_author(mf2) FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? $1 -") - .bind(url) - .fetch_optional(&mut *txn) - .await? - { - Some(feed) => feed, - None => return Ok(None) - }; - - // Don't query for children if this isn't a feed. - // - // The second query is very long and will probably be extremely - // expensive. 
It's best to skip it on types where it doesn't make sense - // (Kittybox doesn't support rendering children on non-feeds) - if !feed["type"].as_array().unwrap().iter().any(|t| *t == serde_json::json!("h-feed")) { - return Ok(Some((feed, None))); - } - - feed["children"] = sqlx::query_scalar::<_, serde_json::Value>(" -SELECT kittybox.hydrate_author(mf2) FROM kittybox.mf2_json -INNER JOIN kittybox.children -ON mf2_json.uid = children.child -WHERE - children.parent = $1 - AND ( - ( - (mf2 #>> '{properties,visibility,0}') = 'public' - OR - NOT (mf2['properties'] ? 'visibility') - ) - OR - ( - $3 != null AND ( - mf2['properties']['audience'] ? $3 - OR mf2['properties']['author'] ? $3 - ) - ) - ) - AND ($4 IS NULL OR ((mf2_json.mf2 #>> '{properties,published,0}') < $4)) -ORDER BY (mf2_json.mf2 #>> '{properties,published,0}') DESC -LIMIT $2" - ) - .bind(url) - .bind(limit as i64) - .bind(user) - .bind(cursor) - .fetch_all(&mut *txn) - .await - .map(serde_json::Value::Array)?; - - let new_cursor = feed["children"].as_array().unwrap() - .last() - .map(|v| v["properties"]["published"][0].as_str().unwrap().to_owned()); - - txn.commit().await?; - - Ok(Some((feed, new_cursor))) - } - - #[tracing::instrument(skip(self))] - async fn delete_post(&self, url: &'_ str) -> Result<()> { - todo!() - } - - #[tracing::instrument(skip(self))] - async fn get_setting, 'a>(&'_ self, user: &'_ str) -> Result { - match sqlx::query_as::<_, (serde_json::Value,)>("SELECT kittybox.get_setting($1, $2)") - .bind(user) - .bind(S::ID) - .fetch_one(&self.db) - .await - { - Ok((value,)) => Ok(serde_json::from_value(value)?), - Err(err) => Err(err.into()) - } - } - - #[tracing::instrument(skip(self))] - async fn set_setting + 'a, 'a>(&self, user: &'a str, value: S::Data) -> Result<()> { - sqlx::query("SELECT kittybox.set_setting($1, $2, $3)") - .bind(user) - .bind(S::ID) - .bind(serde_json::to_value(S::new(value)).unwrap()) - .execute(&self.db) - .await - .map_err(Into::into) - .map(|_| ()) - } -} diff 
--git a/kittybox-rs/src/database/redis/edit_post.lua b/kittybox-rs/src/database/redis/edit_post.lua deleted file mode 100644 index a398f8d..0000000 --- a/kittybox-rs/src/database/redis/edit_post.lua +++ /dev/null @@ -1,93 +0,0 @@ -local posts = KEYS[1] -local update_desc = cjson.decode(ARGV[2]) -local post = cjson.decode(redis.call("HGET", posts, ARGV[1])) - -local delete_keys = {} -local delete_kvs = {} -local add_keys = {} - -if update_desc.replace ~= nil then - for k, v in pairs(update_desc.replace) do - table.insert(delete_keys, k) - add_keys[k] = v - end -end -if update_desc.delete ~= nil then - if update_desc.delete[0] == nil then - -- Table has string keys. Probably! - for k, v in pairs(update_desc.delete) do - delete_kvs[k] = v - end - else - -- Table has numeric keys. Probably! - for i, v in ipairs(update_desc.delete) do - table.insert(delete_keys, v) - end - end -end -if update_desc.add ~= nil then - for k, v in pairs(update_desc.add) do - add_keys[k] = v - end -end - -for i, v in ipairs(delete_keys) do - post["properties"][v] = nil - -- TODO delete URL links -end - -for k, v in pairs(delete_kvs) do - local index = -1 - if k == "children" then - for j, w in ipairs(post[k]) do - if w == v then - index = j - break - end - end - if index > -1 then - table.remove(post[k], index) - end - else - for j, w in ipairs(post["properties"][k]) do - if w == v then - index = j - break - end - end - if index > -1 then - table.remove(post["properties"][k], index) - -- TODO delete URL links - end - end -end - -for k, v in pairs(add_keys) do - if k == "children" then - if post["children"] == nil then - post["children"] = {} - end - for i, w in ipairs(v) do - table.insert(post["children"], 1, w) - end - else - if post["properties"][k] == nil then - post["properties"][k] = {} - end - for i, w in ipairs(v) do - table.insert(post["properties"][k], w) - end - if k == "url" then - redis.call("HSET", posts, v, cjson.encode({ see_other = post["properties"]["uid"][1] })) - elseif k 
== "channel" then - local feed = cjson.decode(redis.call("HGET", posts, v)) - table.insert(feed["children"], 1, post["properties"]["uid"][1]) - redis.call("HSET", posts, v, cjson.encode(feed)) - end - end -end - -local encoded = cjson.encode(post) -redis.call("SET", "debug", encoded) -redis.call("HSET", posts, post["properties"]["uid"][1], encoded) -return \ No newline at end of file diff --git a/kittybox-rs/src/database/redis/mod.rs b/kittybox-rs/src/database/redis/mod.rs deleted file mode 100644 index 39ee852..0000000 --- a/kittybox-rs/src/database/redis/mod.rs +++ /dev/null @@ -1,398 +0,0 @@ -use async_trait::async_trait; -use futures::stream; -use futures_util::FutureExt; -use futures_util::StreamExt; -use futures_util::TryStream; -use futures_util::TryStreamExt; -use lazy_static::lazy_static; -use log::error; -use mobc::Pool; -use mobc_redis::redis; -use mobc_redis::redis::AsyncCommands; -use mobc_redis::RedisConnectionManager; -use serde_json::json; -use std::time::Duration; - -use crate::database::{ErrorKind, MicropubChannel, Result, Storage, StorageError, filter_post}; -use crate::indieauth::User; - -struct RedisScripts { - edit_post: redis::Script, -} - -impl From for StorageError { - fn from(err: mobc_redis::redis::RedisError) -> Self { - Self { - msg: format!("{}", err), - source: Some(Box::new(err)), - kind: ErrorKind::Backend, - } - } -} -impl From> for StorageError { - fn from(err: mobc::Error) -> Self { - Self { - msg: format!("{}", err), - source: Some(Box::new(err)), - kind: ErrorKind::Backend, - } - } -} - -lazy_static! 
{ - static ref SCRIPTS: RedisScripts = RedisScripts { - edit_post: redis::Script::new(include_str!("./edit_post.lua")) - }; -} -/*#[cfg(feature(lazy_cell))] -static SCRIPTS_CELL: std::cell::LazyCell = std::cell::LazyCell::new(|| { - RedisScripts { - edit_post: redis::Script::new(include_str!("./edit_post.lua")) - } -});*/ - -#[derive(Clone)] -pub struct RedisStorage { - // note to future Vika: - // mobc::Pool is actually a fancy name for an Arc - // around a shared connection pool with a manager - // which makes it safe to implement [`Clone`] and - // not worry about new pools being suddenly made - // - // stop worrying and start coding, you dum-dum - redis: mobc::Pool, -} - -#[async_trait] -impl Storage for RedisStorage { - async fn get_setting<'a>(&self, setting: &'a str, user: &'a str) -> Result { - let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; - Ok(conn - .hget::(format!("settings_{}", user), setting) - .await?) - } - - async fn set_setting<'a>(&self, setting: &'a str, user: &'a str, value: &'a str) -> Result<()> { - let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; - Ok(conn - .hset::(format!("settings_{}", user), setting, value) - .await?) - } - - async fn delete_post<'a>(&self, url: &'a str) -> Result<()> { - let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; - Ok(conn.hdel::<&str, &str, ()>("posts", url).await?) - } - - async fn post_exists(&self, url: &str) -> Result { - let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; - Ok(conn.hexists::<&str, &str, bool>("posts", url).await?) 
- } - - async fn get_post(&self, url: &str) -> Result> { - let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; - match conn - .hget::<&str, &str, Option>("posts", url) - .await? - { - Some(val) => { - let parsed = serde_json::from_str::(&val)?; - if let Some(new_url) = parsed["see_other"].as_str() { - match conn - .hget::<&str, &str, Option>("posts", new_url) - .await? - { - Some(val) => Ok(Some(serde_json::from_str::(&val)?)), - None => Ok(None), - } - } else { - Ok(Some(parsed)) - } - } - None => Ok(None), - } - } - - async fn get_channels(&self, user: &User) -> Result> { - let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; - let channels = conn - .smembers::>("channels_".to_string() + user.me.as_str()) - .await?; - // TODO: use streams here instead of this weird thing... how did I even write this?! 
- Ok(futures_util::future::join_all( - channels - .iter() - .map(|channel| { - self.get_post(channel).map(|result| result.unwrap()).map( - |post: Option| { - post.map(|post| MicropubChannel { - uid: post["properties"]["uid"][0].as_str().unwrap().to_string(), - name: post["properties"]["name"][0].as_str().unwrap().to_string(), - }) - }, - ) - }) - .collect::>(), - ) - .await - .into_iter() - .flatten() - .collect::>()) - } - - async fn put_post<'a>(&self, post: &'a serde_json::Value, user: &'a str) -> Result<()> { - let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; - let key: &str; - match post["properties"]["uid"][0].as_str() { - Some(uid) => key = uid, - None => { - return Err(StorageError::new( - ErrorKind::BadRequest, - "post doesn't have a UID", - )) - } - } - conn.hset::<&str, &str, String, ()>("posts", key, post.to_string()) - .await?; - if post["properties"]["url"].is_array() { - for url in post["properties"]["url"] - .as_array() - .unwrap() - .iter() - .map(|i| i.as_str().unwrap().to_string()) - { - if url != key && url.starts_with(user) { - conn.hset::<&str, &str, String, ()>( - "posts", - &url, - json!({ "see_other": key }).to_string(), - ) - .await?; - } - } - } - if post["type"] - .as_array() - .unwrap() - .iter() - .any(|i| i == "h-feed") - { - // This is a feed. Add it to the channels array if it's not already there. - conn.sadd::( - "channels_".to_string() + post["properties"]["author"][0].as_str().unwrap(), - key, - ) - .await? - } - Ok(()) - } - - async fn read_feed_with_limit<'a>( - &self, - url: &'a str, - after: &'a Option, - limit: usize, - user: &'a Option, - ) -> Result> { - let mut conn = self.redis.get().await?; - let mut feed; - match conn - .hget::<&str, &str, Option>("posts", url) - .await - .map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))? 
- { - Some(post) => feed = serde_json::from_str::(&post)?, - None => return Ok(None), - } - if feed["see_other"].is_string() { - match conn - .hget::<&str, &str, Option>("posts", feed["see_other"].as_str().unwrap()) - .await? - { - Some(post) => feed = serde_json::from_str::(&post)?, - None => return Ok(None), - } - } - if let Some(post) = filter_post(feed, user) { - feed = post - } else { - return Err(StorageError::new( - ErrorKind::PermissionDenied, - "specified user cannot access this post", - )); - } - if feed["children"].is_array() { - let children = feed["children"].as_array().unwrap(); - let mut posts_iter = children.iter().map(|i| i.as_str().unwrap().to_string()); - if after.is_some() { - loop { - let i = posts_iter.next(); - if &i == after { - break; - } - } - } - async fn fetch_post_for_feed(url: String) -> Option { - return Some(serde_json::json!({})); - } - let posts = stream::iter(posts_iter) - .map(|url: String| async move { - return Ok(fetch_post_for_feed(url).await); - /*match self.redis.get().await { - Ok(mut conn) => { - match conn.hget::<&str, &str, Option>("posts", &url).await { - Ok(post) => match post { - Some(post) => { - Ok(Some(serde_json::from_str(&post)?)) - } - // Happens because of a broken link (result of an improper deletion?) 
- None => Ok(None), - }, - Err(err) => Err(StorageError::with_source(ErrorKind::Backend, "Error executing a Redis command", Box::new(err))) - } - } - Err(err) => Err(StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(err))) - }*/ - }) - // TODO: determine the optimal value for this buffer - // It will probably depend on how often can you encounter a private post on the page - // It shouldn't be too large, or we'll start fetching too many posts from the database - // It MUST NOT be larger than the typical page size - // It MUST NOT be a significant amount of the connection pool size - //.buffered(std::cmp::min(3, limit)) - // Hack to unwrap the Option and sieve out broken links - // Broken links return None, and Stream::filter_map skips all Nones. - // I wonder if one can use try_flatten() here somehow akin to iters - .try_filter_map(|post| async move { Ok(post) }) - .try_filter_map(|post| async move { - Ok(filter_post(post, user)) - }) - .take(limit); - match posts.try_collect::>().await { - Ok(posts) => feed["children"] = json!(posts), - Err(err) => { - let e = StorageError::with_source( - ErrorKind::Other, - "An error was encountered while processing the feed", - Box::new(err) - ); - error!("Error while assembling feed: {}", e); - return Err(e); - } - } - } - return Ok(Some(feed)); - } - - async fn update_post<'a>(&self, mut url: &'a str, update: serde_json::Value) -> Result<()> { - let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; - if !conn - .hexists::<&str, &str, bool>("posts", url) - .await - .unwrap() - { - return Err(StorageError::new( - ErrorKind::NotFound, - "can't edit a non-existent post", - )); - } - let post: serde_json::Value = - serde_json::from_str(&conn.hget::<&str, &str, String>("posts", url).await?)?; - if let Some(new_url) = post["see_other"].as_str() { - url = new_url - } - Ok(SCRIPTS - 
.edit_post - .key("posts") - .arg(url) - .arg(update.to_string()) - .invoke_async::<_, ()>(&mut conn as &mut redis::aio::Connection) - .await?) - } -} - -impl RedisStorage { - /// Create a new RedisDatabase that will connect to Redis at `redis_uri` to store data. - pub async fn new(redis_uri: String) -> Result { - match redis::Client::open(redis_uri) { - Ok(client) => Ok(Self { - redis: Pool::builder() - .max_open(20) - .max_idle(5) - .get_timeout(Some(Duration::from_secs(3))) - .max_lifetime(Some(Duration::from_secs(120))) - .build(RedisConnectionManager::new(client)), - }), - Err(e) => Err(e.into()), - } - } - - pub async fn conn(&self) -> Result> { - self.redis.get().await.map_err(|e| StorageError::with_source( - ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e) - )) - } -} - -#[cfg(test)] -pub mod tests { - use mobc_redis::redis; - use std::process; - use std::time::Duration; - - pub struct RedisInstance { - // We just need to hold on to it so it won't get dropped and remove the socket - _tempdir: tempdir::TempDir, - uri: String, - child: std::process::Child, - } - impl Drop for RedisInstance { - fn drop(&mut self) { - self.child.kill().expect("Failed to kill the child!"); - } - } - impl RedisInstance { - pub fn uri(&self) -> &str { - &self.uri - } - } - - pub async fn get_redis_instance() -> RedisInstance { - let tempdir = tempdir::TempDir::new("redis").expect("failed to create tempdir"); - let socket = tempdir.path().join("redis.sock"); - let redis_child = process::Command::new("redis-server") - .current_dir(&tempdir) - .arg("--port") - .arg("0") - .arg("--unixsocket") - .arg(&socket) - .stdout(process::Stdio::null()) - .stderr(process::Stdio::null()) - .spawn() - .expect("Failed to spawn Redis"); - println!("redis+unix:///{}", socket.to_str().unwrap()); - let uri = format!("redis+unix:///{}", socket.to_str().unwrap()); - // There should be a slight delay, we need to wait for Redis to spin up - let client = 
redis::Client::open(uri.clone()).unwrap(); - let millisecond = Duration::from_millis(1); - let mut retries: usize = 0; - const MAX_RETRIES: usize = 60 * 1000/*ms*/; - while let Err(err) = client.get_connection() { - if err.is_connection_refusal() { - async_std::task::sleep(millisecond).await; - retries += 1; - if retries > MAX_RETRIES { - panic!("Timeout waiting for Redis, last error: {}", err); - } - } else { - panic!("Could not connect: {}", err); - } - } - - RedisInstance { - uri, - child: redis_child, - _tempdir: tempdir, - } - } -} diff --git a/kittybox-rs/src/frontend/login.rs b/kittybox-rs/src/frontend/login.rs deleted file mode 100644 index c693899..0000000 --- a/kittybox-rs/src/frontend/login.rs +++ /dev/null @@ -1,333 +0,0 @@ -use http_types::Mime; -use log::{debug, error}; -use rand::Rng; -use serde::{Deserialize, Serialize}; -use sha2::{Digest, Sha256}; -use std::convert::TryInto; -use std::str::FromStr; - -use crate::frontend::templates::Template; -use crate::frontend::{FrontendError, IndiewebEndpoints}; -use crate::{database::Storage, ApplicationState}; -use kittybox_frontend_renderer::LoginPage; - -pub async fn form(req: Request>) -> Result { - let owner = req.url().origin().ascii_serialization() + "/"; - let storage = &req.state().storage; - let authorization_endpoint = req.state().authorization_endpoint.to_string(); - let token_endpoint = req.state().token_endpoint.to_string(); - let blog_name = storage - .get_setting("site_name", &owner) - .await - .unwrap_or_else(|_| "Kitty Box!".to_string()); - let feeds = storage.get_channels(&owner).await.unwrap_or_default(); - - Ok(Response::builder(200) - .body( - Template { - title: "Sign in with IndieAuth", - blog_name: &blog_name, - endpoints: IndiewebEndpoints { - authorization_endpoint, - token_endpoint, - webmention: None, - microsub: None, - }, - feeds, - user: req.session().get("user"), - content: LoginPage {}.to_string(), - } - .to_string(), - ) - .content_type("text/html; charset=utf-8") - 
.build()) -} - -#[derive(Serialize, Deserialize)] -struct LoginForm { - url: String, -} - -#[derive(Serialize, Deserialize)] -struct IndieAuthClientState { - /// A random value to protect from CSRF attacks. - nonce: String, - /// The user's initial "me" value. - me: String, - /// Authorization endpoint used. - authorization_endpoint: String, -} - -#[derive(Serialize, Deserialize)] -struct IndieAuthRequestParams { - response_type: String, // can only have "code". TODO make an enum - client_id: String, // always a URL. TODO consider making a URL - redirect_uri: surf::Url, // callback URI for IndieAuth - state: String, // CSRF protection, should include randomness and be passed through - code_challenge: String, // base64-encoded PKCE challenge - code_challenge_method: String, // usually "S256". TODO make an enum - scope: Option, // oAuth2 scopes to grant, - me: surf::Url, // User's entered profile URL -} - -/// Handle login requests. Find the IndieAuth authorization endpoint and redirect to it. -pub async fn handler(mut req: Request>) -> Result { - let content_type = req.content_type(); - if content_type.is_none() { - return Err(FrontendError::with_code(400, "Use the login form, Luke.").into()); - } - if content_type.unwrap() != Mime::from_str("application/x-www-form-urlencoded").unwrap() { - return Err( - FrontendError::with_code(400, "Login form results must be a urlencoded form").into(), - ); - } - - let form = req.body_form::().await?; // FIXME check if it returns 400 or 500 on error - let homepage_uri = surf::Url::parse(&form.url)?; - let http = &req.state().http_client; - - let mut fetch_response = http.get(&homepage_uri).send().await?; - if fetch_response.status() != 200 { - return Err(FrontendError::with_code( - 500, - "Error fetching your authorization endpoint. 
Check if your website's okay.", - ) - .into()); - } - - let mut authorization_endpoint: Option = None; - if let Some(links) = fetch_response.header("Link") { - // NOTE: this is the same Link header parser used in src/micropub/post.rs:459. - // One should refactor it to a function to use independently and improve later - for link in links.iter().flat_map(|i| i.as_str().split(',')) { - debug!("Trying to match {} as authorization_endpoint", link); - let mut split_link = link.split(';'); - - match split_link.next() { - Some(uri) => { - if let Some(uri) = uri.strip_prefix('<').and_then(|uri| uri.strip_suffix('>')) { - debug!("uri: {}", uri); - for prop in split_link { - debug!("prop: {}", prop); - let lowercased = prop.to_ascii_lowercase(); - let trimmed = lowercased.trim(); - if trimmed == "rel=\"authorization_endpoint\"" - || trimmed == "rel=authorization_endpoint" - { - if let Ok(endpoint) = homepage_uri.join(uri) { - debug!( - "Found authorization endpoint {} for user {}", - endpoint, - homepage_uri.as_str() - ); - authorization_endpoint = Some(endpoint); - break; - } - } - } - } - } - None => continue, - } - } - } - // If the authorization_endpoint is still not found after the Link parsing gauntlet, - // bring out the big guns and parse HTML to find it. - if authorization_endpoint.is_none() { - let body = fetch_response.body_string().await?; - let pattern = - easy_scraper::Pattern::new(r#""#) - .expect("Cannot parse the pattern for authorization_endpoint"); - let matches = pattern.matches(&body); - debug!("Matches for authorization_endpoint in HTML: {:?}", matches); - if !matches.is_empty() { - if let Ok(endpoint) = homepage_uri.join(&matches[0]["url"]) { - debug!( - "Found authorization endpoint {} for user {}", - endpoint, - homepage_uri.as_str() - ); - authorization_endpoint = Some(endpoint) - } - } - }; - // If even after this the authorization endpoint is still not found, bail out. 
- if authorization_endpoint.is_none() { - error!( - "Couldn't find authorization_endpoint for {}", - homepage_uri.as_str() - ); - return Err(FrontendError::with_code( - 400, - "Your website doesn't support the IndieAuth protocol.", - ) - .into()); - } - let mut authorization_endpoint: surf::Url = authorization_endpoint.unwrap(); - let mut rng = rand::thread_rng(); - let state: String = data_encoding::BASE64URL.encode( - serde_urlencoded::to_string(IndieAuthClientState { - nonce: (0..8) - .map(|_| { - let idx = rng.gen_range(0..INDIEAUTH_PKCE_CHARSET.len()); - INDIEAUTH_PKCE_CHARSET[idx] as char - }) - .collect(), - me: homepage_uri.to_string(), - authorization_endpoint: authorization_endpoint.to_string(), - })? - .as_bytes(), - ); - // PKCE code generation - let code_verifier: String = (0..128) - .map(|_| { - let idx = rng.gen_range(0..INDIEAUTH_PKCE_CHARSET.len()); - INDIEAUTH_PKCE_CHARSET[idx] as char - }) - .collect(); - let mut hasher = Sha256::new(); - hasher.update(code_verifier.as_bytes()); - let code_challenge: String = data_encoding::BASE64URL.encode(&hasher.finalize()); - - authorization_endpoint.set_query(Some(&serde_urlencoded::to_string( - IndieAuthRequestParams { - response_type: "code".to_string(), - client_id: req.url().origin().ascii_serialization(), - redirect_uri: req.url().join("login/callback")?, - state: state.clone(), - code_challenge, - code_challenge_method: "S256".to_string(), - scope: Some("profile".to_string()), - me: homepage_uri, - }, - )?)); - - let cookies = vec![ - format!( - r#"indieauth_state="{}"; Same-Site: None; Secure; Max-Age: 600"#, - state - ), - format!( - r#"indieauth_code_verifier="{}"; Same-Site: None; Secure; Max-Age: 600"#, - code_verifier - ), - ]; - - let cookie_header = cookies - .iter() - .map(|i| -> http_types::headers::HeaderValue { (i as &str).try_into().unwrap() }) - .collect::>(); - - Ok(Response::builder(302) - .header("Location", authorization_endpoint.to_string()) - .header("Set-Cookie", &*cookie_header) - 
.build()) -} - -const INDIEAUTH_PKCE_CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\ - abcdefghijklmnopqrstuvwxyz\ - 1234567890-._~"; - -#[derive(Deserialize)] -struct IndieAuthCallbackResponse { - code: Option, - error: Option, - error_description: Option, - #[allow(dead_code)] - error_uri: Option, - // This needs to be further decoded to receive state back and will always be present - state: String, -} - -impl IndieAuthCallbackResponse { - fn is_successful(&self) -> bool { - self.code.is_some() - } -} - -#[derive(Serialize, Deserialize)] -struct IndieAuthCodeRedeem { - grant_type: String, - code: String, - client_id: String, - redirect_uri: String, - code_verifier: String, -} - -#[derive(Serialize, Deserialize)] -struct IndieWebProfile { - name: Option, - url: Option, - email: Option, - photo: Option, -} - -#[derive(Serialize, Deserialize)] -struct IndieAuthResponse { - me: String, - scope: Option, - access_token: Option, - token_type: Option, - profile: Option, -} - -/// Handle IndieAuth parameters, fetch the final h-card and redirect the user to the homepage. -pub async fn callback(mut req: Request>) -> Result { - let params: IndieAuthCallbackResponse = req.query()?; - let http: &surf::Client = &req.state().http_client; - let origin = req.url().origin().ascii_serialization(); - - if req.cookie("indieauth_state").unwrap().value() != params.state { - return Err(FrontendError::with_code(400, "The state doesn't match. A possible CSRF attack was prevented. 
Please try again later.").into()); - } - let state: IndieAuthClientState = - serde_urlencoded::from_bytes(&data_encoding::BASE64URL.decode(params.state.as_bytes())?)?; - - if !params.is_successful() { - return Err(FrontendError::with_code( - 400, - &format!( - "The authorization endpoint indicated a following error: {:?}: {:?}", - ¶ms.error, ¶ms.error_description - ), - ) - .into()); - } - - let authorization_endpoint = surf::Url::parse(&state.authorization_endpoint).unwrap(); - let mut code_response = http - .post(authorization_endpoint) - .body_string(serde_urlencoded::to_string(IndieAuthCodeRedeem { - grant_type: "authorization_code".to_string(), - code: params.code.unwrap().to_string(), - client_id: origin.to_string(), - redirect_uri: origin + "/login/callback", - code_verifier: req - .cookie("indieauth_code_verifier") - .unwrap() - .value() - .to_string(), - })?) - .header("Content-Type", "application/x-www-form-urlencoded") - .header("Accept", "application/json") - .send() - .await?; - - if code_response.status() != 200 { - return Err(FrontendError::with_code( - code_response.status(), - &format!( - "Authorization endpoint returned an error when redeeming the code: {}", - code_response.body_string().await? 
- ), - ) - .into()); - } - - let json: IndieAuthResponse = code_response.body_json().await?; - let session = req.session_mut(); - session.insert("user", &json.me)?; - - // TODO redirect to the page user came from - Ok(Response::builder(302).header("Location", "/").build()) -} diff --git a/kittybox-rs/src/frontend/mod.rs b/kittybox-rs/src/frontend/mod.rs deleted file mode 100644 index 7a43532..0000000 --- a/kittybox-rs/src/frontend/mod.rs +++ /dev/null @@ -1,404 +0,0 @@ -use crate::database::{Storage, StorageError}; -use axum::{ - extract::{Host, Path, Query}, - http::{StatusCode, Uri}, - response::IntoResponse, - Extension, -}; -use futures_util::FutureExt; -use serde::Deserialize; -use std::convert::TryInto; -use tracing::{debug, error}; -//pub mod login; -pub mod onboarding; - -use kittybox_frontend_renderer::{ - Entry, Feed, VCard, - ErrorPage, Template, MainPage, - POSTS_PER_PAGE -}; -pub use kittybox_frontend_renderer::assets::statics; - -#[derive(Debug, Deserialize)] -pub struct QueryParams { - after: Option, -} - -#[derive(Debug)] -struct FrontendError { - msg: String, - source: Option>, - code: StatusCode, -} - -impl FrontendError { - pub fn with_code(code: C, msg: &str) -> Self - where - C: TryInto, - { - Self { - msg: msg.to_string(), - source: None, - code: code.try_into().unwrap_or(StatusCode::INTERNAL_SERVER_ERROR), - } - } - pub fn msg(&self) -> &str { - &self.msg - } - pub fn code(&self) -> StatusCode { - self.code - } -} - -impl From for FrontendError { - fn from(err: StorageError) -> Self { - Self { - msg: "Database error".to_string(), - source: Some(Box::new(err)), - code: StatusCode::INTERNAL_SERVER_ERROR, - } - } -} - -impl std::error::Error for FrontendError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - self.source - .as_ref() - .map(|e| e.as_ref() as &(dyn std::error::Error + 'static)) - } -} - -impl std::fmt::Display for FrontendError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, 
"{}", self.msg)?; - if let Some(err) = std::error::Error::source(&self) { - write!(f, ": {}", err)?; - } - - Ok(()) - } -} - -/// Filter the post according to the value of `user`. -/// -/// Anonymous users cannot view private posts and protected locations; -/// Logged-in users can only view private posts targeted at them; -/// Logged-in users can't view private location data -#[tracing::instrument(skip(post), fields(post = %post))] -pub fn filter_post( - mut post: serde_json::Value, - user: Option<&str>, -) -> Option { - if post["properties"]["deleted"][0].is_string() { - tracing::debug!("Deleted post; returning tombstone instead"); - return Some(serde_json::json!({ - "type": post["type"], - "properties": { - "deleted": post["properties"]["deleted"] - } - })); - } - let empty_vec: Vec = vec![]; - let author_list = post["properties"]["author"] - .as_array() - .unwrap_or(&empty_vec) - .iter() - .map(|i| -> &str { - match i { - serde_json::Value::String(ref author) => author.as_str(), - mf2 => mf2["properties"]["uid"][0].as_str().unwrap() - } - }).collect::>(); - let visibility = post["properties"]["visibility"][0] - .as_str() - .unwrap_or("public"); - let audience = { - let mut audience = author_list.clone(); - audience.extend(post["properties"]["audience"] - .as_array() - .unwrap_or(&empty_vec) - .iter() - .map(|i| i.as_str().unwrap())); - - audience - }; - tracing::debug!("post audience = {:?}", audience); - if (visibility == "private" && !audience.iter().any(|i| Some(*i) == user)) - || (visibility == "protected" && user.is_none()) - { - return None; - } - if post["properties"]["location"].is_array() { - let location_visibility = post["properties"]["location-visibility"][0] - .as_str() - .unwrap_or("private"); - tracing::debug!("Post contains location, location privacy = {}", location_visibility); - let mut author = post["properties"]["author"] - .as_array() - .unwrap_or(&empty_vec) - .iter() - .map(|i| i.as_str().unwrap()); - if (location_visibility == "private" 
&& !author.any(|i| Some(i) == user)) - || (location_visibility == "protected" && user.is_none()) - { - post["properties"] - .as_object_mut() - .unwrap() - .remove("location"); - } - } - - match post["properties"]["author"].take() { - serde_json::Value::Array(children) => { - post["properties"]["author"] = serde_json::Value::Array( - children - .into_iter() - .filter_map(|post| if post.is_string() { - Some(post) - } else { - filter_post(post, user) - }) - .collect::>() - ); - }, - serde_json::Value::Null => {}, - other => post["properties"]["author"] = other - } - - match post["children"].take() { - serde_json::Value::Array(children) => { - post["children"] = serde_json::Value::Array( - children - .into_iter() - .filter_map(|post| filter_post(post, user)) - .collect::>() - ); - }, - serde_json::Value::Null => {}, - other => post["children"] = other - } - Some(post) -} - -async fn get_post_from_database( - db: &S, - url: &str, - after: Option, - user: &Option, -) -> std::result::Result<(serde_json::Value, Option), FrontendError> { - match db - .read_feed_with_cursor(url, after.as_deref(), POSTS_PER_PAGE, user.as_deref()) - .await - { - Ok(result) => match result { - Some((post, cursor)) => match filter_post(post, user.as_deref()) { - Some(post) => Ok((post, cursor)), - None => { - // TODO: Authentication - if user.is_some() { - Err(FrontendError::with_code( - StatusCode::FORBIDDEN, - "User authenticated AND forbidden to access this resource", - )) - } else { - Err(FrontendError::with_code( - StatusCode::UNAUTHORIZED, - "User needs to authenticate themselves", - )) - } - } - } - None => Err(FrontendError::with_code( - StatusCode::NOT_FOUND, - "Post not found in the database", - )), - }, - Err(err) => match err.kind() { - crate::database::ErrorKind::PermissionDenied => { - // TODO: Authentication - if user.is_some() { - Err(FrontendError::with_code( - StatusCode::FORBIDDEN, - "User authenticated AND forbidden to access this resource", - )) - } else { - 
Err(FrontendError::with_code( - StatusCode::UNAUTHORIZED, - "User needs to authenticate themselves", - )) - } - } - _ => Err(err.into()), - }, - } -} - -#[tracing::instrument(skip(db))] -pub async fn homepage( - Host(host): Host, - Query(query): Query, - Extension(db): Extension, -) -> impl IntoResponse { - let user = None; // TODO authentication - let path = format!("https://{}/", host); - let feed_path = format!("https://{}/feeds/main", host); - - match tokio::try_join!( - get_post_from_database(&db, &path, None, &user), - get_post_from_database(&db, &feed_path, query.after, &user) - ) { - Ok(((hcard, _), (hfeed, cursor))) => { - // Here, we know those operations can't really fail - // (or it'll be a transient failure that will show up on - // other requests anyway if it's serious...) - // - // btw is it more efficient to fetch these in parallel? - let (blogname, webring, channels) = tokio::join!( - db.get_setting::(&host) - .map(Result::unwrap_or_default), - - db.get_setting::(&host) - .map(Result::unwrap_or_default), - - db.get_channels(&host).map(|i| i.unwrap_or_default()) - ); - // Render the homepage - ( - StatusCode::OK, - [( - axum::http::header::CONTENT_TYPE, - r#"text/html; charset="utf-8""#, - )], - Template { - title: blogname.as_ref(), - blog_name: blogname.as_ref(), - feeds: channels, - user, - content: MainPage { - feed: &hfeed, - card: &hcard, - cursor: cursor.as_deref(), - webring: crate::database::settings::Setting::into_inner(webring) - } - .to_string(), - } - .to_string(), - ) - } - Err(err) => { - if err.code == StatusCode::NOT_FOUND { - debug!("Transferring to onboarding..."); - // Transfer to onboarding - ( - StatusCode::FOUND, - [(axum::http::header::LOCATION, "/.kittybox/onboarding")], - String::default(), - ) - } else { - error!("Error while fetching h-card and/or h-feed: {}", err); - // Return the error - let (blogname, channels) = tokio::join!( - db.get_setting::(&host) - .map(Result::unwrap_or_default), - - 
db.get_channels(&host).map(|i| i.unwrap_or_default()) - ); - - ( - err.code(), - [( - axum::http::header::CONTENT_TYPE, - r#"text/html; charset="utf-8""#, - )], - Template { - title: blogname.as_ref(), - blog_name: blogname.as_ref(), - feeds: channels, - user, - content: ErrorPage { - code: err.code(), - msg: Some(err.msg().to_string()), - } - .to_string(), - } - .to_string(), - ) - } - } - } -} - -#[tracing::instrument(skip(db))] -pub async fn catchall( - Extension(db): Extension, - Host(host): Host, - Query(query): Query, - uri: Uri, -) -> impl IntoResponse { - let user = None; // TODO authentication - let path = url::Url::parse(&format!("https://{}/", host)) - .unwrap() - .join(uri.path()) - .unwrap(); - - match get_post_from_database(&db, path.as_str(), query.after, &user).await { - Ok((post, cursor)) => { - let (blogname, channels) = tokio::join!( - db.get_setting::(&host) - .map(Result::unwrap_or_default), - - db.get_channels(&host).map(|i| i.unwrap_or_default()) - ); - // Render the homepage - ( - StatusCode::OK, - [( - axum::http::header::CONTENT_TYPE, - r#"text/html; charset="utf-8""#, - )], - Template { - title: blogname.as_ref(), - blog_name: blogname.as_ref(), - feeds: channels, - user, - content: match post.pointer("/type/0").and_then(|i| i.as_str()) { - Some("h-entry") => Entry { post: &post }.to_string(), - Some("h-feed") => Feed { feed: &post, cursor: cursor.as_deref() }.to_string(), - Some("h-card") => VCard { card: &post }.to_string(), - unknown => { - unimplemented!("Template for MF2-JSON type {:?}", unknown) - } - }, - } - .to_string(), - ) - } - Err(err) => { - let (blogname, channels) = tokio::join!( - db.get_setting::(&host) - .map(Result::unwrap_or_default), - - db.get_channels(&host).map(|i| i.unwrap_or_default()) - ); - ( - err.code(), - [( - axum::http::header::CONTENT_TYPE, - r#"text/html; charset="utf-8""#, - )], - Template { - title: blogname.as_ref(), - blog_name: blogname.as_ref(), - feeds: channels, - user, - content: ErrorPage { - 
code: err.code(), - msg: Some(err.msg().to_owned()), - } - .to_string(), - } - .to_string(), - ) - } - } -} diff --git a/kittybox-rs/src/frontend/onboarding.rs b/kittybox-rs/src/frontend/onboarding.rs deleted file mode 100644 index e44e866..0000000 --- a/kittybox-rs/src/frontend/onboarding.rs +++ /dev/null @@ -1,181 +0,0 @@ -use std::sync::Arc; - -use crate::database::{settings, Storage}; -use axum::{ - extract::{Extension, Host}, - http::StatusCode, - response::{Html, IntoResponse}, - Json, -}; -use kittybox_frontend_renderer::{ErrorPage, OnboardingPage, Template}; -use serde::Deserialize; -use tokio::{task::JoinSet, sync::Mutex}; -use tracing::{debug, error}; - -use super::FrontendError; - -pub async fn get() -> Html { - Html( - Template { - title: "Kittybox - Onboarding", - blog_name: "Kittybox", - feeds: vec![], - user: None, - content: OnboardingPage {}.to_string(), - } - .to_string(), - ) -} - -#[derive(Deserialize, Debug)] -struct OnboardingFeed { - slug: String, - name: String, -} - -#[derive(Deserialize, Debug)] -pub struct OnboardingData { - user: serde_json::Value, - first_post: serde_json::Value, - #[serde(default = "OnboardingData::default_blog_name")] - blog_name: String, - feeds: Vec, -} - -impl OnboardingData { - fn default_blog_name() -> String { - "Kitty Box!".to_owned() - } -} - -#[tracing::instrument(skip(db, http))] -async fn onboard( - db: D, - user_uid: url::Url, - data: OnboardingData, - http: reqwest::Client, - jobset: Arc>>, -) -> Result<(), FrontendError> { - // Create a user to pass to the backend - // At this point the site belongs to nobody, so it is safe to do - tracing::debug!("Creating user..."); - let user = kittybox_indieauth::TokenData { - me: user_uid.clone(), - client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), - scope: kittybox_indieauth::Scopes::new(vec![kittybox_indieauth::Scope::Create]), - iat: None, exp: None - }; - tracing::debug!("User data: {:?}", user); - - if data.user["type"][0] != "h-card" || 
data.first_post["type"][0] != "h-entry" { - return Err(FrontendError::with_code( - StatusCode::BAD_REQUEST, - "user and first_post should be an h-card and an h-entry", - )); - } - - tracing::debug!("Setting settings..."); - let user_domain = format!( - "{}{}", - user.me.host_str().unwrap(), - user.me.port() - .map(|port| format!(":{}", port)) - .unwrap_or_default() - ); - db.set_setting::(&user_domain, data.blog_name.to_owned()) - .await - .map_err(FrontendError::from)?; - - db.set_setting::(&user_domain, false) - .await - .map_err(FrontendError::from)?; - - let (_, hcard) = { - let mut hcard = data.user; - hcard["properties"]["uid"] = serde_json::json!([&user_uid]); - crate::micropub::normalize_mf2(hcard, &user) - }; - db.put_post(&hcard, user_domain.as_str()) - .await - .map_err(FrontendError::from)?; - - debug!("Creating feeds..."); - for feed in data.feeds { - if feed.name.is_empty() || feed.slug.is_empty() { - continue; - }; - debug!("Creating feed {} with slug {}", &feed.name, &feed.slug); - let (_, feed) = crate::micropub::normalize_mf2( - serde_json::json!({ - "type": ["h-feed"], - "properties": {"name": [feed.name], "mp-slug": [feed.slug]} - }), - &user, - ); - - db.put_post(&feed, user_uid.as_str()) - .await - .map_err(FrontendError::from)?; - } - let (uid, post) = crate::micropub::normalize_mf2(data.first_post, &user); - tracing::debug!("Posting first post {}...", uid); - crate::micropub::_post(&user, uid, post, db, http, jobset) - .await - .map_err(|e| FrontendError { - msg: "Error while posting the first post".to_string(), - source: Some(Box::new(e)), - code: StatusCode::INTERNAL_SERVER_ERROR, - })?; - - Ok(()) -} - -pub async fn post( - Extension(db): Extension, - Host(host): Host, - Extension(http): Extension, - Extension(jobset): Extension>>>, - Json(data): Json, -) -> axum::response::Response { - let user_uid = format!("https://{}/", host.as_str()); - - if db.post_exists(&user_uid).await.unwrap() { - IntoResponse::into_response((StatusCode::FOUND, 
[("Location", "/")])) - } else { - match onboard(db, user_uid.parse().unwrap(), data, http, jobset).await { - Ok(()) => IntoResponse::into_response((StatusCode::FOUND, [("Location", "/")])), - Err(err) => { - error!("Onboarding error: {}", err); - IntoResponse::into_response(( - err.code(), - Html( - Template { - title: "Kittybox - Onboarding", - blog_name: "Kittybox", - feeds: vec![], - user: None, - content: ErrorPage { - code: err.code(), - msg: Some(err.msg().to_string()), - } - .to_string(), - } - .to_string(), - ), - )) - } - } - } -} - -pub fn router( - database: S, - http: reqwest::Client, - jobset: Arc>>, -) -> axum::routing::MethodRouter { - axum::routing::get(get) - .post(post::) - .layer::<_, _, std::convert::Infallible>(axum::Extension(database)) - .layer::<_, _, std::convert::Infallible>(axum::Extension(http)) - .layer(axum::Extension(jobset)) -} diff --git a/kittybox-rs/src/indieauth/backend.rs b/kittybox-rs/src/indieauth/backend.rs deleted file mode 100644 index 534bcfb..0000000 --- a/kittybox-rs/src/indieauth/backend.rs +++ /dev/null @@ -1,105 +0,0 @@ -use std::collections::HashMap; -use kittybox_indieauth::{ - AuthorizationRequest, TokenData -}; -pub use kittybox_util::auth::EnrolledCredential; - -type Result = std::io::Result; - -pub mod fs; -pub use fs::FileBackend; - -#[async_trait::async_trait] -pub trait AuthBackend: Clone + Send + Sync + 'static { - // Authorization code management. - /// Create a one-time OAuth2 authorization code for the passed - /// authorization request, and save it for later retrieval. - /// - /// Note for implementors: the [`AuthorizationRequest::me`] value - /// is guaranteed to be [`Some(url::Url)`][Option::Some] and can - /// be trusted to be correct and non-malicious. - async fn create_code(&self, data: AuthorizationRequest) -> Result; - /// Retreive an authorization request using the one-time - /// code. 
Implementations must sanitize the `code` field to - /// prevent exploits, and must check if the code should still be - /// valid at this point in time (validity interval is left up to - /// the implementation, but is recommended to be no more than 10 - /// minutes). - async fn get_code(&self, code: &str) -> Result>; - // Token management. - async fn create_token(&self, data: TokenData) -> Result; - async fn get_token(&self, website: &url::Url, token: &str) -> Result>; - async fn list_tokens(&self, website: &url::Url) -> Result>; - async fn revoke_token(&self, website: &url::Url, token: &str) -> Result<()>; - // Refresh token management. - async fn create_refresh_token(&self, data: TokenData) -> Result; - async fn get_refresh_token(&self, website: &url::Url, token: &str) -> Result>; - async fn list_refresh_tokens(&self, website: &url::Url) -> Result>; - async fn revoke_refresh_token(&self, website: &url::Url, token: &str) -> Result<()>; - // Password management. - /// Verify a password. - #[must_use] - async fn verify_password(&self, website: &url::Url, password: String) -> Result; - /// Enroll a password credential for a user. Only one password - /// credential must exist for a given user. - async fn enroll_password(&self, website: &url::Url, password: String) -> Result<()>; - /// List currently enrolled credential types for a given user. - async fn list_user_credential_types(&self, website: &url::Url) -> Result>; - // WebAuthn credential management. - #[cfg(feature = "webauthn")] - /// Enroll a WebAuthn authenticator public key for this user. - /// Multiple public keys may be saved for one user, corresponding - /// to different authenticators used by them. - /// - /// This function can also be used to overwrite a passkey with an - /// updated version after using - /// [webauthn::prelude::Passkey::update_credential()]. 
- async fn enroll_webauthn(&self, website: &url::Url, credential: webauthn::prelude::Passkey) -> Result<()>; - #[cfg(feature = "webauthn")] - /// List currently enrolled WebAuthn authenticators for a given user. - async fn list_webauthn_pubkeys(&self, website: &url::Url) -> Result>; - #[cfg(feature = "webauthn")] - /// Persist registration challenge state for a little while so it - /// can be used later. - /// - /// Challenges saved in this manner MUST expire after a little - /// while. 10 minutes is recommended. - async fn persist_registration_challenge( - &self, - website: &url::Url, - state: webauthn::prelude::PasskeyRegistration - ) -> Result; - #[cfg(feature = "webauthn")] - /// Retrieve a persisted registration challenge. - /// - /// The challenge should be deleted after retrieval. - async fn retrieve_registration_challenge( - &self, - website: &url::Url, - challenge_id: &str - ) -> Result; - #[cfg(feature = "webauthn")] - /// Persist authentication challenge state for a little while so - /// it can be used later. - /// - /// Challenges saved in this manner MUST expire after a little - /// while. 10 minutes is recommended. - /// - /// To support multiple authentication options, this can return an - /// opaque token that should be set as a cookie. - async fn persist_authentication_challenge( - &self, - website: &url::Url, - state: webauthn::prelude::PasskeyAuthentication - ) -> Result; - #[cfg(feature = "webauthn")] - /// Retrieve a persisted authentication challenge. - /// - /// The challenge should be deleted after retrieval. 
- async fn retrieve_authentication_challenge( - &self, - website: &url::Url, - challenge_id: &str - ) -> Result; - -} diff --git a/kittybox-rs/src/indieauth/backend/fs.rs b/kittybox-rs/src/indieauth/backend/fs.rs deleted file mode 100644 index 600e901..0000000 --- a/kittybox-rs/src/indieauth/backend/fs.rs +++ /dev/null @@ -1,420 +0,0 @@ -use std::{path::PathBuf, collections::HashMap, borrow::Cow, time::{SystemTime, Duration}}; - -use super::{AuthBackend, Result, EnrolledCredential}; -use async_trait::async_trait; -use kittybox_indieauth::{ - AuthorizationRequest, TokenData -}; -use serde::de::DeserializeOwned; -use tokio::{task::spawn_blocking, io::AsyncReadExt}; -#[cfg(feature = "webauthn")] -use webauthn::prelude::{Passkey, PasskeyRegistration, PasskeyAuthentication}; - -const CODE_LENGTH: usize = 16; -const TOKEN_LENGTH: usize = 128; -const CODE_DURATION: std::time::Duration = std::time::Duration::from_secs(600); - -#[derive(Clone, Debug)] -pub struct FileBackend { - path: PathBuf, -} - -impl FileBackend { - pub fn new>(path: T) -> Self { - Self { - path: path.into() - } - } - - /// Sanitize a filename, leaving only alphanumeric characters. - /// - /// Doesn't allocate a new string unless non-alphanumeric - /// characters are encountered. 
- fn sanitize_for_path(filename: &'_ str) -> Cow<'_, str> { - if filename.chars().all(char::is_alphanumeric) { - Cow::Borrowed(filename) - } else { - let mut s = String::with_capacity(filename.len()); - - filename.chars() - .filter(|c| c.is_alphanumeric()) - .for_each(|c| s.push(c)); - - Cow::Owned(s) - } - } - - #[inline] - async fn serialize_to_file>>( - &self, - dir: &str, - basename: B, - length: usize, - data: T - ) -> Result { - let basename = basename.into(); - let has_ext = basename.is_some(); - let (filename, mut file) = kittybox_util::fs::mktemp( - self.path.join(dir), basename, length - ) - .await - .map(|(name, file)| (name, file.try_into_std().unwrap()))?; - - spawn_blocking(move || serde_json::to_writer(&mut file, &data)) - .await - .unwrap_or_else(|e| panic!( - "Panic while serializing {}: {}", - std::any::type_name::(), - e - )) - .map(move |_| { - (if has_ext { - filename - .extension() - - } else { - filename - .file_name() - }) - .unwrap() - .to_str() - .unwrap() - .to_owned() - }) - .map_err(|err| err.into()) - } - - #[inline] - async fn deserialize_from_file<'filename, 'this: 'filename, T, B>( - &'this self, - dir: &'filename str, - basename: B, - filename: &'filename str, - ) -> Result> - where - T: serde::de::DeserializeOwned + Send, - B: Into> - { - let basename = basename.into(); - let path = self.path - .join(dir) - .join(format!( - "{}{}{}", - basename.unwrap_or(""), - if basename.is_none() { "" } else { "." 
}, - FileBackend::sanitize_for_path(filename) - )); - - let data = match tokio::fs::File::open(&path).await { - Ok(mut file) => { - let mut buf = Vec::new(); - - file.read_to_end(&mut buf).await?; - - match serde_json::from_slice::<'_, T>(buf.as_slice()) { - Ok(data) => data, - Err(err) => return Err(err.into()) - } - }, - Err(err) => if err.kind() == std::io::ErrorKind::NotFound { - return Ok(None) - } else { - return Err(err) - } - }; - - let ctime = tokio::fs::metadata(&path).await?.created()?; - - Ok(Some((path, ctime, data))) - } - - #[inline] - fn url_to_dir(url: &url::Url) -> String { - let host = url.host_str().unwrap(); - let port = url.port() - .map(|port| Cow::Owned(format!(":{}", port))) - .unwrap_or(Cow::Borrowed("")); - - format!("{}{}", host, port) - } - - async fn list_files<'dir, 'this: 'dir, T: DeserializeOwned + Send>( - &'this self, - dir: &'dir str, - prefix: &'static str - ) -> Result> { - let dir = self.path.join(dir); - - let mut hashmap = HashMap::new(); - let mut readdir = match tokio::fs::read_dir(dir).await { - Ok(readdir) => readdir, - Err(err) => if err.kind() == std::io::ErrorKind::NotFound { - // empty hashmap - return Ok(hashmap); - } else { - return Err(err); - } - }; - while let Some(entry) = readdir.next_entry().await? 
{ - // safe to unwrap; filenames are alphanumeric - let filename = entry.file_name() - .into_string() - .expect("token filenames should be alphanumeric!"); - if let Some(token) = filename.strip_prefix(&format!("{}.", prefix)) { - match tokio::fs::File::open(entry.path()).await { - Ok(mut file) => { - let mut buf = Vec::new(); - - file.read_to_end(&mut buf).await?; - - match serde_json::from_slice::<'_, T>(buf.as_slice()) { - Ok(data) => hashmap.insert(token.to_string(), data), - Err(err) => { - tracing::error!( - "Error decoding token data from file {}: {}", - entry.path().display(), err - ); - continue; - } - }; - }, - Err(err) => if err.kind() == std::io::ErrorKind::NotFound { - continue - } else { - return Err(err) - } - } - } - } - - Ok(hashmap) - } -} - -#[async_trait] -impl AuthBackend for FileBackend { - // Authorization code management. - async fn create_code(&self, data: AuthorizationRequest) -> Result { - self.serialize_to_file("codes", None, CODE_LENGTH, data).await - } - - async fn get_code(&self, code: &str) -> Result> { - match self.deserialize_from_file("codes", None, FileBackend::sanitize_for_path(code).as_ref()).await? { - Some((path, ctime, data)) => { - if let Err(err) = tokio::fs::remove_file(path).await { - tracing::error!("Failed to clean up authorization code: {}", err); - } - // Err on the safe side in case of clock drift - if ctime.elapsed().unwrap_or(Duration::ZERO) > CODE_DURATION { - Ok(None) - } else { - Ok(Some(data)) - } - }, - None => Ok(None) - } - } - - // Token management. - async fn create_token(&self, data: TokenData) -> Result { - let dir = format!("{}/tokens", FileBackend::url_to_dir(&data.me)); - self.serialize_to_file(&dir, "access", TOKEN_LENGTH, data).await - } - - async fn get_token(&self, website: &url::Url, token: &str) -> Result> { - let dir = format!("{}/tokens", FileBackend::url_to_dir(website)); - match self.deserialize_from_file::( - &dir, "access", - FileBackend::sanitize_for_path(token).as_ref() - ).await? 
{ - Some((path, _, token)) => { - if token.expired() { - if let Err(err) = tokio::fs::remove_file(path).await { - tracing::error!("Failed to remove expired token: {}", err); - } - Ok(None) - } else { - Ok(Some(token)) - } - }, - None => Ok(None) - } - } - - async fn list_tokens(&self, website: &url::Url) -> Result> { - let dir = format!("{}/tokens", FileBackend::url_to_dir(website)); - self.list_files(&dir, "access").await - } - - async fn revoke_token(&self, website: &url::Url, token: &str) -> Result<()> { - match tokio::fs::remove_file( - self.path - .join(FileBackend::url_to_dir(website)) - .join("tokens") - .join(format!("access.{}", FileBackend::sanitize_for_path(token))) - ).await { - Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(()), - result => result - } - } - - // Refresh token management. - async fn create_refresh_token(&self, data: TokenData) -> Result { - let dir = format!("{}/tokens", FileBackend::url_to_dir(&data.me)); - self.serialize_to_file(&dir, "refresh", TOKEN_LENGTH, data).await - } - - async fn get_refresh_token(&self, website: &url::Url, token: &str) -> Result> { - let dir = format!("{}/tokens", FileBackend::url_to_dir(website)); - match self.deserialize_from_file::( - &dir, "refresh", - FileBackend::sanitize_for_path(token).as_ref() - ).await? 
{ - Some((path, _, token)) => { - if token.expired() { - if let Err(err) = tokio::fs::remove_file(path).await { - tracing::error!("Failed to remove expired token: {}", err); - } - Ok(None) - } else { - Ok(Some(token)) - } - }, - None => Ok(None) - } - } - - async fn list_refresh_tokens(&self, website: &url::Url) -> Result> { - let dir = format!("{}/tokens", FileBackend::url_to_dir(website)); - self.list_files(&dir, "refresh").await - } - - async fn revoke_refresh_token(&self, website: &url::Url, token: &str) -> Result<()> { - match tokio::fs::remove_file( - self.path - .join(FileBackend::url_to_dir(website)) - .join("tokens") - .join(format!("refresh.{}", FileBackend::sanitize_for_path(token))) - ).await { - Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(()), - result => result - } - } - - // Password management. - #[tracing::instrument(skip(password))] - async fn verify_password(&self, website: &url::Url, password: String) -> Result { - use argon2::{Argon2, password_hash::{PasswordHash, PasswordVerifier}}; - - let password_filename = self.path - .join(FileBackend::url_to_dir(website)) - .join("password"); - - tracing::debug!("Reading password for {} from {}", website, password_filename.display()); - - match tokio::fs::read_to_string(password_filename).await { - Ok(password_hash) => { - let parsed_hash = { - let hash = password_hash.trim(); - #[cfg(debug_assertions)] tracing::debug!("Password hash: {}", hash); - PasswordHash::new(hash) - .expect("Password hash should be valid!") - }; - Ok(Argon2::default().verify_password(password.as_bytes(), &parsed_hash).is_ok()) - }, - Err(err) => if err.kind() == std::io::ErrorKind::NotFound { - Ok(false) - } else { - Err(err) - } - } - } - - #[tracing::instrument(skip(password))] - async fn enroll_password(&self, website: &url::Url, password: String) -> Result<()> { - use argon2::{Argon2, password_hash::{rand_core::OsRng, PasswordHasher, SaltString}}; - - let password_filename = self.path - 
.join(FileBackend::url_to_dir(website)) - .join("password"); - - let salt = SaltString::generate(&mut OsRng); - let argon2 = Argon2::default(); - let password_hash = argon2.hash_password(password.as_bytes(), &salt) - .expect("Hashing a password should not error out") - .to_string(); - - tracing::debug!("Enrolling password for {} at {}", website, password_filename.display()); - tokio::fs::write(password_filename, password_hash.as_bytes()).await - } - - // WebAuthn credential management. - #[cfg(feature = "webauthn")] - async fn enroll_webauthn(&self, website: &url::Url, credential: Passkey) -> Result<()> { - todo!() - } - - #[cfg(feature = "webauthn")] - async fn list_webauthn_pubkeys(&self, website: &url::Url) -> Result> { - // TODO stub! - Ok(vec![]) - } - - #[cfg(feature = "webauthn")] - async fn persist_registration_challenge( - &self, - website: &url::Url, - state: PasskeyRegistration - ) -> Result { - todo!() - } - - #[cfg(feature = "webauthn")] - async fn retrieve_registration_challenge( - &self, - website: &url::Url, - challenge_id: &str - ) -> Result { - todo!() - } - - #[cfg(feature = "webauthn")] - async fn persist_authentication_challenge( - &self, - website: &url::Url, - state: PasskeyAuthentication - ) -> Result { - todo!() - } - - #[cfg(feature = "webauthn")] - async fn retrieve_authentication_challenge( - &self, - website: &url::Url, - challenge_id: &str - ) -> Result { - todo!() - } - - async fn list_user_credential_types(&self, website: &url::Url) -> Result> { - let mut creds = vec![]; - - match tokio::fs::metadata(self.path - .join(FileBackend::url_to_dir(website)) - .join("password")) - .await - { - Ok(_) => creds.push(EnrolledCredential::Password), - Err(err) => if err.kind() != std::io::ErrorKind::NotFound { - return Err(err) - } - } - - #[cfg(feature = "webauthn")] - if !self.list_webauthn_pubkeys(website).await?.is_empty() { - creds.push(EnrolledCredential::WebAuthn); - } - - Ok(creds) - } -} diff --git a/kittybox-rs/src/indieauth/mod.rs 
b/kittybox-rs/src/indieauth/mod.rs deleted file mode 100644 index 0ad2702..0000000 --- a/kittybox-rs/src/indieauth/mod.rs +++ /dev/null @@ -1,883 +0,0 @@ -use std::marker::PhantomData; - -use tracing::error; -use serde::Deserialize; -use axum::{ - extract::{Query, Json, Host, Form}, - response::{Html, IntoResponse, Response}, - http::StatusCode, TypedHeader, headers::{Authorization, authorization::Bearer}, - Extension -}; -#[cfg_attr(not(feature = "webauthn"), allow(unused_imports))] -use axum_extra::extract::cookie::{CookieJar, Cookie}; -use crate::database::Storage; -use kittybox_indieauth::{ - Metadata, IntrospectionEndpointAuthMethod, RevocationEndpointAuthMethod, - Scope, Scopes, PKCEMethod, Error, ErrorKind, ResponseType, - AuthorizationRequest, AuthorizationResponse, - GrantType, GrantRequest, GrantResponse, Profile, - TokenIntrospectionRequest, TokenIntrospectionResponse, TokenRevocationRequest, TokenData -}; -use std::str::FromStr; -use std::ops::Deref; - -pub mod backend; -#[cfg(feature = "webauthn")] -mod webauthn; -use backend::AuthBackend; - -const ACCESS_TOKEN_VALIDITY: u64 = 7 * 24 * 60 * 60; // 7 days -const REFRESH_TOKEN_VALIDITY: u64 = ACCESS_TOKEN_VALIDITY / 7 * 60; // 60 days -/// Internal scope for accessing the token introspection endpoint. 
-const KITTYBOX_TOKEN_STATUS: &str = "kittybox:token_status"; - -pub(crate) struct User(pub(crate) TokenData, pub(crate) PhantomData); -impl std::fmt::Debug for User { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_tuple("User").field(&self.0).finish() - } -} -impl std::ops::Deref for User { - type Target = TokenData; - - fn deref(&self) -> &Self::Target { - &self.0 - } -} - -pub enum IndieAuthResourceError { - InvalidRequest, - Unauthorized, - InvalidToken -} -impl axum::response::IntoResponse for IndieAuthResourceError { - fn into_response(self) -> axum::response::Response { - use IndieAuthResourceError::*; - - match self { - Unauthorized => ( - StatusCode::UNAUTHORIZED, - [("WWW-Authenticate", "Bearer")] - ).into_response(), - InvalidRequest => ( - StatusCode::BAD_REQUEST, - Json(&serde_json::json!({"error": "invalid_request"})) - ).into_response(), - InvalidToken => ( - StatusCode::UNAUTHORIZED, - [("WWW-Authenticate", "Bearer, error=\"invalid_token\"")], - Json(&serde_json::json!({"error": "unauthorized"})) - ).into_response() - } - } -} - -#[async_trait::async_trait] -impl axum::extract::FromRequestParts for User { - type Rejection = IndieAuthResourceError; - - async fn from_request_parts(req: &mut axum::http::request::Parts, state: &S) -> Result { - let TypedHeader(Authorization(token)) = - TypedHeader::>::from_request_parts(req, state) - .await - .map_err(|_| IndieAuthResourceError::Unauthorized)?; - - let axum::Extension(auth) = axum::Extension::::from_request_parts(req, state) - .await - .unwrap(); - - let Host(host) = Host::from_request_parts(req, state) - .await - .map_err(|_| IndieAuthResourceError::InvalidRequest)?; - - auth.get_token( - &format!("https://{host}/").parse().unwrap(), - token.token() - ) - .await - .unwrap() - .ok_or(IndieAuthResourceError::InvalidToken) - .map(|t| User(t, PhantomData)) - } -} - -pub async fn metadata( - Host(host): Host -) -> Metadata { - let issuer: url::Url = format!( - "{}://{}/", - 
if cfg!(debug_assertions) { - "http" - } else { - "https" - }, - host - ).parse().unwrap(); - - let indieauth: url::Url = issuer.join("/.kittybox/indieauth/").unwrap(); - Metadata { - issuer, - authorization_endpoint: indieauth.join("auth").unwrap(), - token_endpoint: indieauth.join("token").unwrap(), - introspection_endpoint: indieauth.join("token_status").unwrap(), - introspection_endpoint_auth_methods_supported: Some(vec![ - IntrospectionEndpointAuthMethod::Bearer - ]), - revocation_endpoint: Some(indieauth.join("revoke_token").unwrap()), - revocation_endpoint_auth_methods_supported: Some(vec![ - RevocationEndpointAuthMethod::None - ]), - scopes_supported: Some(vec![ - Scope::Create, - Scope::Update, - Scope::Delete, - Scope::Media, - Scope::Profile - ]), - response_types_supported: Some(vec![ResponseType::Code]), - grant_types_supported: Some(vec![GrantType::AuthorizationCode, GrantType::RefreshToken]), - service_documentation: None, - code_challenge_methods_supported: vec![PKCEMethod::S256], - authorization_response_iss_parameter_supported: Some(true), - userinfo_endpoint: Some(indieauth.join("userinfo").unwrap()), - } -} - -async fn authorization_endpoint_get( - Host(host): Host, - Query(request): Query, - Extension(db): Extension, - Extension(http): Extension, - Extension(auth): Extension -) -> Response { - let me = format!("https://{host}/").parse().unwrap(); - let h_app = { - tracing::debug!("Sending request to {} to fetch metadata", request.client_id); - match http.get(request.client_id.clone()).send().await { - Ok(response) => { - let url = response.url().clone(); - let text = response.text().await.unwrap(); - tracing::debug!("Received {} bytes in response", text.len()); - match microformats::from_html(&text, url) { - Ok(mf2) => { - if let Some(relation) = mf2.rels.items.get(&request.redirect_uri) { - if !relation.rels.iter().any(|i| i == "redirect_uri") { - return (StatusCode::BAD_REQUEST, - [("Content-Type", "text/plain")], - "The redirect_uri provided 
was declared as \ - something other than redirect_uri.") - .into_response() - } - } else if request.redirect_uri.origin() != request.client_id.origin() { - return (StatusCode::BAD_REQUEST, - [("Content-Type", "text/plain")], - "The redirect_uri didn't match the origin \ - and wasn't explicitly allowed. You were being tricked.") - .into_response() - } - - mf2.items.iter() - .cloned() - .find(|i| (**i).borrow().r#type.iter() - .any(|i| *i == microformats::types::Class::from_str("h-app").unwrap() - || *i == microformats::types::Class::from_str("h-x-app").unwrap())) - .map(|i| serde_json::to_value(i.borrow().deref()).unwrap()) - }, - Err(err) => { - tracing::error!("Error parsing application metadata: {}", err); - return (StatusCode::BAD_REQUEST, - [("Content-Type", "text/plain")], - "Parsing application metadata failed.").into_response() - } - } - }, - Err(err) => { - tracing::error!("Error fetching application metadata: {}", err); - return (StatusCode::INTERNAL_SERVER_ERROR, - [("Content-Type", "text/plain")], - "Fetching application metadata failed.").into_response() - } - } - }; - - tracing::debug!("Application metadata: {:#?}", h_app); - - Html(kittybox_frontend_renderer::Template { - title: "Confirm sign-in via IndieAuth", - blog_name: "Kittybox", - feeds: vec![], - user: None, - content: kittybox_frontend_renderer::AuthorizationRequestPage { - request, - credentials: auth.list_user_credential_types(&me).await.unwrap(), - user: db.get_post(me.as_str()).await.unwrap().unwrap(), - app: h_app - }.to_string(), - }.to_string()) - .into_response() -} - -#[derive(Deserialize, Debug)] -#[serde(untagged)] -enum Credential { - Password(String), - #[cfg(feature = "webauthn")] - WebAuthn(::webauthn::prelude::PublicKeyCredential) -} - -#[derive(Deserialize, Debug)] -struct AuthorizationConfirmation { - authorization_method: Credential, - request: AuthorizationRequest -} - -async fn verify_credential( - auth: &A, - website: &url::Url, - credential: Credential, - 
#[cfg_attr(not(feature = "webauthn"), allow(unused_variables))] - challenge_id: Option<&str> -) -> std::io::Result { - match credential { - Credential::Password(password) => auth.verify_password(website, password).await, - #[cfg(feature = "webauthn")] - Credential::WebAuthn(credential) => webauthn::verify( - auth, - website, - credential, - challenge_id.unwrap() - ).await - } -} - -#[tracing::instrument(skip(backend, confirmation))] -async fn authorization_endpoint_confirm( - Host(host): Host, - Extension(backend): Extension, - cookies: CookieJar, - Json(confirmation): Json, -) -> Response { - tracing::debug!("Received authorization confirmation from user"); - #[cfg(feature = "webauthn")] - let challenge_id = cookies.get(webauthn::CHALLENGE_ID_COOKIE) - .map(|cookie| cookie.value()); - #[cfg(not(feature = "webauthn"))] - let challenge_id = None; - - let website = format!("https://{}/", host).parse().unwrap(); - let AuthorizationConfirmation { - authorization_method: credential, - request: mut auth - } = confirmation; - match verify_credential(&backend, &website, credential, challenge_id).await { - Ok(verified) => if !verified { - error!("User failed verification, bailing out."); - return StatusCode::UNAUTHORIZED.into_response(); - }, - Err(err) => { - error!("Error while verifying credential: {}", err); - return StatusCode::INTERNAL_SERVER_ERROR.into_response(); - } - } - // Insert the correct `me` value into the request - // - // From this point, the `me` value that hits the backend is - // guaranteed to be authoritative and correct, and can be safely - // unwrapped. 
- auth.me = Some(website.clone()); - // Cloning these two values, because we can't destructure - // the AuthorizationRequest - we need it for the code - let state = auth.state.clone(); - let redirect_uri = auth.redirect_uri.clone(); - - let code = match backend.create_code(auth).await { - Ok(code) => code, - Err(err) => { - error!("Error creating authorization code: {}", err); - return StatusCode::INTERNAL_SERVER_ERROR.into_response(); - } - }; - - let location = { - let mut uri = redirect_uri; - uri.set_query(Some(&serde_urlencoded::to_string( - AuthorizationResponse { code, state, iss: website } - ).unwrap())); - - uri - }; - - // DO NOT SET `StatusCode::FOUND` here! `fetch()` cannot read from - // redirects, it can only follow them or choose to receive an - // opaque response instead that is completely useless - (StatusCode::NO_CONTENT, - [("Location", location.as_str())], - #[cfg(feature = "webauthn")] - cookies.remove(Cookie::named(webauthn::CHALLENGE_ID_COOKIE)) - ) - .into_response() -} - -#[tracing::instrument(skip(backend, db))] -async fn authorization_endpoint_post( - Host(host): Host, - Extension(backend): Extension, - Extension(db): Extension, - Form(grant): Form, -) -> Response { - match grant { - GrantRequest::AuthorizationCode { - code, - client_id, - redirect_uri, - code_verifier - } => { - let request: AuthorizationRequest = match backend.get_code(&code).await { - Ok(Some(request)) => request, - Ok(None) => return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("The provided authorization code is invalid.".to_string()), - error_uri: None - }.into_response(), - Err(err) => { - tracing::error!("Error retrieving auth request: {}", err); - return StatusCode::INTERNAL_SERVER_ERROR.into_response(); - } - }; - if client_id != request.client_id { - return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("This authorization code isn't yours.".to_string()), - error_uri: None - }.into_response() - } - if redirect_uri != request.redirect_uri { - 
return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("This redirect_uri doesn't match the one the code has been sent to.".to_string()), - error_uri: None - }.into_response() - } - if !request.code_challenge.verify(code_verifier) { - return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("The PKCE challenge failed.".to_string()), - // are RFCs considered human-readable? 😝 - error_uri: "https://datatracker.ietf.org/doc/html/rfc7636#section-4.6".parse().ok() - }.into_response() - } - let me: url::Url = format!("https://{}/", host).parse().unwrap(); - if request.me.unwrap() != me { - return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("This authorization endpoint does not serve this user.".to_string()), - error_uri: None - }.into_response() - } - let profile = if request.scope.as_ref() - .map(|s| s.has(&Scope::Profile)) - .unwrap_or_default() - { - match get_profile( - db, - me.as_str(), - request.scope.as_ref() - .map(|s| s.has(&Scope::Email)) - .unwrap_or_default() - ).await { - Ok(profile) => { - tracing::debug!("Retrieved profile: {:?}", profile); - profile - }, - Err(err) => { - tracing::error!("Error retrieving profile from database: {}", err); - - return StatusCode::INTERNAL_SERVER_ERROR.into_response() - } - } - } else { - None - }; - - GrantResponse::ProfileUrl { me, profile }.into_response() - }, - _ => Error { - kind: ErrorKind::InvalidGrant, - msg: Some("The provided grant_type is unusable on this endpoint.".to_string()), - error_uri: "https://indieauth.spec.indieweb.org/#redeeming-the-authorization-code".parse().ok() - }.into_response() - } -} - -#[tracing::instrument(skip(backend, db))] -async fn token_endpoint_post( - Host(host): Host, - Extension(backend): Extension, - Extension(db): Extension, - Form(grant): Form, -) -> Response { - #[inline] - fn prepare_access_token(me: url::Url, client_id: url::Url, scope: Scopes) -> TokenData { - TokenData { - me, client_id, scope, - exp: (std::time::SystemTime::now() - 
.duration_since(std::time::UNIX_EPOCH) - .unwrap() - + std::time::Duration::from_secs(ACCESS_TOKEN_VALIDITY)) - .as_secs() - .into(), - iat: std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .unwrap() - .as_secs() - .into() - } - } - - #[inline] - fn prepare_refresh_token(me: url::Url, client_id: url::Url, scope: Scopes) -> TokenData { - TokenData { - me, client_id, scope, - exp: (std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .unwrap() - + std::time::Duration::from_secs(REFRESH_TOKEN_VALIDITY)) - .as_secs() - .into(), - iat: std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .unwrap() - .as_secs() - .into() - } - } - - let me: url::Url = format!("https://{}/", host).parse().unwrap(); - - match grant { - GrantRequest::AuthorizationCode { - code, - client_id, - redirect_uri, - code_verifier - } => { - let request: AuthorizationRequest = match backend.get_code(&code).await { - Ok(Some(request)) => request, - Ok(None) => return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("The provided authorization code is invalid.".to_string()), - error_uri: None - }.into_response(), - Err(err) => { - tracing::error!("Error retrieving auth request: {}", err); - return StatusCode::INTERNAL_SERVER_ERROR.into_response(); - } - }; - - tracing::debug!("Retrieved authorization request: {:?}", request); - - let scope = if let Some(scope) = request.scope { scope } else { - return Error { - kind: ErrorKind::InvalidScope, - msg: Some("Tokens cannot be issued if no scopes are requested.".to_string()), - error_uri: "https://indieauth.spec.indieweb.org/#access-token-response".parse().ok() - }.into_response(); - }; - if client_id != request.client_id { - return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("This authorization code isn't yours.".to_string()), - error_uri: None - }.into_response() - } - if redirect_uri != request.redirect_uri { - return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("This 
redirect_uri doesn't match the one the code has been sent to.".to_string()), - error_uri: None - }.into_response() - } - if !request.code_challenge.verify(code_verifier) { - return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("The PKCE challenge failed.".to_string()), - error_uri: "https://datatracker.ietf.org/doc/html/rfc7636#section-4.6".parse().ok() - }.into_response(); - } - - // Note: we can trust the `request.me` value, since we set - // it earlier before generating the authorization code - if request.me.unwrap() != me { - return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("This authorization endpoint does not serve this user.".to_string()), - error_uri: None - }.into_response() - } - - let profile = if dbg!(scope.has(&Scope::Profile)) { - match get_profile( - db, - me.as_str(), - scope.has(&Scope::Email) - ).await { - Ok(profile) => dbg!(profile), - Err(err) => { - tracing::error!("Error retrieving profile from database: {}", err); - - return StatusCode::INTERNAL_SERVER_ERROR.into_response() - } - } - } else { - None - }; - - let access_token = match backend.create_token( - prepare_access_token(me.clone(), client_id.clone(), scope.clone()) - ).await { - Ok(token) => token, - Err(err) => { - tracing::error!("Error creating access token: {}", err); - return StatusCode::INTERNAL_SERVER_ERROR.into_response(); - } - }; - // TODO: only create refresh token if user allows it - let refresh_token = match backend.create_refresh_token( - prepare_refresh_token(me.clone(), client_id, scope.clone()) - ).await { - Ok(token) => token, - Err(err) => { - tracing::error!("Error creating refresh token: {}", err); - return StatusCode::INTERNAL_SERVER_ERROR.into_response(); - } - }; - - GrantResponse::AccessToken { - me, - profile, - access_token, - token_type: kittybox_indieauth::TokenType::Bearer, - scope: Some(scope), - expires_in: Some(ACCESS_TOKEN_VALIDITY), - refresh_token: Some(refresh_token) - }.into_response() - }, - GrantRequest::RefreshToken { - 
refresh_token, - client_id, - scope - } => { - let data = match backend.get_refresh_token(&me, &refresh_token).await { - Ok(Some(token)) => token, - Ok(None) => return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("This refresh token is not valid.".to_string()), - error_uri: None - }.into_response(), - Err(err) => { - tracing::error!("Error retrieving refresh token: {}", err); - return StatusCode::INTERNAL_SERVER_ERROR.into_response() - } - }; - - if data.client_id != client_id { - return Error { - kind: ErrorKind::InvalidGrant, - msg: Some("This refresh token is not yours.".to_string()), - error_uri: None - }.into_response(); - } - - let scope = if let Some(scope) = scope { - if !data.scope.has_all(scope.as_ref()) { - return Error { - kind: ErrorKind::InvalidScope, - msg: Some("You can't request additional scopes through the refresh token grant.".to_string()), - error_uri: None - }.into_response(); - } - - scope - } else { - // Note: check skipped because of redundancy (comparing a scope list with itself) - data.scope - }; - - - let profile = if scope.has(&Scope::Profile) { - match get_profile( - db, - data.me.as_str(), - scope.has(&Scope::Email) - ).await { - Ok(profile) => profile, - Err(err) => { - tracing::error!("Error retrieving profile from database: {}", err); - - return StatusCode::INTERNAL_SERVER_ERROR.into_response() - } - } - } else { - None - }; - - let access_token = match backend.create_token( - prepare_access_token(data.me.clone(), client_id.clone(), scope.clone()) - ).await { - Ok(token) => token, - Err(err) => { - tracing::error!("Error creating access token: {}", err); - return StatusCode::INTERNAL_SERVER_ERROR.into_response(); - } - }; - - let old_refresh_token = refresh_token; - let refresh_token = match backend.create_refresh_token( - prepare_refresh_token(data.me.clone(), client_id, scope.clone()) - ).await { - Ok(token) => token, - Err(err) => { - tracing::error!("Error creating refresh token: {}", err); - return 
StatusCode::INTERNAL_SERVER_ERROR.into_response(); - } - }; - if let Err(err) = backend.revoke_refresh_token(&me, &old_refresh_token).await { - tracing::error!("Error revoking refresh token: {}", err); - return StatusCode::INTERNAL_SERVER_ERROR.into_response(); - } - - GrantResponse::AccessToken { - me: data.me, - profile, - access_token, - token_type: kittybox_indieauth::TokenType::Bearer, - scope: Some(scope), - expires_in: Some(ACCESS_TOKEN_VALIDITY), - refresh_token: Some(refresh_token) - }.into_response() - } - } -} - -#[tracing::instrument(skip(backend, token_request))] -async fn introspection_endpoint_post( - Host(host): Host, - TypedHeader(Authorization(auth_token)): TypedHeader>, - Extension(backend): Extension, - Form(token_request): Form, -) -> Response { - use serde_json::json; - - let me: url::Url = format!("https://{}/", host).parse().unwrap(); - - // Check authentication first - match backend.get_token(&me, auth_token.token()).await { - Ok(Some(token)) => if !token.scope.has(&Scope::custom(KITTYBOX_TOKEN_STATUS)) { - return (StatusCode::UNAUTHORIZED, Json(json!({ - "error": kittybox_indieauth::ResourceErrorKind::InsufficientScope - }))).into_response(); - }, - Ok(None) => return (StatusCode::UNAUTHORIZED, Json(json!({ - "error": kittybox_indieauth::ResourceErrorKind::InvalidToken - }))).into_response(), - Err(err) => { - tracing::error!("Error retrieving token data for introspection: {}", err); - return StatusCode::INTERNAL_SERVER_ERROR.into_response() - } - } - let response: TokenIntrospectionResponse = match backend.get_token(&me, &token_request.token).await { - Ok(maybe_data) => maybe_data.into(), - Err(err) => { - tracing::error!("Error retrieving token data: {}", err); - return StatusCode::INTERNAL_SERVER_ERROR.into_response() - } - }; - - response.into_response() -} - -async fn revocation_endpoint_post( - Host(host): Host, - Extension(backend): Extension, - Form(revocation): Form, -) -> impl IntoResponse { - let me: url::Url = 
format!("https://{}/", host).parse().unwrap(); - - if let Err(err) = tokio::try_join!( - backend.revoke_token(&me, &revocation.token), - backend.revoke_refresh_token(&me, &revocation.token) - ) { - tracing::error!("Error revoking token: {}", err); - - StatusCode::INTERNAL_SERVER_ERROR - } else { - StatusCode::OK - } -} - -async fn get_profile( - db: D, - url: &str, - email: bool -) -> crate::database::Result> { - Ok(db.get_post(url).await?.map(|mut mf2| { - // Ruthlessly manually destructure the MF2 document to save memory - let name = match mf2["properties"]["name"][0].take() { - serde_json::Value::String(s) => Some(s), - _ => None - }; - let url = match mf2["properties"]["uid"][0].take() { - serde_json::Value::String(s) => s.parse().ok(), - _ => None - }; - let photo = match mf2["properties"]["photo"][0].take() { - serde_json::Value::String(s) => s.parse().ok(), - _ => None - }; - let email = if email { - match mf2["properties"]["email"][0].take() { - serde_json::Value::String(s) => Some(s), - _ => None - } - } else { - None - }; - - Profile { name, url, photo, email } - })) -} - -async fn userinfo_endpoint_get( - Host(host): Host, - TypedHeader(Authorization(auth_token)): TypedHeader>, - Extension(backend): Extension, - Extension(db): Extension -) -> Response { - use serde_json::json; - - let me: url::Url = format!("https://{}/", host).parse().unwrap(); - - match backend.get_token(&me, auth_token.token()).await { - Ok(Some(token)) => { - if token.expired() { - return (StatusCode::UNAUTHORIZED, Json(json!({ - "error": kittybox_indieauth::ResourceErrorKind::InvalidToken - }))).into_response(); - } - if !token.scope.has(&Scope::Profile) { - return (StatusCode::UNAUTHORIZED, Json(json!({ - "error": kittybox_indieauth::ResourceErrorKind::InsufficientScope - }))).into_response(); - } - - match get_profile(db, me.as_str(), token.scope.has(&Scope::Email)).await { - Ok(Some(profile)) => profile.into_response(), - Ok(None) => Json(json!({ - // We do this because 
ResourceErrorKind is IndieAuth errors only - "error": "invalid_request" - })).into_response(), - Err(err) => { - tracing::error!("Error retrieving profile from database: {}", err); - - StatusCode::INTERNAL_SERVER_ERROR.into_response() - } - } - }, - Ok(None) => Json(json!({ - "error": kittybox_indieauth::ResourceErrorKind::InvalidToken - })).into_response(), - Err(err) => { - tracing::error!("Error reading token: {}", err); - - StatusCode::INTERNAL_SERVER_ERROR.into_response() - } - } -} - -#[must_use] -pub fn router(backend: A, db: D, http: reqwest::Client) -> axum::Router { - use axum::routing::{Router, get, post}; - - Router::new() - .nest( - "/.kittybox/indieauth", - Router::new() - .route("/metadata", - get(metadata)) - .route( - "/auth", - get(authorization_endpoint_get::) - .post(authorization_endpoint_post::)) - .route( - "/auth/confirm", - post(authorization_endpoint_confirm::)) - .route( - "/token", - post(token_endpoint_post::)) - .route( - "/token_status", - post(introspection_endpoint_post::)) - .route( - "/revoke_token", - post(revocation_endpoint_post::)) - .route( - "/userinfo", - get(userinfo_endpoint_get::)) - - .route("/webauthn/pre_register", - get( - #[cfg(feature = "webauthn")] webauthn::webauthn_pre_register::, - #[cfg(not(feature = "webauthn"))] || std::future::ready(axum::http::StatusCode::NOT_FOUND) - ) - ) - .layer(tower_http::cors::CorsLayer::new() - .allow_methods([ - axum::http::Method::GET, - axum::http::Method::POST - ]) - .allow_origin(tower_http::cors::Any)) - .layer(Extension(backend)) - // I don't really like the fact that I have to use the whole database - // If I could, I would've designed a separate trait for getting profiles - // And made databases implement it, for example - .layer(Extension(db)) - .layer(Extension(http)) - ) - .route( - "/.well-known/oauth-authorization-server", - get(|| std::future::ready( - (StatusCode::FOUND, - [("Location", - "/.kittybox/indieauth/metadata")] - ).into_response() - )) - ) -} - 
-#[cfg(test)] -mod tests { - #[test] - fn test_deserialize_authorization_confirmation() { - use super::{Credential, AuthorizationConfirmation}; - - let confirmation = serde_json::from_str::(r#"{ - "request":{ - "response_type": "code", - "client_id": "https://quill.p3k.io/", - "redirect_uri": "https://quill.p3k.io/", - "state": "10101010", - "code_challenge": "awooooooooooo", - "code_challenge_method": "S256", - "scope": "create+media" - }, - "authorization_method": "swordfish" - }"#).unwrap(); - - match confirmation.authorization_method { - Credential::Password(password) => assert_eq!(password.as_str(), "swordfish"), - #[allow(unreachable_patterns)] - other => panic!("Incorrect credential: {:?}", other) - } - assert_eq!(confirmation.request.state.as_ref(), "10101010"); - } -} diff --git a/kittybox-rs/src/indieauth/webauthn.rs b/kittybox-rs/src/indieauth/webauthn.rs deleted file mode 100644 index ea3ad3d..0000000 --- a/kittybox-rs/src/indieauth/webauthn.rs +++ /dev/null @@ -1,140 +0,0 @@ -use axum::{ - extract::{Json, Host}, - response::{IntoResponse, Response}, - http::StatusCode, Extension, TypedHeader, headers::{authorization::Bearer, Authorization} -}; -use axum_extra::extract::cookie::{CookieJar, Cookie}; - -use super::backend::AuthBackend; -use crate::database::Storage; - -pub(crate) const CHALLENGE_ID_COOKIE: &str = "kittybox_webauthn_challenge_id"; - -macro_rules! bail { - ($msg:literal, $err:expr) => { - { - ::tracing::error!($msg, $err); - return ::axum::http::StatusCode::INTERNAL_SERVER_ERROR.into_response() - } - } -} - -pub async fn webauthn_pre_register( - Host(host): Host, - Extension(db): Extension, - Extension(auth): Extension, - cookies: CookieJar -) -> Response { - let uid = format!("https://{}/", host.clone()); - let uid_url: url::Url = uid.parse().unwrap(); - // This will not find an h-card in onboarding! 
- let display_name = match db.get_post(&uid).await { - Ok(hcard) => match hcard { - Some(mut hcard) => { - match hcard["properties"]["uid"][0].take() { - serde_json::Value::String(name) => name, - _ => String::default() - } - }, - None => String::default() - }, - Err(err) => bail!("Error retrieving h-card: {}", err) - }; - - let webauthn = webauthn::WebauthnBuilder::new( - &host, - &uid_url - ) - .unwrap() - .rp_name("Kittybox") - .build() - .unwrap(); - - let (challenge, state) = match webauthn.start_passkey_registration( - // Note: using a nil uuid here is fine - // Because the user corresponds to a website anyway - // We do not track multiple users - webauthn::prelude::Uuid::nil(), - &uid, - &display_name, - Some(vec![]) - ) { - Ok((challenge, state)) => (challenge, state), - Err(err) => bail!("Error generating WebAuthn registration data: {}", err) - }; - - match auth.persist_registration_challenge(&uid_url, state).await { - Ok(challenge_id) => ( - cookies.add( - Cookie::build(CHALLENGE_ID_COOKIE, challenge_id) - .secure(true) - .finish() - ), - Json(challenge) - ).into_response(), - Err(err) => bail!("Failed to persist WebAuthn challenge: {}", err) - } -} - -pub async fn webauthn_register( - Host(host): Host, - Json(credential): Json, - // TODO determine if we can use a cookie maybe? 
- user_credential: Option>>, - Extension(auth): Extension -) -> Response { - let uid = format!("https://{}/", host.clone()); - let uid_url: url::Url = uid.parse().unwrap(); - - let pubkeys = match auth.list_webauthn_pubkeys(&uid_url).await { - Ok(pubkeys) => pubkeys, - Err(err) => bail!("Error enumerating existing WebAuthn credentials: {}", err) - }; - - if !pubkeys.is_empty() { - if let Some(TypedHeader(Authorization(token))) = user_credential { - // TODO check validity of the credential - } else { - return StatusCode::UNAUTHORIZED.into_response() - } - } - - return StatusCode::OK.into_response() -} - -pub(crate) async fn verify( - auth: &A, - website: &url::Url, - credential: webauthn::prelude::PublicKeyCredential, - challenge_id: &str -) -> std::io::Result { - let host = website.host_str().unwrap(); - - let webauthn = webauthn::WebauthnBuilder::new( - host, - website - ) - .unwrap() - .rp_name("Kittybox") - .build() - .unwrap(); - - match webauthn.finish_passkey_authentication( - &credential, - &auth.retrieve_authentication_challenge(&website, challenge_id).await? - ) { - Err(err) => { - tracing::error!("WebAuthn error: {}", err); - Ok(false) - }, - Ok(authentication_result) => { - let counter = authentication_result.counter(); - let cred_id = authentication_result.cred_id(); - - if authentication_result.needs_update() { - todo!() - } - Ok(true) - } - } -} diff --git a/kittybox-rs/src/lib.rs b/kittybox-rs/src/lib.rs deleted file mode 100644 index c1bd965..0000000 --- a/kittybox-rs/src/lib.rs +++ /dev/null @@ -1,93 +0,0 @@ -#![forbid(unsafe_code)] -#![warn(clippy::todo)] - -/// Database abstraction layer for Kittybox, allowing the CMS to work with any kind of database. 
-pub mod database; -pub mod frontend; -pub mod media; -pub mod micropub; -pub mod indieauth; -pub mod webmentions; - -pub mod companion { - use std::{collections::HashMap, sync::Arc}; - use axum::{ - extract::{Extension, Path}, - response::{IntoResponse, Response} - }; - - #[derive(Debug, Clone, Copy)] - struct Resource { - data: &'static [u8], - mime: &'static str - } - - impl IntoResponse for &Resource { - fn into_response(self) -> Response { - (axum::http::StatusCode::OK, - [("Content-Type", self.mime)], - self.data).into_response() - } - } - - // TODO replace with the "phf" crate someday - type ResourceTable = Arc>; - - #[tracing::instrument] - async fn map_to_static( - Path(name): Path, - Extension(resources): Extension - ) -> Response { - tracing::debug!("Searching for {} in the resource table...", name); - match resources.get(name.as_str()) { - Some(res) => res.into_response(), - None => { - #[cfg(debug_assertions)] tracing::error!("Not found"); - - (axum::http::StatusCode::NOT_FOUND, - [("Content-Type", "text/plain")], - "Not found. Sorry.".as_bytes()).into_response() - } - } - } - - #[must_use] - pub fn router() -> axum::Router { - let resources: ResourceTable = { - let mut map = HashMap::new(); - - macro_rules! register_resource { - ($map:ident, $prefix:expr, ($filename:literal, $mime:literal)) => {{ - $map.insert($filename, Resource { - data: include_bytes!(concat!($prefix, $filename)), - mime: $mime - }) - }}; - ($map:ident, $prefix:expr, ($filename:literal, $mime:literal), $( ($f:literal, $m:literal) ),+) => {{ - register_resource!($map, $prefix, ($filename, $mime)); - register_resource!($map, $prefix, $(($f, $m)),+); - }}; - } - - register_resource! 
{ - map, - concat!(env!("OUT_DIR"), "/", "companion", "/"), - ("index.html", "text/html; charset=\"utf-8\""), - ("main.js", "text/javascript"), - ("micropub_api.js", "text/javascript"), - ("indieauth.js", "text/javascript"), - ("base64.js", "text/javascript"), - ("style.css", "text/css") - }; - - Arc::new(map) - }; - - axum::Router::new() - .route( - "/:filename", - axum::routing::get(map_to_static) - .layer(Extension(resources)) - ) - } -} diff --git a/kittybox-rs/src/main.rs b/kittybox-rs/src/main.rs deleted file mode 100644 index 6389489..0000000 --- a/kittybox-rs/src/main.rs +++ /dev/null @@ -1,489 +0,0 @@ -use kittybox::database::FileStorage; -use std::{env, time::Duration, sync::Arc}; -use tracing::error; - -fn init_media(auth_backend: A, blobstore_uri: &str) -> axum::Router { - match blobstore_uri.split_once(':').unwrap().0 { - "file" => { - let folder = std::path::PathBuf::from( - blobstore_uri.strip_prefix("file://").unwrap() - ); - let blobstore = kittybox::media::storage::file::FileStore::new(folder); - - kittybox::media::router::<_, _>(blobstore, auth_backend) - }, - other => unimplemented!("Unsupported backend: {other}") - } -} - -async fn compose_kittybox_with_auth( - http: reqwest::Client, - auth_backend: A, - backend_uri: &str, - blobstore_uri: &str, - job_queue_uri: &str, - jobset: &Arc>>, - cancellation_token: &tokio_util::sync::CancellationToken -) -> (axum::Router, kittybox::webmentions::SupervisedTask) -where A: kittybox::indieauth::backend::AuthBackend -{ - match backend_uri.split_once(':').unwrap().0 { - "file" => { - let database = { - let folder = backend_uri.strip_prefix("file://").unwrap(); - let path = std::path::PathBuf::from(folder); - - match kittybox::database::FileStorage::new(path).await { - Ok(db) => db, - Err(err) => { - error!("Error creating database: {:?}", err); - std::process::exit(1); - } - } - }; - - // Technically, if we don't construct the micropub router, - // we could use some wrapper that makes the database - // 
read-only. - // - // This would allow to exclude all code to write to the - // database and separate reader and writer processes of - // Kittybox to improve security. - let homepage: axum::routing::MethodRouter<_> = axum::routing::get( - kittybox::frontend::homepage:: - ) - .layer(axum::Extension(database.clone())); - let fallback = axum::routing::get( - kittybox::frontend::catchall:: - ) - .layer(axum::Extension(database.clone())); - - let micropub = kittybox::micropub::router( - database.clone(), - http.clone(), - auth_backend.clone(), - Arc::clone(jobset) - ); - let onboarding = kittybox::frontend::onboarding::router( - database.clone(), http.clone(), Arc::clone(jobset) - ); - - - let (webmention, task) = kittybox::webmentions::router( - kittybox::webmentions::queue::PostgresJobQueue::new(job_queue_uri).await.unwrap(), - database.clone(), - http.clone(), - cancellation_token.clone() - ); - - let router = axum::Router::new() - .route("/", homepage) - .fallback(fallback) - .route("/.kittybox/micropub", micropub) - .route("/.kittybox/onboarding", onboarding) - .nest("/.kittybox/media", init_media(auth_backend.clone(), blobstore_uri)) - .merge(kittybox::indieauth::router(auth_backend.clone(), database.clone(), http.clone())) - .merge(webmention) - .route( - "/.kittybox/health", - axum::routing::get(health_check::) - .layer(axum::Extension(database)) - ); - - (router, task) - }, - "redis" => unimplemented!("Redis backend is not supported."), - #[cfg(feature = "postgres")] - "postgres" => { - use kittybox::database::PostgresStorage; - - let database = { - match PostgresStorage::new(backend_uri).await { - Ok(db) => db, - Err(err) => { - error!("Error creating database: {:?}", err); - std::process::exit(1); - } - } - }; - - // Technically, if we don't construct the micropub router, - // we could use some wrapper that makes the database - // read-only. 
- // - // This would allow to exclude all code to write to the - // database and separate reader and writer processes of - // Kittybox to improve security. - let homepage: axum::routing::MethodRouter<_> = axum::routing::get( - kittybox::frontend::homepage:: - ) - .layer(axum::Extension(database.clone())); - let fallback = axum::routing::get( - kittybox::frontend::catchall:: - ) - .layer(axum::Extension(database.clone())); - - let micropub = kittybox::micropub::router( - database.clone(), - http.clone(), - auth_backend.clone(), - Arc::clone(jobset) - ); - let onboarding = kittybox::frontend::onboarding::router( - database.clone(), http.clone(), Arc::clone(jobset) - ); - - let (webmention, task) = kittybox::webmentions::router( - kittybox::webmentions::queue::PostgresJobQueue::new(job_queue_uri).await.unwrap(), - database.clone(), - http.clone(), - cancellation_token.clone() - ); - - let router = axum::Router::new() - .route("/", homepage) - .fallback(fallback) - .route("/.kittybox/micropub", micropub) - .route("/.kittybox/onboarding", onboarding) - .nest("/.kittybox/media", init_media(auth_backend.clone(), blobstore_uri)) - .merge(kittybox::indieauth::router(auth_backend.clone(), database.clone(), http.clone())) - .merge(webmention) - .route( - "/.kittybox/health", - axum::routing::get(health_check::) - .layer(axum::Extension(database)) - ); - - (router, task) - }, - other => unimplemented!("Unsupported backend: {other}") - } -} - -async fn compose_kittybox( - backend_uri: &str, - blobstore_uri: &str, - authstore_uri: &str, - job_queue_uri: &str, - jobset: &Arc>>, - cancellation_token: &tokio_util::sync::CancellationToken -) -> (axum::Router, kittybox::webmentions::SupervisedTask) { - let http: reqwest::Client = { - #[allow(unused_mut)] - let mut builder = reqwest::Client::builder() - .user_agent(concat!( - env!("CARGO_PKG_NAME"), - "/", - env!("CARGO_PKG_VERSION") - )); - if let Ok(certs) = std::env::var("KITTYBOX_CUSTOM_PKI_ROOTS") { - // TODO: add a root 
certificate if there's an environment variable pointing at it - for path in certs.split(':') { - let metadata = match tokio::fs::metadata(path).await { - Ok(metadata) => metadata, - Err(err) if err.kind() == std::io::ErrorKind::NotFound => { - tracing::error!("TLS root certificate {} not found, skipping...", path); - continue; - } - Err(err) => panic!("Error loading TLS certificates: {}", err) - }; - if metadata.is_dir() { - let mut dir = tokio::fs::read_dir(path).await.unwrap(); - while let Ok(Some(file)) = dir.next_entry().await { - let pem = tokio::fs::read(file.path()).await.unwrap(); - builder = builder.add_root_certificate( - reqwest::Certificate::from_pem(&pem).unwrap() - ); - } - } else { - let pem = tokio::fs::read(path).await.unwrap(); - builder = builder.add_root_certificate( - reqwest::Certificate::from_pem(&pem).unwrap() - ); - } - } - } - - builder.build().unwrap() - }; - - let (router, task) = match authstore_uri.split_once(':').unwrap().0 { - "file" => { - let auth_backend = { - let folder = authstore_uri - .strip_prefix("file://") - .unwrap(); - kittybox::indieauth::backend::fs::FileBackend::new(folder) - }; - - compose_kittybox_with_auth(http, auth_backend, backend_uri, blobstore_uri, job_queue_uri, jobset, cancellation_token).await - } - other => unimplemented!("Unsupported backend: {other}") - }; - - let router = router - .route( - "/.kittybox/static/:path", - axum::routing::get(kittybox::frontend::statics) - ) - .route("/.kittybox/coffee", teapot_route()) - .nest("/.kittybox/micropub/client", kittybox::companion::router()) - .layer(tower_http::trace::TraceLayer::new_for_http()) - .layer(tower_http::catch_panic::CatchPanicLayer::new()); - - (router, task) -} - -fn teapot_route() -> axum::routing::MethodRouter { - axum::routing::get(|| async { - use axum::http::{header, StatusCode}; - (StatusCode::IM_A_TEAPOT, [(header::CONTENT_TYPE, "text/plain")], "Sorry, can't brew coffee yet!") - }) -} - -async fn health_check( - //axum::Extension(auth): 
axum::Extension, - //axum::Extension(blob): axum::Extension, - axum::Extension(data): axum::Extension, -) -> impl axum::response::IntoResponse -where - //A: kittybox::indieauth::backend::AuthBackend, - //B: kittybox::media::storage::MediaStore, - D: kittybox::database::Storage -{ - (axum::http::StatusCode::OK, std::borrow::Cow::Borrowed("OK")) -} - -#[tokio::main] -async fn main() { - use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Registry}; - - let tracing_registry = Registry::default() - .with(EnvFilter::from_default_env()) - .with( - #[cfg(debug_assertions)] - tracing_tree::HierarchicalLayer::new(2) - .with_bracketed_fields(true) - .with_indent_lines(true) - .with_verbose_exit(true), - #[cfg(not(debug_assertions))] - tracing_subscriber::fmt::layer().json() - .with_ansi(std::io::IsTerminal::is_terminal(&std::io::stdout().lock())) - ); - // In debug builds, also log to JSON, but to file. - #[cfg(debug_assertions)] - let tracing_registry = tracing_registry.with( - tracing_subscriber::fmt::layer() - .json() - .with_writer({ - let instant = std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .unwrap(); - move || std::fs::OpenOptions::new() - .append(true) - .create(true) - .open( - format!( - "{}.log.json", - instant - .as_secs_f64() - .to_string() - .replace('.', "_") - ) - ).unwrap() - }) - ); - tracing_registry.init(); - - tracing::info!("Starting the kittybox server..."); - - let backend_uri: String = env::var("BACKEND_URI") - .unwrap_or_else(|_| { - error!("BACKEND_URI is not set, cannot find a database"); - std::process::exit(1); - }); - let blobstore_uri: String = env::var("BLOBSTORE_URI") - .unwrap_or_else(|_| { - error!("BLOBSTORE_URI is not set, can't find media store"); - std::process::exit(1); - }); - - let authstore_uri: String = env::var("AUTH_STORE_URI") - .unwrap_or_else(|_| { - error!("AUTH_STORE_URI is not set, can't find authentication store"); - std::process::exit(1); - }); - - let 
job_queue_uri: String = env::var("JOB_QUEUE_URI") - .unwrap_or_else(|_| { - error!("JOB_QUEUE_URI is not set, can't find job queue"); - std::process::exit(1); - }); - - let cancellation_token = tokio_util::sync::CancellationToken::new(); - let jobset = Arc::new(tokio::sync::Mutex::new(tokio::task::JoinSet::new())); - - let (router, webmentions_task) = compose_kittybox( - backend_uri.as_str(), - blobstore_uri.as_str(), - authstore_uri.as_str(), - job_queue_uri.as_str(), - &jobset, - &cancellation_token - ).await; - - let mut servers: Vec> = vec![]; - - let build_hyper = |tcp: std::net::TcpListener| { - tracing::info!("Listening on {}", tcp.local_addr().unwrap()); - // Set the socket to non-blocking so tokio can poll it - // properly -- this is the async magic! - tcp.set_nonblocking(true).unwrap(); - - hyper::server::Server::from_tcp(tcp).unwrap() - // Otherwise Chrome keeps connections open for too long - .tcp_keepalive(Some(Duration::from_secs(30 * 60))) - .serve(router.clone().into_make_service()) - }; - - let mut listenfd = listenfd::ListenFd::from_env(); - for i in 0..(listenfd.len()) { - match listenfd.take_tcp_listener(i) { - Ok(Some(tcp)) => servers.push(build_hyper(tcp)), - Ok(None) => {}, - Err(err) => tracing::error!("Error binding to socket in fd {}: {}", i, err) - } - } - // TODO this requires the `hyperlocal` crate - //#[rustfmt::skip] - /*#[cfg(unix)] { - let build_hyper_unix = |unix: std::os::unix::net::UnixListener| { - { - use std::os::linux::net::SocketAddrExt; - - let local_addr = unix.local_addr().unwrap(); - if let Some(pathname) = local_addr.as_pathname() { - tracing::info!("Listening on unix:{}", pathname.display()); - } else if let Some(name) = { - #[cfg(linux)] - local_addr.as_abstract_name(); - #[cfg(not(linux))] - None::<&[u8]> - } { - tracing::info!("Listening on unix:@{}", String::from_utf8_lossy(name)); - } else { - tracing::info!("Listening on unnamed unix socket"); - } - } - unix.set_nonblocking(true).unwrap(); - - 
hyper::server::Server::builder(unix) - .serve(router.clone().into_make_service()) - }; - for i in 0..(listenfd.len()) { - match listenfd.take_unix_listener(i) { - Ok(Some(unix)) => servers.push(build_hyper_unix(unix)), - Ok(None) => {}, - Err(err) => tracing::error!("Error binding to socket in fd {}: {}", i, err) - } - } - }*/ - if servers.is_empty() { - servers.push(build_hyper({ - let listen_addr = env::var("SERVE_AT") - .ok() - .unwrap_or_else(|| "[::]:8080".to_string()) - .parse::() - .unwrap_or_else(|e| { - error!("Cannot parse SERVE_AT: {}", e); - std::process::exit(1); - }); - - std::net::TcpListener::bind(listen_addr).unwrap() - })) - } - // Drop the remaining copy of the router - // to get rid of an extra reference to `jobset` - drop(router); - // Polling streams mutates them - let mut servers_futures = Box::pin(servers.into_iter() - .map( - #[cfg(not(tokio_unstable))] |server| tokio::task::spawn( - server.with_graceful_shutdown(cancellation_token.clone().cancelled_owned()) - ), - #[cfg(tokio_unstable)] |server| { - tokio::task::Builder::new() - .name(format!("Kittybox HTTP acceptor: {}", server.local_addr()).as_str()) - .spawn( - server.with_graceful_shutdown( - cancellation_token.clone().cancelled_owned() - ) - ) - .unwrap() - } - ) - .collect::>>>() - ); - - #[cfg(not(unix))] - let shutdown_signal = tokio::signal::ctrl_c(); - #[cfg(unix)] - let shutdown_signal = { - use tokio::signal::unix::{signal, SignalKind}; - - async move { - let mut interrupt = signal(SignalKind::interrupt()) - .expect("Failed to set up SIGINT handler"); - let mut terminate = signal(SignalKind::terminate()) - .expect("Failed to setup SIGTERM handler"); - - tokio::select! { - _ = terminate.recv() => {}, - _ = interrupt.recv() => {}, - } - } - }; - use futures_util::stream::StreamExt; - - let exitcode: i32 = tokio::select! { - // Poll the servers stream for errors. 
- // If any error out, shut down the entire operation - // - // We do this because there might not be a good way - // to recover from some errors without external help - Some(Err(e)) = servers_futures.next() => { - tracing::error!("Error in HTTP server: {}", e); - tracing::error!("Shutting down because of error."); - cancellation_token.cancel(); - - 1 - } - _ = cancellation_token.cancelled() => { - tracing::info!("Signal caught from watchdog."); - - 0 - } - _ = shutdown_signal => { - tracing::info!("Shutdown requested by signal."); - cancellation_token.cancel(); - - 0 - } - }; - - tracing::info!("Waiting for unfinished background tasks..."); - - let _ = tokio::join!( - webmentions_task, - Box::pin(futures_util::future::join_all( - servers_futures.iter_mut().collect::>() - )), - ); - let mut jobset: tokio::task::JoinSet<()> = Arc::try_unwrap(jobset) - .expect("Dangling jobset references present") - .into_inner(); - while (jobset.join_next().await).is_some() {} - tracing::info!("Shutdown complete, exiting."); - std::process::exit(exitcode); - -} diff --git a/kittybox-rs/src/media/mod.rs b/kittybox-rs/src/media/mod.rs deleted file mode 100644 index 71f875e..0000000 --- a/kittybox-rs/src/media/mod.rs +++ /dev/null @@ -1,141 +0,0 @@ -use std::convert::TryFrom; - -use axum::{ - extract::{Extension, Host, multipart::Multipart, Path}, - response::{IntoResponse, Response}, - headers::{Header, HeaderValue, IfNoneMatch, HeaderMapExt}, - TypedHeader, -}; -use kittybox_util::error::{MicropubError, ErrorType}; -use kittybox_indieauth::Scope; -use crate::indieauth::{User, backend::AuthBackend}; - -pub mod storage; -use storage::{MediaStore, MediaStoreError, Metadata, ErrorKind}; -pub use storage::file::FileStore; - -impl From for MicropubError { - fn from(err: MediaStoreError) -> Self { - Self { - error: ErrorType::InternalServerError, - error_description: format!("{}", err) - } - } -} - -#[tracing::instrument(skip(blobstore))] -pub(crate) async fn upload( - Extension(blobstore): 
Extension, - user: User, - mut upload: Multipart -) -> Response { - if !user.check_scope(&Scope::Media) { - return MicropubError { - error: ErrorType::NotAuthorized, - error_description: "Interacting with the media storage requires the \"media\" scope.".to_owned() - }.into_response(); - } - let host = user.me.host().unwrap().to_string() + &user.me.port().map(|i| format!(":{}", i)).unwrap_or_default(); - let field = match upload.next_field().await { - Ok(Some(field)) => field, - Ok(None) => { - return MicropubError { - error: ErrorType::InvalidRequest, - error_description: "Send multipart/form-data with one field named file".to_owned() - }.into_response(); - }, - Err(err) => { - return MicropubError { - error: ErrorType::InternalServerError, - error_description: format!("Error while parsing multipart/form-data: {}", err) - }.into_response(); - }, - }; - let metadata: Metadata = (&field).into(); - match blobstore.write_streaming(&host, metadata, field).await { - Ok(filename) => IntoResponse::into_response(( - axum::http::StatusCode::CREATED, - [ - ("Location", user.me.join( - &format!(".kittybox/media/uploads/{}", filename) - ).unwrap().as_str()) - ] - )), - Err(err) => MicropubError::from(err).into_response() - } -} - -#[tracing::instrument(skip(blobstore))] -pub(crate) async fn serve( - Host(host): Host, - Path(path): Path, - if_none_match: Option>, - Extension(blobstore): Extension -) -> Response { - use axum::http::StatusCode; - tracing::debug!("Searching for file..."); - match blobstore.read_streaming(&host, path.as_str()).await { - Ok((metadata, stream)) => { - tracing::debug!("Metadata: {:?}", metadata); - - let etag = if let Some(etag) = metadata.etag { - let etag = format!("\"{}\"", etag).parse::().unwrap(); - - if let Some(TypedHeader(if_none_match)) = if_none_match { - tracing::debug!("If-None-Match: {:?}", if_none_match); - // If-None-Match is a negative precondition that - // returns 304 when it doesn't match because it - // only matches when file is 
different - if !if_none_match.precondition_passes(&etag) { - return StatusCode::NOT_MODIFIED.into_response() - } - } - - Some(etag) - } else { None }; - - let mut r = Response::builder(); - { - let headers = r.headers_mut().unwrap(); - headers.insert( - "Content-Type", - HeaderValue::from_str( - metadata.content_type - .as_deref() - .unwrap_or("application/octet-stream") - ).unwrap() - ); - if let Some(length) = metadata.length { - headers.insert( - "Content-Length", - HeaderValue::from_str(&length.to_string()).unwrap() - ); - } - if let Some(etag) = etag { - headers.typed_insert(etag); - } - } - r.body(axum::body::StreamBody::new(stream)) - .unwrap() - .into_response() - }, - Err(err) => match err.kind() { - ErrorKind::NotFound => { - IntoResponse::into_response(StatusCode::NOT_FOUND) - }, - _ => { - tracing::error!("{}", err); - IntoResponse::into_response(StatusCode::INTERNAL_SERVER_ERROR) - } - } - } -} - -#[must_use] -pub fn router(blobstore: S, auth: A) -> axum::Router { - axum::Router::new() - .route("/", axum::routing::post(upload::)) - .route("/uploads/*file", axum::routing::get(serve::)) - .layer(axum::Extension(blobstore)) - .layer(axum::Extension(auth)) -} diff --git a/kittybox-rs/src/media/storage/file.rs b/kittybox-rs/src/media/storage/file.rs deleted file mode 100644 index 0aaaa3b..0000000 --- a/kittybox-rs/src/media/storage/file.rs +++ /dev/null @@ -1,434 +0,0 @@ -use super::{Metadata, ErrorKind, MediaStore, MediaStoreError, Result}; -use async_trait::async_trait; -use std::{path::PathBuf, fmt::Debug}; -use tokio::fs::OpenOptions; -use tokio::io::{BufReader, BufWriter, AsyncWriteExt, AsyncSeekExt}; -use futures::{StreamExt, TryStreamExt}; -use std::ops::{Bound, RangeBounds, Neg}; -use std::pin::Pin; -use sha2::Digest; -use futures::FutureExt; -use tracing::{debug, error}; - -const BUF_CAPACITY: usize = 16 * 1024; - -#[derive(Clone)] -pub struct FileStore { - base: PathBuf, -} - -impl From for MediaStoreError { - fn from(source: tokio::io::Error) -> 
Self { - Self { - msg: format!("file I/O error: {}", source), - kind: match source.kind() { - std::io::ErrorKind::NotFound => ErrorKind::NotFound, - _ => ErrorKind::Backend - }, - source: Some(Box::new(source)), - } - } -} - -impl FileStore { - pub fn new>(base: T) -> Self { - Self { base: base.into() } - } - - async fn mktemp(&self) -> Result<(PathBuf, BufWriter)> { - kittybox_util::fs::mktemp(&self.base, "temp", 16) - .await - .map(|(name, file)| (name, BufWriter::new(file))) - .map_err(Into::into) - } -} - -#[async_trait] -impl MediaStore for FileStore { - - #[tracing::instrument(skip(self, content))] - async fn write_streaming( - &self, - domain: &str, - mut metadata: Metadata, - mut content: T, - ) -> Result - where - T: tokio_stream::Stream> + Unpin + Send + Debug - { - let (tempfilepath, mut tempfile) = self.mktemp().await?; - debug!("Temporary file opened for storing pending upload: {}", tempfilepath.display()); - let mut hasher = sha2::Sha256::new(); - let mut length: usize = 0; - - while let Some(chunk) = content.next().await { - let chunk = chunk.map_err(|err| MediaStoreError { - kind: ErrorKind::Backend, - source: Some(Box::new(err)), - msg: "Failed to read a data chunk".to_owned() - })?; - debug!("Read {} bytes from the stream", chunk.len()); - length += chunk.len(); - let (write_result, _hasher) = tokio::join!( - { - let chunk = chunk.clone(); - let tempfile = &mut tempfile; - async move { - tempfile.write_all(&*chunk).await - } - }, - { - let chunk = chunk.clone(); - tokio::task::spawn_blocking(move || { - hasher.update(&*chunk); - - hasher - }).map(|r| r.unwrap()) - } - ); - if let Err(err) = write_result { - error!("Error while writing pending upload: {}", err); - drop(tempfile); - // this is just cleanup, nothing fails if it fails - // though temporary files might take up space on the hard drive - // We'll clean them when maintenance time comes - #[allow(unused_must_use)] - { tokio::fs::remove_file(tempfilepath).await; } - return Err(err.into()); 
- } - hasher = _hasher; - } - // Manually flush the buffer and drop the handle to close the file - tempfile.flush().await?; - tempfile.into_inner().sync_all().await?; - - let hash = hasher.finalize(); - debug!("Pending upload hash: {}", hex::encode(&hash)); - let filename = format!( - "{}/{}/{}/{}/{}", - hex::encode([hash[0]]), - hex::encode([hash[1]]), - hex::encode([hash[2]]), - hex::encode([hash[3]]), - hex::encode(&hash[4..32]) - ); - let domain_str = domain.to_string(); - let filepath = self.base.join(domain_str.as_str()).join(&filename); - let metafilename = filename.clone() + ".json"; - let metapath = self.base.join(domain_str.as_str()).join(&metafilename); - let metatemppath = self.base.join(domain_str.as_str()).join(metafilename + ".tmp"); - metadata.length = std::num::NonZeroUsize::new(length); - metadata.etag = Some(hex::encode(&hash)); - debug!("File path: {}, metadata: {}", filepath.display(), metapath.display()); - { - let parent = filepath.parent().unwrap(); - tokio::fs::create_dir_all(parent).await?; - } - let mut meta = OpenOptions::new() - .create_new(true) - .write(true) - .open(&metatemppath) - .await?; - meta.write_all(&serde_json::to_vec(&metadata).unwrap()).await?; - tokio::fs::rename(tempfilepath, filepath).await?; - tokio::fs::rename(metatemppath, metapath).await?; - Ok(filename) - } - - #[tracing::instrument(skip(self))] - async fn read_streaming( - &self, - domain: &str, - filename: &str, - ) -> Result<(Metadata, Pin> + Send>>)> { - debug!("Domain: {}, filename: {}", domain, filename); - let path = self.base.join(domain).join(filename); - debug!("Path: {}", path.display()); - - let file = OpenOptions::new() - .read(true) - .open(path) - .await?; - let meta = self.metadata(domain, filename).await?; - - Ok((meta, Box::pin( - tokio_util::io::ReaderStream::new( - // TODO: determine if BufReader provides benefit here - // From the logs it looks like we're reading 4KiB at a time - // Buffering file contents seems to double download speed - // 
How to benchmark this? - BufReader::with_capacity(BUF_CAPACITY, file) - ) - // Sprinkle some salt in form of protective log wrapping - .inspect_ok(|chunk| debug!("Read {} bytes from file", chunk.len())) - ))) - } - - #[tracing::instrument(skip(self))] - async fn metadata(&self, domain: &str, filename: &str) -> Result { - let metapath = self.base.join(domain).join(format!("{}.json", filename)); - debug!("Metadata path: {}", metapath.display()); - - let meta = serde_json::from_slice(&tokio::fs::read(metapath).await?) - .map_err(|err| MediaStoreError { - kind: ErrorKind::Json, - msg: format!("{}", err), - source: Some(Box::new(err)) - })?; - - Ok(meta) - } - - #[tracing::instrument(skip(self))] - async fn stream_range( - &self, - domain: &str, - filename: &str, - range: (Bound, Bound) - ) -> Result> + Send>>> { - let path = self.base.join(format!("{}/{}", domain, filename)); - let metapath = self.base.join(format!("{}/{}.json", domain, filename)); - debug!("Path: {}, metadata: {}", path.display(), metapath.display()); - - let mut file = OpenOptions::new() - .read(true) - .open(path) - .await?; - - let start = match range { - (Bound::Included(bound), _) => { - debug!("Seeking {} bytes forward...", bound); - file.seek(std::io::SeekFrom::Start(bound)).await? - } - (Bound::Excluded(_), _) => unreachable!(), - (Bound::Unbounded, Bound::Included(bound)) => { - // Seek to the end minus the bounded bytes - debug!("Seeking {} bytes back from the end...", bound); - file.seek(std::io::SeekFrom::End(i64::try_from(bound).unwrap().neg())).await? 
- }, - (Bound::Unbounded, Bound::Unbounded) => 0, - (_, Bound::Excluded(_)) => unreachable!() - }; - - let stream = Box::pin(tokio_util::io::ReaderStream::new(BufReader::with_capacity(BUF_CAPACITY, file))) - .map_ok({ - let mut bytes_read = 0usize; - let len = match range { - (_, Bound::Unbounded) => None, - (Bound::Unbounded, Bound::Included(bound)) => Some(bound), - (_, Bound::Included(bound)) => Some(bound + 1 - start), - (_, Bound::Excluded(_)) => unreachable!() - }; - move |chunk| { - debug!("Read {} bytes from file, {} in this chunk", bytes_read, chunk.len()); - bytes_read += chunk.len(); - if let Some(len) = len.map(|len| len.try_into().unwrap()) { - if bytes_read > len { - if bytes_read - len > chunk.len() { - return None - } - debug!("Truncating last {} bytes", bytes_read - len); - return Some(chunk.slice(..chunk.len() - (bytes_read - len))) - } - } - - Some(chunk) - } - }) - .try_take_while(|x| std::future::ready(Ok(x.is_some()))) - // Will never panic, because the moment the stream yields - // a None, it is considered exhausted. - .map_ok(|x| x.unwrap()); - - return Ok(Box::pin(stream)) - } - - - async fn delete(&self, domain: &str, filename: &str) -> Result<()> { - let path = self.base.join(format!("{}/{}", domain, filename)); - - Ok(tokio::fs::remove_file(path).await?) 
- } -} - -#[cfg(test)] -mod tests { - use super::{Metadata, FileStore, MediaStore}; - use std::ops::Bound; - use tokio::io::AsyncReadExt; - - #[tokio::test] - #[tracing_test::traced_test] - async fn test_ranges() { - let tempdir = tempfile::tempdir().expect("Failed to create tempdir"); - let store = FileStore::new(tempdir.path()); - - let file: &[u8] = include_bytes!("./file.rs"); - let stream = tokio_stream::iter(file.chunks(100).map(|i| Ok(bytes::Bytes::copy_from_slice(i)))); - let metadata = Metadata { - filename: Some("file.rs".to_string()), - content_type: Some("text/plain".to_string()), - length: None, - etag: None, - }; - - // write through the interface - let filename = store.write_streaming( - "fireburn.ru", - metadata, stream - ).await.unwrap(); - - tracing::debug!("Writing complete."); - - // Ensure the file is there - let content = tokio::fs::read( - tempdir.path() - .join("fireburn.ru") - .join(&filename) - ).await.unwrap(); - assert_eq!(content, file); - - tracing::debug!("Reading range from the start..."); - // try to read range - let range = { - let stream = store.stream_range( - "fireburn.ru", &filename, - (Bound::Included(0), Bound::Included(299)) - ).await.unwrap(); - - let mut reader = tokio_util::io::StreamReader::new(stream); - - let mut buf = Vec::default(); - reader.read_to_end(&mut buf).await.unwrap(); - - buf - }; - - assert_eq!(range.len(), 300); - assert_eq!(range.as_slice(), &file[..=299]); - - tracing::debug!("Reading range from the middle..."); - - let range = { - let stream = store.stream_range( - "fireburn.ru", &filename, - (Bound::Included(150), Bound::Included(449)) - ).await.unwrap(); - - let mut reader = tokio_util::io::StreamReader::new(stream); - - let mut buf = Vec::default(); - reader.read_to_end(&mut buf).await.unwrap(); - - buf - }; - - assert_eq!(range.len(), 300); - assert_eq!(range.as_slice(), &file[150..=449]); - - tracing::debug!("Reading range from the end..."); - let range = { - let stream = store.stream_range( - 
"fireburn.ru", &filename, - // Note: the `headers` crate parses bounds in a - // non-standard way, where unbounded start actually - // means getting things from the end... - (Bound::Unbounded, Bound::Included(300)) - ).await.unwrap(); - - let mut reader = tokio_util::io::StreamReader::new(stream); - - let mut buf = Vec::default(); - reader.read_to_end(&mut buf).await.unwrap(); - - buf - }; - - assert_eq!(range.len(), 300); - assert_eq!(range.as_slice(), &file[file.len()-300..file.len()]); - - tracing::debug!("Reading the whole file..."); - // try to read range - let range = { - let stream = store.stream_range( - "fireburn.ru", &("/".to_string() + &filename), - (Bound::Unbounded, Bound::Unbounded) - ).await.unwrap(); - - let mut reader = tokio_util::io::StreamReader::new(stream); - - let mut buf = Vec::default(); - reader.read_to_end(&mut buf).await.unwrap(); - - buf - }; - - assert_eq!(range.len(), file.len()); - assert_eq!(range.as_slice(), file); - } - - - #[tokio::test] - #[tracing_test::traced_test] - async fn test_streaming_read_write() { - let tempdir = tempfile::tempdir().expect("Failed to create tempdir"); - let store = FileStore::new(tempdir.path()); - - let file: &[u8] = include_bytes!("./file.rs"); - let stream = tokio_stream::iter(file.chunks(100).map(|i| Ok(bytes::Bytes::copy_from_slice(i)))); - let metadata = Metadata { - filename: Some("style.css".to_string()), - content_type: Some("text/css".to_string()), - length: None, - etag: None, - }; - - // write through the interface - let filename = store.write_streaming( - "fireburn.ru", - metadata, stream - ).await.unwrap(); - println!("{}, {}", filename, tempdir.path() - .join("fireburn.ru") - .join(&filename) - .display()); - let content = tokio::fs::read( - tempdir.path() - .join("fireburn.ru") - .join(&filename) - ).await.unwrap(); - assert_eq!(content, file); - - // check internal metadata format - let meta: Metadata = serde_json::from_slice(&tokio::fs::read( - tempdir.path() - .join("fireburn.ru") - 
.join(filename.clone() + ".json") - ).await.unwrap()).unwrap(); - assert_eq!(meta.content_type.as_deref(), Some("text/css")); - assert_eq!(meta.filename.as_deref(), Some("style.css")); - assert_eq!(meta.length.map(|i| i.get()), Some(file.len())); - assert!(meta.etag.is_some()); - - // read back the data using the interface - let (metadata, read_back) = { - let (metadata, stream) = store.read_streaming( - "fireburn.ru", - &filename - ).await.unwrap(); - let mut reader = tokio_util::io::StreamReader::new(stream); - - let mut buf = Vec::default(); - reader.read_to_end(&mut buf).await.unwrap(); - - (metadata, buf) - }; - - assert_eq!(read_back, file); - assert_eq!(metadata.content_type.as_deref(), Some("text/css")); - assert_eq!(meta.filename.as_deref(), Some("style.css")); - assert_eq!(meta.length.map(|i| i.get()), Some(file.len())); - assert!(meta.etag.is_some()); - - } -} diff --git a/kittybox-rs/src/media/storage/mod.rs b/kittybox-rs/src/media/storage/mod.rs deleted file mode 100644 index 020999c..0000000 --- a/kittybox-rs/src/media/storage/mod.rs +++ /dev/null @@ -1,177 +0,0 @@ -use async_trait::async_trait; -use axum::extract::multipart::Field; -use tokio_stream::Stream; -use bytes::Bytes; -use serde::{Deserialize, Serialize}; -use std::ops::Bound; -use std::pin::Pin; -use std::fmt::Debug; -use std::num::NonZeroUsize; - -pub mod file; - -#[derive(Debug, Deserialize, Serialize)] -pub struct Metadata { - /// Content type of the file. If None, the content-type is considered undefined. - pub content_type: Option, - /// The original filename that was passed. - pub filename: Option, - /// The recorded length of the file. - pub length: Option, - /// The e-tag of a file. Note: it must be a strong e-tag, for example, a hash. 
- pub etag: Option, -} -impl From<&Field<'_>> for Metadata { - fn from(field: &Field<'_>) -> Self { - Self { - content_type: field.content_type() - .map(|i| i.to_owned()), - filename: field.file_name() - .map(|i| i.to_owned()), - length: None, - etag: None, - } - } -} - - -#[derive(Debug, Clone, Copy)] -pub enum ErrorKind { - Backend, - Permission, - Json, - NotFound, - Other, -} - -#[derive(Debug)] -pub struct MediaStoreError { - kind: ErrorKind, - source: Option>, - msg: String, -} - -impl MediaStoreError { - pub fn kind(&self) -> ErrorKind { - self.kind - } -} - -impl std::error::Error for MediaStoreError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - self.source - .as_ref() - .map(|i| i.as_ref() as &dyn std::error::Error) - } -} - -impl std::fmt::Display for MediaStoreError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{}: {}", - match self.kind { - ErrorKind::Backend => "media storage backend error", - ErrorKind::Permission => "permission denied", - ErrorKind::Json => "failed to parse json", - ErrorKind::NotFound => "blob not found", - ErrorKind::Other => "unknown media storage error", - }, - self.msg - ) - } -} - -pub type Result = std::result::Result; - -#[async_trait] -pub trait MediaStore: 'static + Send + Sync + Clone { - async fn write_streaming( - &self, - domain: &str, - metadata: Metadata, - content: T, - ) -> Result - where - T: tokio_stream::Stream> + Unpin + Send + Debug; - - async fn read_streaming( - &self, - domain: &str, - filename: &str, - ) -> Result<(Metadata, Pin> + Send>>)>; - - async fn stream_range( - &self, - domain: &str, - filename: &str, - range: (Bound, Bound) - ) -> Result> + Send>>> { - use futures::stream::TryStreamExt; - use tracing::debug; - let (metadata, mut stream) = self.read_streaming(domain, filename).await?; - let length = metadata.length.unwrap().get(); - - use Bound::*; - let (start, end): (usize, usize) = match range { - (Unbounded, Unbounded) => 
return Ok(stream), - (Included(start), Unbounded) => (start.try_into().unwrap(), length - 1), - (Unbounded, Included(end)) => (length - usize::try_from(end).unwrap(), length - 1), - (Included(start), Included(end)) => (start.try_into().unwrap(), end.try_into().unwrap()), - (_, _) => unreachable!() - }; - - stream = Box::pin( - stream.map_ok({ - let mut bytes_skipped = 0usize; - let mut bytes_read = 0usize; - - move |chunk| { - debug!("Skipped {}/{} bytes, chunk len {}", bytes_skipped, start, chunk.len()); - let chunk = if bytes_skipped < start { - let need_to_skip = start - bytes_skipped; - if chunk.len() < need_to_skip { - return None - } - debug!("Skipping {} bytes", need_to_skip); - bytes_skipped += need_to_skip; - - chunk.slice(need_to_skip..) - } else { - chunk - }; - - debug!("Read {} bytes from file, {} in this chunk", bytes_read, chunk.len()); - bytes_read += chunk.len(); - - if bytes_read > length { - if bytes_read - length > chunk.len() { - return None - } - debug!("Truncating last {} bytes", bytes_read - length); - return Some(chunk.slice(..chunk.len() - (bytes_read - length))) - } - - Some(chunk) - } - }) - .try_skip_while(|x| std::future::ready(Ok(x.is_none()))) - .try_take_while(|x| std::future::ready(Ok(x.is_some()))) - .map_ok(|x| x.unwrap()) - ); - - return Ok(stream); - } - - /// Read metadata for a file. - /// - /// The default implementation uses the `read_streaming` method - /// and drops the stream containing file content. 
- async fn metadata(&self, domain: &str, filename: &str) -> Result { - self.read_streaming(domain, filename) - .await - .map(|(meta, stream)| meta) - } - - async fn delete(&self, domain: &str, filename: &str) -> Result<()>; -} diff --git a/kittybox-rs/src/metrics.rs b/kittybox-rs/src/metrics.rs deleted file mode 100644 index e13fcb9..0000000 --- a/kittybox-rs/src/metrics.rs +++ /dev/null @@ -1,21 +0,0 @@ -#![allow(unused_imports, dead_code)] -use async_trait::async_trait; -use lazy_static::lazy_static; -use prometheus::Encoder; -use std::time::{Duration, Instant}; - -// TODO: Vendor in the Metrics struct from warp_prometheus and rework the path matching algorithm - -pub fn metrics(path_includes: Vec) -> warp::log::Log { - let metrics = warp_prometheus::Metrics::new(prometheus::default_registry(), &path_includes); - warp::log::custom(move |info| metrics.http_metrics(info)) -} - -pub fn gather() -> Vec { - let mut buffer: Vec = vec![]; - let encoder = prometheus::TextEncoder::new(); - let metric_families = prometheus::gather(); - encoder.encode(&metric_families, &mut buffer).unwrap(); - - buffer -} diff --git a/kittybox-rs/src/micropub/get.rs b/kittybox-rs/src/micropub/get.rs deleted file mode 100644 index 718714a..0000000 --- a/kittybox-rs/src/micropub/get.rs +++ /dev/null @@ -1,82 +0,0 @@ -use crate::database::{MicropubChannel, Storage}; -use crate::indieauth::User; -use crate::ApplicationState; -use tide::prelude::{json, Deserialize}; -use tide::{Request, Response, Result}; - -#[derive(Deserialize)] -struct QueryOptions { - q: String, - url: Option, -} - -pub async fn get_handler(req: Request>) -> Result -where - Backend: Storage + Send + Sync, -{ - let user = req.ext::().unwrap(); - let backend = &req.state().storage; - let media_endpoint = &req.state().media_endpoint; - let query = req.query::().unwrap_or(QueryOptions { - q: "".to_string(), - url: None, - }); - match &*query.q { - "config" => { - let channels: Vec; - match 
backend.get_channels(user.me.as_str()).await { - Ok(chans) => channels = chans, - Err(err) => return Ok(err.into()) - } - Ok(Response::builder(200).body(json!({ - "q": ["source", "config", "channel"], - "channels": channels, - "media-endpoint": media_endpoint - })).build()) - }, - "channel" => { - let channels: Vec; - match backend.get_channels(user.me.as_str()).await { - Ok(chans) => channels = chans, - Err(err) => return Ok(err.into()) - } - Ok(Response::builder(200).body(json!(channels)).build()) - } - "source" => { - if user.check_scope("create") || user.check_scope("update") || user.check_scope("delete") || user.check_scope("undelete") { - if let Some(url) = query.url { - match backend.get_post(&url).await { - Ok(post) => if let Some(post) = post { - Ok(Response::builder(200).body(post).build()) - } else { - Ok(Response::builder(404).build()) - }, - Err(err) => Ok(err.into()) - } - } else { - Ok(Response::builder(400).body(json!({ - "error": "invalid_request", - "error_description": "Please provide `url`." - })).build()) - } - } else { - Ok(Response::builder(401).body(json!({ - "error": "insufficient_scope", - "error_description": "You don't have the required scopes to proceed.", - "scope": "update" - })).build()) - } - }, - // TODO: ?q=food, ?q=geo, ?q=contacts - // Depends on indexing posts - // Errors - "" => Ok(Response::builder(400).body(json!({ - "error": "invalid_request", - "error_description": "No ?q= parameter specified. Try ?q=config maybe?" - })).build()), - _ => Ok(Response::builder(400).body(json!({ - "error": "invalid_request", - "error_description": "Unsupported ?q= query. Try ?q=config and see the q array for supported values." 
- })).build()) - } -} diff --git a/kittybox-rs/src/micropub/mod.rs b/kittybox-rs/src/micropub/mod.rs deleted file mode 100644 index 02eee6e..0000000 --- a/kittybox-rs/src/micropub/mod.rs +++ /dev/null @@ -1,846 +0,0 @@ -use std::collections::HashMap; -use std::sync::Arc; - -use crate::database::{MicropubChannel, Storage, StorageError}; -use crate::indieauth::backend::AuthBackend; -use crate::indieauth::User; -use crate::micropub::util::form_to_mf2_json; -use axum::extract::{BodyStream, Query, Host}; -use axum::headers::ContentType; -use axum::response::{IntoResponse, Response}; -use axum::TypedHeader; -use axum::{http::StatusCode, Extension}; -use serde::{Deserialize, Serialize}; -use serde_json::json; -use tokio::sync::Mutex; -use tokio::task::JoinSet; -use tracing::{debug, error, info, warn}; -use kittybox_indieauth::{Scope, TokenData}; -use kittybox_util::{MicropubError, ErrorType}; - -#[derive(Serialize, Deserialize, Debug, PartialEq)] -#[serde(rename_all = "kebab-case")] -enum QueryType { - Source, - Config, - Channel, - SyndicateTo, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct MicropubQuery { - q: QueryType, - url: Option, -} - -impl From for MicropubError { - fn from(err: StorageError) -> Self { - Self { - error: match err.kind() { - crate::database::ErrorKind::NotFound => ErrorType::NotFound, - _ => ErrorType::InternalServerError, - }, - error_description: format!("Backend error: {}", err), - } - } -} - -mod util; -pub(crate) use util::normalize_mf2; - -#[derive(Debug)] -struct FetchedPostContext { - url: url::Url, - mf2: serde_json::Value, - webmention: Option, -} - -fn populate_reply_context( - mf2: &serde_json::Value, - prop: &str, - ctxs: &[FetchedPostContext], -) -> Option> { - mf2["properties"][prop].as_array().map(|array| { - array - .iter() - // TODO: This seems to be O(n^2) and I don't like it. 
- // Switching `ctxs` to a hashmap might speed it up to O(n) - // The key would be the URL/UID - .map(|i| ctxs - .iter() - .find(|ctx| Some(ctx.url.as_str()) == i.as_str()) - .and_then(|ctx| ctx.mf2["items"].get(0)) - .unwrap_or(i)) - .cloned() - .collect::>() - }) -} - -#[tracing::instrument(skip(db))] -async fn background_processing( - db: D, - mf2: serde_json::Value, - http: reqwest::Client, -) -> () { - // TODO: Post-processing the post (aka second write pass) - // - [x] Download rich reply contexts - // - [ ] Syndicate the post if requested, add links to the syndicated copies - // - [ ] Send WebSub notifications to the hub (if we happen to have one) - // - [x] Send webmentions - - use futures_util::StreamExt; - - let uid: &str = mf2["properties"]["uid"][0].as_str().unwrap(); - - let context_props = ["in-reply-to", "like-of", "repost-of", "bookmark-of"]; - let mut context_urls: Vec = vec![]; - for prop in &context_props { - if let Some(array) = mf2["properties"][prop].as_array() { - context_urls.extend( - array - .iter() - .filter_map(|v| v.as_str()) - .filter_map(|v| v.parse::().ok()), - ); - } - } - // TODO parse HTML in e-content and add links found here - context_urls.sort_unstable_by_key(|u| u.to_string()); - context_urls.dedup(); - - // TODO: Make a stream to fetch all these posts and convert them to MF2 - let post_contexts = { - let http = &http; - tokio_stream::iter(context_urls.into_iter()) - .then(move |url: url::Url| http.get(url).send()) - .filter_map(|response| futures::future::ready(response.ok())) - .filter(|response| futures::future::ready(response.status() == 200)) - .filter_map(|response: reqwest::Response| async move { - // 1. We need to preserve the URL - // 2. We need to get the HTML for MF2 processing - // 3. We need to get the webmention endpoint address - // All of that can be done in one go. 
- let url = response.url().clone(); - // TODO parse link headers - let links = response - .headers() - .get_all(hyper::http::header::LINK) - .iter() - .cloned() - .collect::>(); - let html = response.text().await; - if html.is_err() { - return None; - } - let html = html.unwrap(); - let mf2 = microformats::from_html(&html, url.clone()).unwrap(); - // TODO use first Link: header if available - let webmention: Option = mf2 - .rels - .by_rels() - .get("webmention") - .and_then(|i| i.first().cloned()); - - dbg!(Some(FetchedPostContext { - url, - mf2: serde_json::to_value(mf2).unwrap(), - webmention - })) - }) - .collect::>() - .await - }; - - let mut update = MicropubUpdate { - replace: Some(Default::default()), - ..Default::default() - }; - for prop in context_props { - if let Some(json) = populate_reply_context(&mf2, prop, &post_contexts) { - update.replace.as_mut().unwrap().insert(prop.to_owned(), json); - } - } - if !update.replace.as_ref().unwrap().is_empty() { - if let Err(err) = db.update_post(uid, update).await { - error!("Failed to update post with rich reply contexts: {}", err); - } - } - - // At this point we can start syndicating the post. - // Currently we don't really support any syndication endpoints, but still! 
- /*if let Some(syndicate_to) = mf2["properties"]["mp-syndicate-to"].as_array() { - let http = &http; - tokio_stream::iter(syndicate_to) - .filter_map(|i| futures::future::ready(i.as_str())) - .for_each_concurrent(3, |s: &str| async move { - #[allow(clippy::match_single_binding)] - match s { - _ => { - todo!("Syndicate to generic webmention-aware service {}", s); - } - // TODO special handling for non-webmention-aware services like the birdsite - } - }) - .await; - }*/ - - { - let http = &http; - tokio_stream::iter( - post_contexts - .into_iter() - .filter(|ctx| ctx.webmention.is_some()), - ) - .for_each_concurrent(2, |ctx| async move { - let mut map = std::collections::HashMap::new(); - map.insert("source", uid); - map.insert("target", ctx.url.as_str()); - - match http - .post(ctx.webmention.unwrap().clone()) - .form(&map) - .send() - .await - { - Ok(res) => { - if !res.status().is_success() { - warn!( - "Failed to send a webmention for {}: got HTTP {}", - ctx.url, - res.status() - ); - } else { - info!( - "Sent a webmention to {}, got HTTP {}", - ctx.url, - res.status() - ) - } - } - Err(err) => warn!("Failed to send a webmention for {}: {}", ctx.url, err), - } - }) - .await; - } -} - -// TODO actually save the post to the database and schedule post-processing -pub(crate) async fn _post( - user: &TokenData, - uid: String, - mf2: serde_json::Value, - db: D, - http: reqwest::Client, - jobset: Arc>>, -) -> Result { - // Here, we have the following guarantees: - // - The MF2-JSON document is normalized (guaranteed by normalize_mf2) - // - The MF2-JSON document contains a UID - // - The MF2-JSON document's URL list contains its UID - // - The MF2-JSON document's "content" field contains an HTML blob, if present - // - The MF2-JSON document's publishing datetime is present - // - The MF2-JSON document's target channels are set - // - The MF2-JSON document's author is set - - // Security check! Do we have an OAuth2 scope to proceed? 
- if !user.check_scope(&Scope::Create) { - return Err(MicropubError { - error: ErrorType::InvalidScope, - error_description: "Not enough privileges - try acquiring the \"create\" scope." - .to_owned(), - }); - } - - // Security check #2! Are we posting to our own website? - if !uid.starts_with(user.me.as_str()) - || mf2["properties"]["channel"] - .as_array() - .unwrap_or(&vec![]) - .iter() - .any(|url| !url.as_str().unwrap().starts_with(user.me.as_str())) - { - return Err(MicropubError { - error: ErrorType::Forbidden, - error_description: "You're posting to a website that's not yours.".to_owned(), - }); - } - - // Security check #3! Are we overwriting an existing document? - if db.post_exists(&uid).await? { - return Err(MicropubError { - error: ErrorType::AlreadyExists, - error_description: "UID clash was detected, operation aborted.".to_owned(), - }); - } - let user_domain = format!( - "{}{}", - user.me.host_str().unwrap(), - user.me.port() - .map(|port| format!(":{}", port)) - .unwrap_or_default() - ); - // Save the post - tracing::debug!("Saving post to database..."); - db.put_post(&mf2, &user_domain).await?; - - let mut channels = mf2["properties"]["channel"] - .as_array() - .unwrap() - .iter() - .map(|i| i.as_str().unwrap_or("")) - .filter(|i| !i.is_empty()); - - let default_channel = user - .me - .join(util::DEFAULT_CHANNEL_PATH) - .unwrap() - .to_string(); - let vcards_channel = user - .me - .join(util::CONTACTS_CHANNEL_PATH) - .unwrap() - .to_string(); - let food_channel = user.me.join(util::FOOD_CHANNEL_PATH).unwrap().to_string(); - let default_channels = vec![default_channel, vcards_channel, food_channel]; - - for chan in &mut channels { - debug!("Adding post {} to channel {}", uid, chan); - if db.post_exists(chan).await? 
{ - db.add_to_feed(chan, &uid).await?; - } else if default_channels.iter().any(|i| chan == i) { - util::create_feed(&db, &uid, chan, user).await?; - } else { - warn!("Ignoring non-existent channel: {}", chan); - } - } - - let reply = - IntoResponse::into_response((StatusCode::ACCEPTED, [("Location", uid.as_str())])); - - #[cfg(not(tokio_unstable))] - jobset.lock().await.spawn(background_processing(db, mf2, http)); - #[cfg(tokio_unstable)] - jobset.lock().await.build_task() - .name(format!("Kittybox background processing for post {}", uid.as_str()).as_str()) - .spawn(background_processing(db, mf2, http)); - - Ok(reply) -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(rename_all = "snake_case")] -enum ActionType { - Delete, - Update, -} - -#[derive(Serialize, Deserialize, Debug)] -#[serde(untagged)] -pub enum MicropubPropertyDeletion { - Properties(Vec), - Values(HashMap>) -} -#[derive(Serialize, Deserialize)] -struct MicropubFormAction { - action: ActionType, - url: String, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct MicropubAction { - action: ActionType, - url: String, - #[serde(flatten)] - #[serde(skip_serializing_if = "Option::is_none")] - update: Option -} - -#[derive(Serialize, Deserialize, Debug, Default)] -pub struct MicropubUpdate { - #[serde(skip_serializing_if = "Option::is_none")] - pub replace: Option>>, - #[serde(skip_serializing_if = "Option::is_none")] - pub add: Option>>, - #[serde(skip_serializing_if = "Option::is_none")] - pub delete: Option, - -} - -impl From for MicropubAction { - fn from(a: MicropubFormAction) -> Self { - debug_assert!(matches!(a.action, ActionType::Delete)); - Self { - action: a.action, - url: a.url, - update: None - } - } -} - -#[tracing::instrument(skip(db))] -async fn post_action( - action: MicropubAction, - db: D, - user: User, -) -> Result<(), MicropubError> { - let uri = if let Ok(uri) = action.url.parse::() { - uri - } else { - return Err(MicropubError { - error: ErrorType::InvalidRequest, - 
error_description: "Your URL doesn't parse properly.".to_owned(), - }); - }; - - if uri.authority().unwrap() - != user - .me - .as_str() - .parse::() - .unwrap() - .authority() - .unwrap() - { - return Err(MicropubError { - error: ErrorType::Forbidden, - error_description: "Don't tamper with others' posts!".to_owned(), - }); - } - - match action.action { - ActionType::Delete => { - if !user.check_scope(&Scope::Delete) { - return Err(MicropubError { - error: ErrorType::InvalidScope, - error_description: "You need a \"delete\" scope for this.".to_owned(), - }); - } - - db.delete_post(&action.url).await? - } - ActionType::Update => { - if !user.check_scope(&Scope::Update) { - return Err(MicropubError { - error: ErrorType::InvalidScope, - error_description: "You need an \"update\" scope for this.".to_owned(), - }); - } - - db.update_post( - &action.url, - action.update.ok_or(MicropubError { - error: ErrorType::InvalidRequest, - error_description: "Update request is not set.".to_owned(), - })? - ) - .await? 
- } - } - - Ok(()) -} - -enum PostBody { - Action(MicropubAction), - MF2(serde_json::Value), -} - -#[tracing::instrument] -async fn dispatch_body( - mut body: BodyStream, - content_type: ContentType, -) -> Result { - let body: Vec = { - debug!("Buffering body..."); - use tokio_stream::StreamExt; - let mut buf = Vec::default(); - - while let Some(chunk) = body.next().await { - buf.extend_from_slice(&chunk.unwrap()) - } - - buf - }; - - debug!("Content-Type: {:?}", content_type); - if content_type == ContentType::json() { - if let Ok(action) = serde_json::from_slice::(&body) { - Ok(PostBody::Action(action)) - } else if let Ok(body) = serde_json::from_slice::(&body) { - // quick sanity check - if !body.is_object() || !body["type"].is_array() { - return Err(MicropubError { - error: ErrorType::InvalidRequest, - error_description: "Invalid MF2-JSON detected: `.` should be an object, `.type` should be an array of MF2 types".to_owned() - }); - } - - Ok(PostBody::MF2(body)) - } else { - Err(MicropubError { - error: ErrorType::InvalidRequest, - error_description: "Invalid JSON object passed.".to_owned(), - }) - } - } else if content_type == ContentType::form_url_encoded() { - if let Ok(body) = serde_urlencoded::from_bytes::(&body) { - Ok(PostBody::Action(body.into())) - } else if let Ok(body) = serde_urlencoded::from_bytes::>(&body) { - Ok(PostBody::MF2(form_to_mf2_json(body))) - } else { - Err(MicropubError { - error: ErrorType::InvalidRequest, - error_description: "Invalid form-encoded data. Try h=entry&content=Hello!" - .to_owned(), - }) - } - } else { - Err(MicropubError::new( - ErrorType::UnsupportedMediaType, - "This Content-Type is not recognized. 
Try application/json instead?", - )) - } -} - -#[tracing::instrument(skip(db, http))] -pub(crate) async fn post( - Extension(db): Extension, - Extension(http): Extension, - Extension(jobset): Extension>>>, - TypedHeader(content_type): TypedHeader, - user: User, - body: BodyStream, -) -> axum::response::Response { - match dispatch_body(body, content_type).await { - Ok(PostBody::Action(action)) => match post_action(action, db, user).await { - Ok(()) => Response::default(), - Err(err) => err.into_response(), - }, - Ok(PostBody::MF2(mf2)) => { - let (uid, mf2) = normalize_mf2(mf2, &user); - match _post(&user, uid, mf2, db, http, jobset).await { - Ok(response) => response, - Err(err) => err.into_response(), - } - } - Err(err) => err.into_response(), - } -} - -#[tracing::instrument(skip(db))] -pub(crate) async fn query( - Extension(db): Extension, - query: Option>, - Host(host): Host, - user: User, -) -> axum::response::Response { - // We handle the invalid query case manually to return a - // MicropubError instead of HTTP 422 - let query = if let Some(Query(query)) = query { - query - } else { - return MicropubError::new( - ErrorType::InvalidRequest, - "Invalid query provided. Try ?q=config to see what you can do." 
- ).into_response(); - }; - - if axum::http::Uri::try_from(user.me.as_str()) - .unwrap() - .authority() - .unwrap() - != &host - { - return MicropubError::new( - ErrorType::NotAuthorized, - "This website doesn't belong to you.", - ) - .into_response(); - } - - let user_domain = format!( - "{}{}", - user.me.host_str().unwrap(), - user.me.port() - .map(|port| format!(":{}", port)) - .unwrap_or_default() - ); - match query.q { - QueryType::Config => { - let channels: Vec = match db.get_channels(user.me.as_str()).await { - Ok(chans) => chans, - Err(err) => { - return MicropubError::new( - ErrorType::InternalServerError, - &format!("Error fetching channels: {}", err), - ) - .into_response() - } - }; - - axum::response::Json(json!({ - "q": [ - QueryType::Source, - QueryType::Config, - QueryType::Channel, - QueryType::SyndicateTo - ], - "channels": channels, - "_kittybox_authority": user.me.as_str(), - "syndicate-to": [], - "media-endpoint": user.me.join("/.kittybox/media").unwrap().as_str() - })) - .into_response() - } - QueryType::Source => { - match query.url { - Some(url) => { - match db.get_post(&url).await { - Ok(some) => match some { - Some(post) => axum::response::Json(&post).into_response(), - None => MicropubError::new( - ErrorType::NotFound, - "The specified MF2 object was not found in database.", - ) - .into_response(), - }, - Err(err) => MicropubError::new( - ErrorType::InternalServerError, - &format!("Backend error: {}", err), - ) - .into_response(), - } - } - None => { - // Here, one should probably attempt to query at least the main feed and collect posts - // Using a pre-made query function can't be done because it does unneeded filtering - // Don't implement for now, this is optional - MicropubError::new( - ErrorType::InvalidRequest, - "Querying for post list is not implemented yet.", - ) - .into_response() - } - } - } - QueryType::Channel => match db.get_channels(&user_domain).await { - Ok(chans) => axum::response::Json(json!({ "channels": chans 
})).into_response(), - Err(err) => MicropubError::new( - ErrorType::InternalServerError, - &format!("Error fetching channels: {}", err), - ) - .into_response(), - }, - QueryType::SyndicateTo => { - axum::response::Json(json!({ "syndicate-to": [] })).into_response() - } - } -} - -#[must_use] -pub fn router( - storage: S, - http: reqwest::Client, - auth: A, - jobset: Arc>> -) -> axum::routing::MethodRouter -where - S: Storage + 'static, - A: AuthBackend -{ - axum::routing::get(query::) - .post(post::) - .layer::<_, _, std::convert::Infallible>(tower_http::cors::CorsLayer::new() - .allow_methods([ - axum::http::Method::GET, - axum::http::Method::POST, - ]) - .allow_origin(tower_http::cors::Any)) - .layer::<_, _, std::convert::Infallible>(axum::Extension(storage)) - .layer::<_, _, std::convert::Infallible>(axum::Extension(http)) - .layer::<_, _, std::convert::Infallible>(axum::Extension(auth)) - .layer::<_, _, std::convert::Infallible>(axum::Extension(jobset)) -} - -#[cfg(test)] -#[allow(dead_code)] -impl MicropubQuery { - fn config() -> Self { - Self { - q: QueryType::Config, - url: None, - } - } - - fn source(url: &str) -> Self { - Self { - q: QueryType::Source, - url: Some(url.to_owned()), - } - } -} - -#[cfg(test)] -mod tests { - use std::sync::Arc; - - use crate::{database::Storage, micropub::MicropubError}; - use hyper::body::HttpBody; - use serde_json::json; - use tokio::sync::Mutex; - - use super::FetchedPostContext; - use kittybox_indieauth::{Scopes, Scope, TokenData}; - use axum::extract::Host; - - #[test] - fn test_populate_reply_context() { - let already_expanded_reply_ctx = json!({ - "type": ["h-entry"], - "properties": { - "content": ["Hello world!"] - } - }); - let mf2 = json!({ - "type": ["h-entry"], - "properties": { - "like-of": [ - "https://fireburn.ru/posts/example", - already_expanded_reply_ctx, - "https://fireburn.ru/posts/non-existent" - ] - } - }); - let test_ctx = json!({ - "type": ["h-entry"], - "properties": { - "content": ["This is a post 
which was reacted to."] - } - }); - let reply_contexts = vec![FetchedPostContext { - url: "https://fireburn.ru/posts/example".parse().unwrap(), - mf2: json!({ "items": [test_ctx] }), - webmention: None, - }]; - - let like_of = super::populate_reply_context(&mf2, "like-of", &reply_contexts).unwrap(); - - assert_eq!(like_of[0], test_ctx); - assert_eq!(like_of[1], already_expanded_reply_ctx); - assert_eq!(like_of[2], "https://fireburn.ru/posts/non-existent"); - } - - #[tokio::test] - async fn test_post_reject_scope() { - let db = crate::database::MemoryStorage::new(); - - let post = json!({ - "type": ["h-entry"], - "properties": { - "content": ["Hello world!"] - } - }); - let user = TokenData { - me: "https://localhost:8080/".parse().unwrap(), - client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), - scope: Scopes::new(vec![Scope::Profile]), - iat: None, exp: None - }; - let (uid, mf2) = super::normalize_mf2(post, &user); - - let err = super::_post(&user, uid, mf2, db.clone(), reqwest::Client::new(), Arc::new(Mutex::new(tokio::task::JoinSet::new()))) - .await - .unwrap_err(); - - assert_eq!(err.error, super::ErrorType::InvalidScope); - - let hashmap = db.mapping.read().await; - assert!(hashmap.is_empty()); - } - - #[tokio::test] - async fn test_post_reject_different_user() { - let db = crate::database::MemoryStorage::new(); - - let post = json!({ - "type": ["h-entry"], - "properties": { - "content": ["Hello world!"], - "uid": ["https://fireburn.ru/posts/hello"], - "url": ["https://fireburn.ru/posts/hello"] - } - }); - let user = TokenData { - me: "https://aaronparecki.com/".parse().unwrap(), - client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), - scope: Scopes::new(vec![Scope::Profile, Scope::Create, Scope::Update, Scope::Media]), - iat: None, exp: None - }; - let (uid, mf2) = super::normalize_mf2(post, &user); - - let err = super::_post(&user, uid, mf2, db.clone(), reqwest::Client::new(), Arc::new(Mutex::new(tokio::task::JoinSet::new()))) - .await - 
.unwrap_err(); - - assert_eq!(err.error, super::ErrorType::Forbidden); - - let hashmap = db.mapping.read().await; - assert!(hashmap.is_empty()); - } - - #[tokio::test] - async fn test_post_mf2() { - let db = crate::database::MemoryStorage::new(); - - let post = json!({ - "type": ["h-entry"], - "properties": { - "content": ["Hello world!"] - } - }); - let user = TokenData { - me: "https://localhost:8080/".parse().unwrap(), - client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), - scope: Scopes::new(vec![Scope::Profile, Scope::Create]), - iat: None, exp: None - }; - let (uid, mf2) = super::normalize_mf2(post, &user); - - let res = super::_post(&user, uid, mf2, db.clone(), reqwest::Client::new(), Arc::new(Mutex::new(tokio::task::JoinSet::new()))) - .await - .unwrap(); - - assert!(res.headers().contains_key("Location")); - let location = res.headers().get("Location").unwrap(); - assert!(db.post_exists(location.to_str().unwrap()).await.unwrap()); - assert!(db - .post_exists("https://localhost:8080/feeds/main") - .await - .unwrap()); - } - - #[tokio::test] - async fn test_query_foreign_url() { - let mut res = super::query( - axum::Extension(crate::database::MemoryStorage::new()), - Some(axum::extract::Query(super::MicropubQuery::source( - "https://aaronparecki.com/feeds/main", - ))), - Host("aaronparecki.com".to_owned()), - crate::indieauth::User::( - TokenData { - me: "https://fireburn.ru/".parse().unwrap(), - client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), - scope: Scopes::new(vec![Scope::Profile, Scope::Create, Scope::Update, Scope::Media]), - iat: None, exp: None - }, std::marker::PhantomData - ) - ) - .await; - - assert_eq!(res.status(), 401); - let body = res.body_mut().data().await.unwrap().unwrap(); - let json: MicropubError = serde_json::from_slice(&body as &[u8]).unwrap(); - assert_eq!(json.error, super::ErrorType::NotAuthorized); - } -} diff --git a/kittybox-rs/src/micropub/util.rs b/kittybox-rs/src/micropub/util.rs deleted file mode 100644 
index 940d7c3..0000000 --- a/kittybox-rs/src/micropub/util.rs +++ /dev/null @@ -1,444 +0,0 @@ -use crate::database::Storage; -use kittybox_indieauth::TokenData; -use chrono::prelude::*; -use core::iter::Iterator; -use newbase60::num_to_sxg; -use serde_json::json; -use std::convert::TryInto; - -pub(crate) const DEFAULT_CHANNEL_PATH: &str = "/feeds/main"; -const DEFAULT_CHANNEL_NAME: &str = "Main feed"; -pub(crate) const CONTACTS_CHANNEL_PATH: &str = "/feeds/vcards"; -const CONTACTS_CHANNEL_NAME: &str = "My address book"; -pub(crate) const FOOD_CHANNEL_PATH: &str = "/feeds/food"; -const FOOD_CHANNEL_NAME: &str = "My recipe book"; - -fn get_folder_from_type(post_type: &str) -> String { - (match post_type { - "h-feed" => "feeds/", - "h-card" => "vcards/", - "h-event" => "events/", - "h-food" => "food/", - _ => "posts/", - }) - .to_string() -} - -/// Reset the datetime to a proper datetime. -/// Do not attempt to recover the information. -/// Do not pass GO. Do not collect $200. -fn reset_dt(post: &mut serde_json::Value) -> DateTime { - let curtime: DateTime = Local::now(); - post["properties"]["published"] = json!([curtime.to_rfc3339()]); - chrono::DateTime::from(curtime) -} - -pub fn normalize_mf2(mut body: serde_json::Value, user: &TokenData) -> (String, serde_json::Value) { - // Normalize the MF2 object here. - let me = &user.me; - let folder = get_folder_from_type(body["type"][0].as_str().unwrap()); - let published: DateTime = - if let Some(dt) = body["properties"]["published"][0].as_str() { - // Check if the datetime is parsable. - match DateTime::parse_from_rfc3339(dt) { - Ok(dt) => dt, - Err(_) => reset_dt(&mut body), - } - } else { - // Set the datetime. - // Note: this code block duplicates functionality with the above failsafe. - // Consider refactoring it to a helper function? 
- reset_dt(&mut body) - }; - match body["properties"]["uid"][0].as_str() { - None => { - let uid = serde_json::Value::String( - me.join( - &(folder.clone() - + &num_to_sxg(published.timestamp_millis().try_into().unwrap())), - ) - .unwrap() - .to_string(), - ); - body["properties"]["uid"] = serde_json::Value::Array(vec![uid.clone()]); - match body["properties"]["url"].as_array_mut() { - Some(array) => array.push(uid), - None => body["properties"]["url"] = body["properties"]["uid"].clone(), - } - } - Some(uid_str) => { - let uid = uid_str.to_string(); - match body["properties"]["url"].as_array_mut() { - Some(array) => { - if !array.iter().any(|i| i.as_str().unwrap_or("") == uid) { - array.push(serde_json::Value::String(uid)) - } - } - None => body["properties"]["url"] = body["properties"]["uid"].clone(), - } - } - } - if let Some(slugs) = body["properties"]["mp-slug"].as_array() { - let new_urls = slugs - .iter() - .map(|i| i.as_str().unwrap_or("")) - .filter(|i| i != &"") - .map(|i| me.join(&((&folder).clone() + i)).unwrap().to_string()) - .collect::>(); - let urls = body["properties"]["url"].as_array_mut().unwrap(); - new_urls.iter().for_each(|i| urls.push(json!(i))); - } - let props = body["properties"].as_object_mut().unwrap(); - props.remove("mp-slug"); - - if body["properties"]["content"][0].is_string() { - // Convert the content to HTML using the `markdown` crate - body["properties"]["content"] = json!([{ - "html": markdown::to_html(body["properties"]["content"][0].as_str().unwrap()), - "value": body["properties"]["content"][0] - }]) - } - // TODO: apply this normalization to editing too - if body["properties"]["mp-channel"].is_array() { - let mut additional_channels = body["properties"]["mp-channel"].as_array().unwrap().clone(); - if let Some(array) = body["properties"]["channel"].as_array_mut() { - array.append(&mut additional_channels); - } else { - body["properties"]["channel"] = json!(additional_channels) - } - body["properties"] - .as_object_mut() - 
.unwrap() - .remove("mp-channel"); - } else if body["properties"]["mp-channel"].is_string() { - let chan = body["properties"]["mp-channel"] - .as_str() - .unwrap() - .to_owned(); - if let Some(array) = body["properties"]["channel"].as_array_mut() { - array.push(json!(chan)) - } else { - body["properties"]["channel"] = json!([chan]); - } - body["properties"] - .as_object_mut() - .unwrap() - .remove("mp-channel"); - } - if body["properties"]["channel"][0].as_str().is_none() { - match body["type"][0].as_str() { - Some("h-entry") => { - // Set the channel to the main channel... - // TODO find like posts and move them to separate private channel - let default_channel = me.join(DEFAULT_CHANNEL_PATH).unwrap().to_string(); - - body["properties"]["channel"] = json!([default_channel]); - } - Some("h-card") => { - let default_channel = me.join(CONTACTS_CHANNEL_PATH).unwrap().to_string(); - - body["properties"]["channel"] = json!([default_channel]); - } - Some("h-food") => { - let default_channel = me.join(FOOD_CHANNEL_PATH).unwrap().to_string(); - - body["properties"]["channel"] = json!([default_channel]); - } - // TODO h-event - /*"h-event" => { - let default_channel - },*/ - _ => { - body["properties"]["channel"] = json!([]); - } - } - } - body["properties"]["posted-with"] = json!([user.client_id]); - if body["properties"]["author"][0].as_str().is_none() { - body["properties"]["author"] = json!([me.as_str()]) - } - // TODO: maybe highlight #hashtags? 
- // Find other processing to do and insert it here - return ( - body["properties"]["uid"][0].as_str().unwrap().to_string(), - body, - ); -} - -pub(crate) fn form_to_mf2_json(form: Vec<(String, String)>) -> serde_json::Value { - let mut mf2 = json!({"type": [], "properties": {}}); - for (k, v) in form { - if k == "h" { - mf2["type"] - .as_array_mut() - .unwrap() - .push(json!("h-".to_string() + &v)); - } else if k != "access_token" { - let key = k.strip_suffix("[]").unwrap_or(&k); - match mf2["properties"][key].as_array_mut() { - Some(prop) => prop.push(json!(v)), - None => mf2["properties"][key] = json!([v]), - } - } - } - if mf2["type"].as_array().unwrap().is_empty() { - mf2["type"].as_array_mut().unwrap().push(json!("h-entry")); - } - mf2 -} - -pub(crate) async fn create_feed( - storage: &impl Storage, - uid: &str, - channel: &str, - user: &TokenData, -) -> crate::database::Result<()> { - let path = url::Url::parse(channel).unwrap().path().to_string(); - - let name = match path.as_str() { - DEFAULT_CHANNEL_PATH => DEFAULT_CHANNEL_NAME, - CONTACTS_CHANNEL_PATH => CONTACTS_CHANNEL_NAME, - FOOD_CHANNEL_PATH => FOOD_CHANNEL_NAME, - _ => panic!("Tried to create an unknown default feed!"), - }; - - let (_, feed) = normalize_mf2( - json!({ - "type": ["h-feed"], - "properties": { - "name": [name], - "uid": [channel] - }, - }), - user, - ); - storage.put_post(&feed, user.me.as_str()).await?; - storage.add_to_feed(channel, uid).await -} - -#[cfg(test)] -mod tests { - use super::*; - use serde_json::json; - - fn token_data() -> TokenData { - TokenData { - me: "https://fireburn.ru/".parse().unwrap(), - client_id: "https://quill.p3k.io/".parse().unwrap(), - scope: kittybox_indieauth::Scopes::new(vec![kittybox_indieauth::Scope::Create]), - exp: Some(u64::MAX), - iat: Some(0) - } - } - - #[test] - fn test_form_to_mf2() { - assert_eq!( - super::form_to_mf2_json( - serde_urlencoded::from_str("h=entry&content=something%20interesting").unwrap() - ), - json!({ - "type": 
["h-entry"], - "properties": { - "content": ["something interesting"] - } - }) - ) - } - - #[test] - fn test_no_replace_uid() { - let mf2 = json!({ - "type": ["h-card"], - "properties": { - "uid": ["https://fireburn.ru/"], - "name": ["Vika Nezrimaya"], - "note": ["A crazy programmer girl who wants some hugs"] - } - }); - - let (uid, normalized) = normalize_mf2( - mf2.clone(), - &token_data(), - ); - assert_eq!( - normalized["properties"]["uid"][0], mf2["properties"]["uid"][0], - "UID was replaced" - ); - assert_eq!( - normalized["properties"]["uid"][0], uid, - "Returned post location doesn't match UID" - ); - } - - #[test] - fn test_mp_channel() { - let mf2 = json!({ - "type": ["h-entry"], - "properties": { - "uid": ["https://fireburn.ru/posts/test"], - "content": [{"html": "

Hello world!

"}], - "mp-channel": ["https://fireburn.ru/feeds/test"] - } - }); - - let (_, normalized) = normalize_mf2( - mf2.clone(), - &token_data(), - ); - - assert_eq!( - normalized["properties"]["channel"], - mf2["properties"]["mp-channel"] - ); - } - - #[test] - fn test_mp_channel_as_string() { - let mf2 = json!({ - "type": ["h-entry"], - "properties": { - "uid": ["https://fireburn.ru/posts/test"], - "content": [{"html": "

Hello world!

"}], - "mp-channel": "https://fireburn.ru/feeds/test" - } - }); - - let (_, normalized) = normalize_mf2( - mf2.clone(), - &token_data(), - ); - - assert_eq!( - normalized["properties"]["channel"][0], - mf2["properties"]["mp-channel"] - ); - } - - #[test] - fn test_normalize_mf2() { - let mf2 = json!({ - "type": ["h-entry"], - "properties": { - "content": ["This is content!"] - } - }); - - let (uid, post) = normalize_mf2( - mf2, - &token_data(), - ); - assert_eq!( - post["properties"]["published"] - .as_array() - .expect("post['published'] is undefined") - .len(), - 1, - "Post doesn't have a published time" - ); - DateTime::parse_from_rfc3339(post["properties"]["published"][0].as_str().unwrap()) - .expect("Couldn't parse date from rfc3339"); - assert!( - !post["properties"]["url"] - .as_array() - .expect("post['url'] is undefined") - .is_empty(), - "Post doesn't have any URLs" - ); - assert_eq!( - post["properties"]["uid"] - .as_array() - .expect("post['uid'] is undefined") - .len(), - 1, - "Post doesn't have a single UID" - ); - assert_eq!( - post["properties"]["uid"][0], uid, - "UID of a post and its supposed location don't match" - ); - assert!( - uid.starts_with("https://fireburn.ru/posts/"), - "The post namespace is incorrect" - ); - assert_eq!( - post["properties"]["content"][0]["html"] - .as_str() - .expect("Post doesn't have a rich content object") - .trim(), - "

This is content!

", - "Parsed Markdown content doesn't match expected HTML" - ); - assert_eq!( - post["properties"]["channel"][0], "https://fireburn.ru/feeds/main", - "Post isn't posted to the main channel" - ); - assert_eq!( - post["properties"]["author"][0], "https://fireburn.ru/", - "Post author is unknown" - ); - } - - #[test] - fn test_mp_slug() { - let mf2 = json!({ - "type": ["h-entry"], - "properties": { - "content": ["This is content!"], - "mp-slug": ["hello-post"] - }, - }); - - let (_, post) = normalize_mf2( - mf2, - &token_data(), - ); - assert!( - post["properties"]["url"] - .as_array() - .unwrap() - .iter() - .map(|i| i.as_str().unwrap()) - .any(|i| i == "https://fireburn.ru/posts/hello-post"), - "Didn't found an URL pointing to the location expected by the mp-slug semantics" - ); - assert!( - post["properties"]["mp-slug"].as_array().is_none(), - "mp-slug wasn't deleted from the array!" - ) - } - - #[test] - fn test_normalize_feed() { - let mf2 = json!({ - "type": ["h-feed"], - "properties": { - "name": "Main feed", - "mp-slug": ["main"] - } - }); - - let (uid, post) = normalize_mf2( - mf2, - &token_data(), - ); - assert_eq!( - post["properties"]["uid"][0], uid, - "UID of a post and its supposed location don't match" - ); - assert_eq!(post["properties"]["author"][0], "https://fireburn.ru/"); - assert!( - post["properties"]["url"] - .as_array() - .unwrap() - .iter() - .map(|i| i.as_str().unwrap()) - .any(|i| i == "https://fireburn.ru/feeds/main"), - "Didn't found an URL pointing to the location expected by the mp-slug semantics" - ); - assert!( - post["properties"]["mp-slug"].as_array().is_none(), - "mp-slug wasn't deleted from the array!" 
- ) - } -} diff --git a/kittybox-rs/src/tokenauth.rs b/kittybox-rs/src/tokenauth.rs deleted file mode 100644 index 244a045..0000000 --- a/kittybox-rs/src/tokenauth.rs +++ /dev/null @@ -1,358 +0,0 @@ -use serde::{Deserialize, Serialize}; -use url::Url; - -#[derive(Deserialize, Serialize, Debug, PartialEq, Clone)] -pub struct User { - pub me: Url, - pub client_id: Url, - scope: String, -} - -#[derive(Debug, Clone, PartialEq, Copy)] -pub enum ErrorKind { - PermissionDenied, - NotAuthorized, - TokenEndpointError, - JsonParsing, - InvalidHeader, - Other, -} - -#[derive(Deserialize, Serialize, Debug, Clone)] -pub struct TokenEndpointError { - error: String, - error_description: String, -} - -#[derive(Debug)] -pub struct IndieAuthError { - source: Option>, - kind: ErrorKind, - msg: String, -} - -impl std::error::Error for IndieAuthError { - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - self.source - .as_ref() - .map(|e| e.as_ref() as &dyn std::error::Error) - } -} - -impl std::fmt::Display for IndieAuthError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{}: {}", - match self.kind { - ErrorKind::TokenEndpointError => "token endpoint returned an error: ", - ErrorKind::JsonParsing => "error while parsing token endpoint response: ", - ErrorKind::NotAuthorized => "token endpoint did not recognize the token: ", - ErrorKind::PermissionDenied => "token endpoint rejected the token: ", - ErrorKind::InvalidHeader => "authorization header parsing error: ", - ErrorKind::Other => "token endpoint communication error: ", - }, - self.msg - ) - } -} - -impl From for IndieAuthError { - fn from(err: serde_json::Error) -> Self { - Self { - msg: format!("{}", err), - source: Some(Box::new(err)), - kind: ErrorKind::JsonParsing, - } - } -} - -impl From for IndieAuthError { - fn from(err: reqwest::Error) -> Self { - Self { - msg: format!("{}", err), - source: Some(Box::new(err)), - kind: ErrorKind::Other, - } - } -} - -impl From 
for IndieAuthError { - fn from(err: axum::extract::rejection::TypedHeaderRejection) -> Self { - Self { - msg: format!("{:?}", err.reason()), - source: Some(Box::new(err)), - kind: ErrorKind::InvalidHeader, - } - } -} - -impl axum::response::IntoResponse for IndieAuthError { - fn into_response(self) -> axum::response::Response { - let status_code: StatusCode = match self.kind { - ErrorKind::PermissionDenied => StatusCode::FORBIDDEN, - ErrorKind::NotAuthorized => StatusCode::UNAUTHORIZED, - ErrorKind::TokenEndpointError => StatusCode::INTERNAL_SERVER_ERROR, - ErrorKind::JsonParsing => StatusCode::BAD_REQUEST, - ErrorKind::InvalidHeader => StatusCode::UNAUTHORIZED, - ErrorKind::Other => StatusCode::INTERNAL_SERVER_ERROR, - }; - - let body = serde_json::json!({ - "error": match self.kind { - ErrorKind::PermissionDenied => "forbidden", - ErrorKind::NotAuthorized => "unauthorized", - ErrorKind::TokenEndpointError => "token_endpoint_error", - ErrorKind::JsonParsing => "invalid_request", - ErrorKind::InvalidHeader => "unauthorized", - ErrorKind::Other => "unknown_error", - }, - "error_description": self.msg - }); - - (status_code, axum::response::Json(body)).into_response() - } -} - -impl User { - pub fn check_scope(&self, scope: &str) -> bool { - self.scopes().any(|i| i == scope) - } - pub fn scopes(&self) -> std::str::SplitAsciiWhitespace<'_> { - self.scope.split_ascii_whitespace() - } - pub fn new(me: &str, client_id: &str, scope: &str) -> Self { - Self { - me: Url::parse(me).unwrap(), - client_id: Url::parse(client_id).unwrap(), - scope: scope.to_string(), - } - } -} - -use axum::{ - extract::{Extension, FromRequest, RequestParts, TypedHeader}, - headers::{ - authorization::{Bearer, Credentials}, - Authorization, - }, - http::StatusCode, -}; - -// this newtype is required due to axum::Extension retrieving items by type -// it's based on compiler magic matching extensions by their type's hashes -#[derive(Debug, Clone)] -pub struct TokenEndpoint(pub url::Url); - 
-#[async_trait::async_trait] -impl FromRequest for User -where - B: Send, -{ - type Rejection = IndieAuthError; - - #[cfg_attr( - all(debug_assertions, not(test)), - allow(unreachable_code, unused_variables) - )] - async fn from_request(req: &mut RequestParts) -> Result { - // Return a fake user if we're running a debug build - // I don't wanna bother with authentication - #[cfg(all(debug_assertions, not(test)))] - return Ok(User::new( - "http://localhost:8080/", - "https://quill.p3k.io/", - "create update delete media", - )); - - let TypedHeader(Authorization(token)) = - TypedHeader::>::from_request(req) - .await - .map_err(IndieAuthError::from)?; - - let Extension(TokenEndpoint(token_endpoint)): Extension = - Extension::from_request(req).await.unwrap(); - - let Extension(http): Extension = - Extension::from_request(req).await.unwrap(); - - match http - .get(token_endpoint) - .header("Authorization", token.encode()) - .header("Accept", "application/json") - .send() - .await - { - Ok(res) => match res.status() { - StatusCode::OK => match res.json::().await { - Ok(json) => match serde_json::from_value::(json.clone()) { - Ok(user) => Ok(user), - Err(err) => { - if let Some(false) = json["active"].as_bool() { - Err(IndieAuthError { - source: None, - kind: ErrorKind::NotAuthorized, - msg: "The token is not active for this user.".to_owned(), - }) - } else { - Err(IndieAuthError::from(err)) - } - } - }, - Err(err) => Err(IndieAuthError::from(err)), - }, - StatusCode::BAD_REQUEST => match res.json::().await { - Ok(err) => { - if err.error == "unauthorized" { - Err(IndieAuthError { - source: None, - kind: ErrorKind::NotAuthorized, - msg: err.error_description, - }) - } else { - Err(IndieAuthError { - source: None, - kind: ErrorKind::TokenEndpointError, - msg: err.error_description, - }) - } - } - Err(err) => Err(IndieAuthError::from(err)), - }, - _ => Err(IndieAuthError { - source: None, - msg: format!("Token endpoint returned {}", res.status()), - kind: 
ErrorKind::TokenEndpointError, - }), - }, - Err(err) => Err(IndieAuthError::from(err)), - } - } -} - -#[cfg(test)] -mod tests { - use super::User; - use axum::{ - extract::FromRequest, - http::{Method, Request}, - }; - use wiremock::{MockServer, Mock, ResponseTemplate}; - use wiremock::matchers::{method, path, header}; - - #[test] - fn user_scopes_are_checkable() { - let user = User::new( - "https://fireburn.ru/", - "https://quill.p3k.io/", - "create update media", - ); - - assert!(user.check_scope("create")); - assert!(!user.check_scope("delete")); - } - - #[inline] - fn get_http_client() -> reqwest::Client { - reqwest::Client::new() - } - - fn request>>( - auth: A, - endpoint: String, - ) -> Request<()> { - let request = Request::builder().method(Method::GET); - - match auth.into() { - Some(auth) => request.header("Authorization", auth), - None => request, - } - .extension(super::TokenEndpoint(endpoint.parse().unwrap())) - .extension(get_http_client()) - .body(()) - .unwrap() - } - - #[tokio::test] - async fn test_require_token_with_token() { - let server = MockServer::start().await; - - Mock::given(path("/token")) - .and(header("Authorization", "Bearer token")) - .respond_with(ResponseTemplate::new(200) - .set_body_json(User::new( - "https://fireburn.ru/", - "https://quill.p3k.io/", - "create update media", - )) - ) - .mount(&server) - .await; - - let request = request("Bearer token", format!("{}/token", &server.uri())); - let mut parts = axum::extract::RequestParts::new(request); - let user = User::from_request(&mut parts).await.unwrap(); - - assert_eq!(user.me.as_str(), "https://fireburn.ru/") - } - - #[tokio::test] - async fn test_require_token_fake_token() { - let server = MockServer::start().await; - - Mock::given(path("/refuse_token")) - .respond_with(ResponseTemplate::new(200) - .set_body_json(serde_json::json!({"active": false})) - ) - .mount(&server) - .await; - - let request = request("Bearer token", format!("{}/refuse_token", &server.uri())); - let 
mut parts = axum::extract::RequestParts::new(request); - let err = User::from_request(&mut parts).await.unwrap_err(); - - assert_eq!(err.kind, super::ErrorKind::NotAuthorized) - } - - #[tokio::test] - async fn test_require_token_no_token() { - let server = MockServer::start().await; - - Mock::given(path("/should_never_be_called")) - .respond_with(ResponseTemplate::new(500)) - .expect(0) - .mount(&server) - .await; - - let request = request(None, format!("{}/should_never_be_called", &server.uri())); - let mut parts = axum::extract::RequestParts::new(request); - let err = User::from_request(&mut parts).await.unwrap_err(); - - assert_eq!(err.kind, super::ErrorKind::InvalidHeader); - } - - #[tokio::test] - async fn test_require_token_400_error_unauthorized() { - let server = MockServer::start().await; - - Mock::given(path("/refuse_token_with_400")) - .and(header("Authorization", "Bearer token")) - .respond_with(ResponseTemplate::new(400) - .set_body_json(serde_json::json!({ - "error": "unauthorized", - "error_description": "The token provided was malformed" - })) - ) - .mount(&server) - .await; - - let request = request( - "Bearer token", - format!("{}/refuse_token_with_400", &server.uri()), - ); - let mut parts = axum::extract::RequestParts::new(request); - let err = User::from_request(&mut parts).await.unwrap_err(); - - assert_eq!(err.kind, super::ErrorKind::NotAuthorized); - } -} diff --git a/kittybox-rs/src/webmentions/check.rs b/kittybox-rs/src/webmentions/check.rs deleted file mode 100644 index f7322f7..0000000 --- a/kittybox-rs/src/webmentions/check.rs +++ /dev/null @@ -1,113 +0,0 @@ -use std::{cell::RefCell, rc::Rc}; -use microformats::{types::PropertyValue, html5ever::{self, tendril::TendrilSink}}; -use kittybox_util::MentionType; - -#[derive(thiserror::Error, Debug)] -pub enum Error { - #[error("microformats error: {0}")] - Microformats(#[from] microformats::Error), - // #[error("json error: {0}")] - // Json(#[from] serde_json::Error), - #[error("url parse 
error: {0}")] - UrlParse(#[from] url::ParseError), -} - -#[tracing::instrument] -pub fn check_mention(document: impl AsRef + std::fmt::Debug, base_url: &url::Url, link: &url::Url) -> Result, Error> { - tracing::debug!("Parsing MF2 markup..."); - // First, check the document for MF2 markup - let document = microformats::from_html(document.as_ref(), base_url.clone())?; - - // Get an iterator of all items - let items_iter = document.items.iter() - .map(AsRef::as_ref) - .map(RefCell::borrow); - - for item in items_iter { - tracing::debug!("Processing item: {:?}", item); - - let props = item.properties.borrow(); - for (prop, interaction_type) in [ - ("in-reply-to", MentionType::Reply), ("like-of", MentionType::Like), - ("bookmark-of", MentionType::Bookmark), ("repost-of", MentionType::Repost) - ] { - if let Some(propvals) = props.get(prop) { - tracing::debug!("Has a u-{} property", prop); - for val in propvals { - if let PropertyValue::Url(url) = val { - if url == link { - tracing::debug!("URL matches! Webmention is valid"); - return Ok(Some((interaction_type, serde_json::to_value(&*item).unwrap()))) - } - } - } - } - } - // Process `content` - tracing::debug!("Processing e-content..."); - if let Some(PropertyValue::Fragment(content)) = props.get("content") - .map(Vec::as_slice) - .unwrap_or_default() - .first() - { - tracing::debug!("Parsing HTML data..."); - let root = html5ever::parse_document(html5ever::rcdom::RcDom::default(), Default::default()) - .from_utf8() - .one(content.html.to_owned().as_bytes()) - .document; - - // This is a trick to unwrap recursion into a loop - // - // A list of unprocessed node is made. Then, in each - // iteration, the list is "taken" and replaced with an - // empty list, which is populated with nodes for the next - // iteration of the loop. - // - // Empty list means all nodes were processed. 
- let mut unprocessed_nodes: Vec> = root.children.borrow().iter().cloned().collect(); - while !unprocessed_nodes.is_empty() { - // "Take" the list out of its memory slot, replace it with an empty list - let nodes = std::mem::take(&mut unprocessed_nodes); - tracing::debug!("Processing list of {} nodes", nodes.len()); - 'nodes_loop: for node in nodes.into_iter() { - // Add children nodes to the list for the next iteration - unprocessed_nodes.extend(node.children.borrow().iter().cloned()); - - if let html5ever::rcdom::NodeData::Element { ref name, ref attrs, .. } = node.data { - // If it's not `
`, skip it - if name.local != *"a" { continue; } - let mut is_mention: bool = false; - for attr in attrs.borrow().iter() { - if attr.name.local == *"rel" { - // Don't count `rel="nofollow"` links β€” a web crawler should ignore them - // and so for purposes of driving visitors they are useless - if attr.value - .as_ref() - .split([',', ' ']) - .any(|v| v == "nofollow") - { - // Skip the entire node. - continue 'nodes_loop; - } - } - // if it's not ``, skip it - if attr.name.local != *"href" { continue; } - // Be forgiving in parsing URLs, and resolve them against the base URL - if let Ok(url) = base_url.join(attr.value.as_ref()) { - if &url == link { - is_mention = true; - } - } - } - if is_mention { - return Ok(Some((MentionType::Mention, serde_json::to_value(&*item).unwrap()))); - } - } - } - } - - } - } - - Ok(None) -} diff --git a/kittybox-rs/src/webmentions/mod.rs b/kittybox-rs/src/webmentions/mod.rs deleted file mode 100644 index 95ea870..0000000 --- a/kittybox-rs/src/webmentions/mod.rs +++ /dev/null @@ -1,195 +0,0 @@ -use axum::{Form, response::{IntoResponse, Response}, Extension}; -use axum::http::StatusCode; -use tracing::error; - -use crate::database::{Storage, StorageError}; -use self::queue::JobQueue; -pub mod queue; - -#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)] -#[cfg_attr(feature = "sqlx", derive(sqlx::FromRow))] -pub struct Webmention { - source: String, - target: String, -} - -impl queue::JobItem for Webmention {} -impl queue::PostgresJobItem for Webmention { - const DATABASE_NAME: &'static str = "kittybox_webmention.incoming_webmention_queue"; - const NOTIFICATION_CHANNEL: &'static str = "incoming_webmention"; -} - -async fn accept_webmention>( - Extension(queue): Extension, - Form(webmention): Form, -) -> Response { - if let Err(err) = webmention.source.parse::() { - return (StatusCode::BAD_REQUEST, err.to_string()).into_response() - } - if let Err(err) = webmention.target.parse::() { - return 
(StatusCode::BAD_REQUEST, err.to_string()).into_response() - } - - match queue.put(&webmention).await { - Ok(id) => (StatusCode::ACCEPTED, [ - ("Location", format!("/.kittybox/webmention/{id}")) - ]).into_response(), - Err(err) => (StatusCode::INTERNAL_SERVER_ERROR, [ - ("Content-Type", "text/plain") - ], err.to_string()).into_response() - } -} - -pub fn router, S: Storage + 'static>( - queue: Q, db: S, http: reqwest::Client, - cancellation_token: tokio_util::sync::CancellationToken -) -> (axum::Router, SupervisedTask) { - // Automatically spawn a background task to handle webmentions - let bgtask_handle = supervised_webmentions_task(queue.clone(), db, http, cancellation_token); - - let router = axum::Router::new() - .route("/.kittybox/webmention", - axum::routing::post(accept_webmention::) - ) - .layer(Extension(queue)); - - (router, bgtask_handle) -} - -#[derive(thiserror::Error, Debug)] -pub enum SupervisorError { - #[error("the task was explicitly cancelled")] - Cancelled -} - -pub type SupervisedTask = tokio::task::JoinHandle>; - -pub fn supervisor(mut f: F, cancellation_token: tokio_util::sync::CancellationToken) -> SupervisedTask -where - E: std::error::Error + std::fmt::Debug + Send + 'static, - A: std::future::Future> + Send + 'static, - F: FnMut() -> A + Send + 'static -{ - - let supervisor_future = async move { - loop { - // Don't spawn the task if we are already cancelled, but - // have somehow missed it (probably because the task - // crashed and we immediately received a cancellation - // request after noticing the crashed task) - if cancellation_token.is_cancelled() { - return Err(SupervisorError::Cancelled) - } - let task = tokio::task::spawn(f()); - tokio::select! 
{ - _ = cancellation_token.cancelled() => { - tracing::info!("Shutdown of background task {:?} requested.", std::any::type_name::()); - return Err(SupervisorError::Cancelled) - } - task_result = task => match task_result { - Err(e) => tracing::error!("background task {:?} exited unexpectedly: {}", std::any::type_name::(), e), - Ok(Err(e)) => tracing::error!("background task {:?} returned error: {}", std::any::type_name::(), e), - Ok(Ok(_)) => unreachable!("task's Ok is Infallible") - } - } - tracing::debug!("Sleeping for a little while to back-off..."); - tokio::time::sleep(std::time::Duration::from_secs(5)).await; - } - }; - #[cfg(not(tokio_unstable))] - return tokio::task::spawn(supervisor_future); - #[cfg(tokio_unstable)] - return tokio::task::Builder::new() - .name(format!("supervisor for background task {}", std::any::type_name::()).as_str()) - .spawn(supervisor_future) - .unwrap(); -} - -mod check; - -#[derive(thiserror::Error, Debug)] -enum Error { - #[error("queue error: {0}")] - Queue(#[from] Q), - #[error("storage error: {0}")] - Storage(StorageError) -} - -async fn process_webmentions_from_queue, S: Storage + 'static>(queue: Q, db: S, http: reqwest::Client) -> Result> { - use futures_util::StreamExt; - use self::queue::Job; - - let mut stream = queue.into_stream().await?; - while let Some(item) = stream.next().await.transpose()? 
{ - let job = item.job(); - let (source, target) = ( - job.source.parse::().unwrap(), - job.target.parse::().unwrap() - ); - - let (code, text) = match http.get(source.clone()).send().await { - Ok(response) => { - let code = response.status(); - if ![StatusCode::OK, StatusCode::GONE].iter().any(|i| i == &code) { - error!("error processing webmention: webpage fetch returned {}", code); - continue; - } - match response.text().await { - Ok(text) => (code, text), - Err(err) => { - error!("error processing webmention: error fetching webpage text: {}", err); - continue - } - } - } - Err(err) => { - error!("error processing webmention: error requesting webpage: {}", err); - continue - } - }; - - if code == StatusCode::GONE { - todo!("removing webmentions is not implemented yet"); - // db.remove_webmention(target.as_str(), source.as_str()).await.map_err(Error::::Storage)?; - } else { - // Verify webmention - let (mention_type, mut mention) = match tokio::task::block_in_place({ - || check::check_mention(text, &source, &target) - }) { - Ok(Some(mention_type)) => mention_type, - Ok(None) => { - error!("webmention {} -> {} invalid, rejecting", source, target); - item.done().await?; - continue; - } - Err(err) => { - error!("error processing webmention: error checking webmention: {}", err); - continue; - } - }; - - { - mention["type"] = serde_json::json!(["h-cite"]); - - if !mention["properties"].as_object().unwrap().contains_key("uid") { - let url = mention["properties"]["url"][0].as_str().unwrap_or_else(|| target.as_str()).to_owned(); - let props = mention["properties"].as_object_mut().unwrap(); - props.insert("uid".to_owned(), serde_json::Value::Array( - vec![serde_json::Value::String(url)]) - ); - } - } - - db.add_or_update_webmention(target.as_str(), mention_type, mention).await.map_err(Error::::Storage)?; - } - } - unreachable!() -} - -fn supervised_webmentions_task, S: Storage + 'static>( - queue: Q, db: S, - http: reqwest::Client, - cancellation_token: 
tokio_util::sync::CancellationToken -) -> SupervisedTask { - supervisor::, _, _>(move || process_webmentions_from_queue(queue.clone(), db.clone(), http.clone()), cancellation_token) -} diff --git a/kittybox-rs/src/webmentions/queue.rs b/kittybox-rs/src/webmentions/queue.rs deleted file mode 100644 index b811e71..0000000 --- a/kittybox-rs/src/webmentions/queue.rs +++ /dev/null @@ -1,303 +0,0 @@ -use std::{pin::Pin, str::FromStr}; - -use futures_util::{Stream, StreamExt}; -use sqlx::{postgres::PgListener, Executor}; -use uuid::Uuid; - -use super::Webmention; - -static MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!("./migrations/webmention"); - -pub use kittybox_util::queue::{JobQueue, JobItem, Job}; - -pub trait PostgresJobItem: JobItem + sqlx::FromRow<'static, sqlx::postgres::PgRow> { - const DATABASE_NAME: &'static str; - const NOTIFICATION_CHANNEL: &'static str; -} - -#[derive(sqlx::FromRow)] -struct PostgresJobRow { - id: Uuid, - #[sqlx(flatten)] - job: T -} - -#[derive(Debug)] -pub struct PostgresJob { - id: Uuid, - job: T, - // This will normally always be Some, except on drop - txn: Option>, - runtime_handle: tokio::runtime::Handle, -} - - -impl Drop for PostgresJob { - // This is an emulation of "async drop" β€” the struct retains a - // runtime handle, which it uses to block on a future that does - // the actual cleanup. - // - // Of course, this is not portable between runtimes, but I don't - // care about that, since Kittybox is designed to work within the - // Tokio ecosystem. - fn drop(&mut self) { - tracing::error!("Job {:?} failed, incrementing attempts...", &self); - if let Some(mut txn) = self.txn.take() { - let id = self.id; - self.runtime_handle.spawn(async move { - tracing::debug!("Constructing query to increment attempts for job {}...", id); - // UPDATE "T::DATABASE_NAME" WHERE id = $1 SET attempts = attempts + 1 - sqlx::query_builder::QueryBuilder::new("UPDATE ") - // This is safe from a SQL injection standpoint, since it is a constant. 
- .push(T::DATABASE_NAME) - .push(" SET attempts = attempts + 1") - .push(" WHERE id = ") - .push_bind(id) - .build() - .execute(&mut *txn) - .await - .unwrap(); - sqlx::query_builder::QueryBuilder::new("NOTIFY ") - .push(T::NOTIFICATION_CHANNEL) - .build() - .execute(&mut *txn) - .await - .unwrap(); - txn.commit().await.unwrap(); - }); - } - } -} - -#[cfg(test)] -impl PostgresJob { - async fn attempts(&mut self) -> Result { - sqlx::query_builder::QueryBuilder::new("SELECT attempts FROM ") - .push(T::DATABASE_NAME) - .push(" WHERE id = ") - .push_bind(self.id) - .build_query_as::<(i32,)>() - // It's safe to unwrap here, because we "take" the txn only on drop or commit, - // where it's passed by value, not by reference. - .fetch_one(self.txn.as_deref_mut().unwrap()) - .await - .map(|(i,)| i as usize) - } -} - -#[async_trait::async_trait] -impl Job> for PostgresJob { - fn job(&self) -> &Webmention { - &self.job - } - async fn done(mut self) -> Result<(), as JobQueue>::Error> { - tracing::debug!("Deleting {} from the job queue", self.id); - sqlx::query("DELETE FROM kittybox_webmention.incoming_webmention_queue WHERE id = $1") - .bind(self.id) - .execute(self.txn.as_deref_mut().unwrap()) - .await?; - - self.txn.take().unwrap().commit().await - } -} - -pub struct PostgresJobQueue { - db: sqlx::PgPool, - _phantom: std::marker::PhantomData -} -impl Clone for PostgresJobQueue { - fn clone(&self) -> Self { - Self { - db: self.db.clone(), - _phantom: std::marker::PhantomData - } - } -} - -impl PostgresJobQueue { - pub async fn new(uri: &str) -> Result { - let mut options = sqlx::postgres::PgConnectOptions::from_str(uri)? 
- .options([("search_path", "kittybox_webmention")]); - if let Ok(password_file) = std::env::var("PGPASS_FILE") { - let password = tokio::fs::read_to_string(password_file).await.unwrap(); - options = options.password(&password); - } else if let Ok(password) = std::env::var("PGPASS") { - options = options.password(&password) - } - Self::from_pool( - sqlx::postgres::PgPoolOptions::new() - .max_connections(50) - .connect_with(options) - .await? - ).await - - } - - pub(crate) async fn from_pool(db: sqlx::PgPool) -> Result { - db.execute(sqlx::query("CREATE SCHEMA IF NOT EXISTS kittybox_webmention")).await?; - MIGRATOR.run(&db).await?; - Ok(Self { db, _phantom: std::marker::PhantomData }) - } -} - -#[async_trait::async_trait] -impl JobQueue for PostgresJobQueue { - type Job = PostgresJob; - type Error = sqlx::Error; - - async fn get_one(&self) -> Result, Self::Error> { - let mut txn = self.db.begin().await?; - - match sqlx::query_as::<_, PostgresJobRow>( - "SELECT id, source, target FROM kittybox_webmention.incoming_webmention_queue WHERE attempts < 5 FOR UPDATE SKIP LOCKED LIMIT 1" - ) - .fetch_optional(&mut *txn) - .await? 
- { - Some(job_row) => { - return Ok(Some(Self::Job { - id: job_row.id, - job: job_row.job, - txn: Some(txn), - runtime_handle: tokio::runtime::Handle::current(), - })) - }, - None => Ok(None) - } - } - - async fn put(&self, item: &Webmention) -> Result { - sqlx::query_scalar::<_, Uuid>("INSERT INTO kittybox_webmention.incoming_webmention_queue (source, target) VALUES ($1, $2) RETURNING id") - .bind(item.source.as_str()) - .bind(item.target.as_str()) - .fetch_one(&self.db) - .await - } - - async fn into_stream(self) -> Result> + Send>>, Self::Error> { - let mut listener = PgListener::connect_with(&self.db).await?; - listener.listen("incoming_webmention").await?; - - let stream: Pin> + Send>> = futures_util::stream::try_unfold((), { - let listener = std::sync::Arc::new(tokio::sync::Mutex::new(listener)); - move |_| { - let queue = self.clone(); - let listener = listener.clone(); - async move { - loop { - match queue.get_one().await? { - Some(item) => return Ok(Some((item, ()))), - None => { - listener.lock().await.recv().await?; - continue - } - } - } - } - } - }).boxed(); - - Ok(stream) - } -} - -#[cfg(test)] -mod tests { - use std::sync::Arc; - - use super::{Webmention, PostgresJobQueue, Job, JobQueue, MIGRATOR}; - use futures_util::StreamExt; - - #[sqlx::test(migrator = "MIGRATOR")] - #[tracing_test::traced_test] - async fn test_webmention_queue(pool: sqlx::PgPool) -> Result<(), sqlx::Error> { - let test_webmention = Webmention { - source: "https://fireburn.ru/posts/lorem-ipsum".to_owned(), - target: "https://aaronparecki.com/posts/dolor-sit-amet".to_owned() - }; - - let queue = PostgresJobQueue::::from_pool(pool).await?; - tracing::debug!("Putting webmention into queue"); - queue.put(&test_webmention).await?; - { - let mut job_description = queue.get_one().await?.unwrap(); - assert_eq!(job_description.job(), &test_webmention); - assert_eq!(job_description.attempts().await?, 0); - } - tracing::debug!("Creating a stream"); - let mut stream = 
queue.clone().into_stream().await?; - - { - let mut guard = stream.next().await.transpose()?.unwrap(); - assert_eq!(guard.job(), &test_webmention); - assert_eq!(guard.attempts().await?, 1); - if let Some(item) = queue.get_one().await? { - panic!("Unexpected item {:?} returned from job queue!", item) - }; - } - - { - let mut guard = stream.next().await.transpose()?.unwrap(); - assert_eq!(guard.job(), &test_webmention); - assert_eq!(guard.attempts().await?, 2); - guard.done().await?; - } - - match queue.get_one().await? { - Some(item) => panic!("Unexpected item {:?} returned from job queue!", item), - None => Ok(()) - } - } - - #[sqlx::test(migrator = "MIGRATOR")] - #[tracing_test::traced_test] - async fn test_no_hangups_in_queue(pool: sqlx::PgPool) -> Result<(), sqlx::Error> { - let test_webmention = Webmention { - source: "https://fireburn.ru/posts/lorem-ipsum".to_owned(), - target: "https://aaronparecki.com/posts/dolor-sit-amet".to_owned() - }; - - let queue = PostgresJobQueue::::from_pool(pool.clone()).await?; - tracing::debug!("Putting webmention into queue"); - queue.put(&test_webmention).await?; - tracing::debug!("Creating a stream"); - let mut stream = queue.clone().into_stream().await?; - - // Synchronisation barrier that will be useful later - let barrier = Arc::new(tokio::sync::Barrier::new(2)); - { - // Get one job guard from a queue - let mut guard = stream.next().await.transpose()?.unwrap(); - assert_eq!(guard.job(), &test_webmention); - assert_eq!(guard.attempts().await?, 0); - - tokio::task::spawn({ - let barrier = barrier.clone(); - async move { - // Wait for the signal to drop the guard! 
- barrier.wait().await; - - drop(guard) - } - }); - } - tokio::time::timeout(std::time::Duration::from_secs(1), stream.next()).await.unwrap_err(); - - let future = tokio::task::spawn( - tokio::time::timeout( - std::time::Duration::from_secs(10), async move { - stream.next().await.unwrap().unwrap() - } - ) - ); - // Let the other task drop the guard it is holding - barrier.wait().await; - let mut guard = future.await - .expect("Timeout on fetching item") - .expect("Job queue error"); - assert_eq!(guard.job(), &test_webmention); - assert_eq!(guard.attempts().await?, 1); - - Ok(()) - } -} diff --git a/kittybox-rs/templates/Cargo.toml b/kittybox-rs/templates/Cargo.toml deleted file mode 100644 index 38e73b3..0000000 --- a/kittybox-rs/templates/Cargo.toml +++ /dev/null @@ -1,33 +0,0 @@ -[package] -name = "kittybox-frontend-renderer" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[build-dependencies] -libflate = "^2.0.0" -walkdir = "^2.3.2" - -[dev-dependencies] -faker_rand = "^0.1.1" -rand = "^0.8.5" -[dev-dependencies.microformats] -version="^0.3.0" - -[dependencies] -ellipse = "^0.2.0" -http = "^0.2.7" -markup = "^0.13.1" -serde_json = "^1.0.64" -include_dir = "^0.7.2" -axum = "^0.6.18" -[dependencies.chrono] -version = "^0.4.19" -features = ["serde"] -[dependencies.kittybox-util] -version = "0.1.0" -path = "../util" -[dependencies.kittybox-indieauth] -version = "0.1.0" -path = "../indieauth" \ No newline at end of file diff --git a/kittybox-rs/templates/assets/jslicense.html b/kittybox-rs/templates/assets/jslicense.html deleted file mode 100644 index 90c681c..0000000 --- a/kittybox-rs/templates/assets/jslicense.html +++ /dev/null @@ -1,31 +0,0 @@ - - - - JavaScript licensing information for Kittybox - - -

All JavaScript included with Kittybox is licensed as free software, most of it under AGPL-3.0.

- - - - - - - - - - - - - - - - - - - - - -
onboarding.jsAGPL-3.0onboarding.ts (Kittybox source code)
indieauth.jsAGPL-3.0indieauth.ts (Kittybox source code)
lib.jsAGPL-3.0lib.ts (Kittybox source code)
indieauth.jsAGPL-3.0indieauth.ts (Kittybox source code)
- - diff --git a/kittybox-rs/templates/assets/onboarding.css b/kittybox-rs/templates/assets/onboarding.css deleted file mode 100644 index 6f191b9..0000000 --- a/kittybox-rs/templates/assets/onboarding.css +++ /dev/null @@ -1,33 +0,0 @@ -form.onboarding > ul#progressbar > li.active { - font-weight: bold; -} -form.onboarding > ul#progressbar { - display: flex; list-style: none; justify-content: space-around; -} - -form.onboarding > fieldset > div.switch_card_buttons { - display: flex; - justify-content: space-between; - width: 100%; -} -form.onboarding > fieldset > div.switch_card_buttons button:last-child { - margin-left: auto; -} -.form_group, .multi_input { - display: flex; - flex-direction: column; -} -.multi_input { - align-items: start; -} -.multi_input > input { - width: 100%; - align-self: stretch; -} -form.onboarding > fieldset > .form_group + * { - margin-top: .75rem; -} -form.onboarding textarea { - width: 100%; - resize: vertical; -} diff --git a/kittybox-rs/templates/assets/style.css b/kittybox-rs/templates/assets/style.css deleted file mode 100644 index 4cd7c09..0000000 --- a/kittybox-rs/templates/assets/style.css +++ /dev/null @@ -1,236 +0,0 @@ -@import url('https://fonts.googleapis.com/css2?family=Caveat:wght@500&family=Lato&display=swap'); - -:root { - font-family: var(--font-normal); - --font-normal: 'Lato', sans-serif; - --font-accent: 'Caveat', cursive; - --type-scale: 1.250; - - --primary-accent: purple; - --secondary-accent: gold; -} -* { - box-sizing: border-box; -} -body { - margin: 0; -} -h1, h2, h3, h4, h5, h6 { - font-family: var(--font-accent); -} -.titanic { - font-size: 3.815rem -} -h1, .xxxlarge { - margin-top: 0; - margin-bottom: 0; - font-size: 3.052rem; -} -h2, .xxlarge {font-size: 2.441rem;} -h3, .xlarge {font-size: 1.953rem;} -h4, .larger {font-size: 1.563rem;} -h5, .large {font-size: 1.25rem;} -h6, .normal {font-size: 1rem;} -small, .small { font-size: 0.8em; } - -nav#headerbar { - background: var(--primary-accent); - color: 
whitesmoke; - border-bottom: .75rem solid var(--secondary-accent); - padding: .3rem; - vertical-align: center; - /*position: sticky; - top: 0;*/ -} -nav#headerbar a#homepage { - font-weight: bolder; - font-family: var(--font-accent); - font-size: 2rem; -} -nav#headerbar > ul { - display: flex; - padding: inherit; - margin: inherit; - gap: .75em; -} -nav#headerbar > ul > li { - display: inline-flex; - flex-direction: column; - marker: none; - padding: inherit; - margin: inherit; - justify-content: center; -} -nav#headerbar > ul > li.shiftright { - margin-left: auto; -} -nav#headerbar a { - color: white; -} -body > main { - max-width: 60rem; - margin: auto; - padding: .75rem; -} -body > footer { - text-align: center; -} -.sidebyside { - display: flex; - flex-wrap: wrap; - gap: .75rem; - margin-top: .75rem; - margin-bottom: .75rem; -} -.sidebyside > * { - width: 100%; - margin-top: 0; - margin-bottom: 0; - border: .125rem solid black; - border-radius: .75rem; - padding: .75rem; - margin-top: 0 !important; - margin-bottom: 0 !important; - flex-basis: 28rem; - flex-grow: 1; -} -article > * + * { - margin-top: .75rem; -} -article > header { - padding-bottom: .75rem; - border-bottom: 1px solid gray; -} -article > footer { - border-top: 1px solid gray; -} -article.h-entry, article.h-feed, article.h-card, article.h-event { - border: 2px solid black; - border-radius: .75rem; - padding: .75rem; - margin-top: .75rem; - margin-bottom: .75rem; -} -.webinteractions > ul.counters { - display: inline-flex; - padding: inherit; - margin: inherit; - gap: .75em; - flex-wrap: wrap; -} -.webinteractions > ul.counters > li > .icon { - font-size: 1.5em; -} -.webinteractions > ul.counters > li { - display: inline-flex; - align-items: center; - gap: .5em; -} -article.h-entry > header.metadata ul { - padding-inline-start: unset; - margin: unset; -} -article.h-entry > header.metadata ul.categories { - flex-wrap: wrap; - display: inline-flex; - list-style-type: none; -} -article.h-entry > 
header.metadata ul.categories li { - display: inline; - margin-inline-start: unset; -} -article.h-entry > header.metadata ul li { - margin-inline-start: 2.5em; -} -article.h-entry .e-content pre { - border: 1px solid gray; - border-radius: 0.5em; - overflow-y: auto; - padding: 0.5em; -} -article.h-entry img.u-photo { - max-width: 80%; - max-height: 90vh; - display: block; - margin: auto; -} -article.h-entry img.u-photo + * { - margin-top: .75rem; -} -article.h-entry > header.metadata span + span::before { - content: " | " -} -li.p-category::before { - content: " #"; -} - -article.h-entry ul.categories { - gap: .2em; -} -article.h-card img.u-photo { - border-radius: 100%; - float: left; - height: 8rem; - border: 1px solid gray; - margin-right: .75em; - object-fit: cover; - aspect-ratio: 1; -} - -.mini-h-card img, #indieauth_page img { - height: 2em; - display: inline-block; - border: 2px solid gray; - border-radius: 100%; - margin-right: 0.5rem; -} - -.mini-h-card * { - vertical-align: middle; -} - -.mini-h-card a { - text-decoration: none; -} - -#indieauth_page > #introduction { - border: .125rem solid gray; - border-radius: .75rem; - margin: 1.25rem; - padding: .75rem; -} - -/* Rainbow hearts */ -/* TODO make toggleable by splitting into separate CSS file */ -@counter-style rainbow-hearts { - system: cyclic; - symbols: "❀️" "🧑" "πŸ’›" "πŸ’š" "πŸ’™" "πŸ’œ"; -} -body { - counter-reset: like-icons; -} -span.like-icon::before { - counter-increment: like-icons; - content: "" counter(like-icons, rainbow-hearts); -} -span.like-icon-label { - display: none; -} -ul.h-feed { - list-style: none; -} - -body > a#skip-to-content { - position: absolute; - width: 1px; - height: 1px; - margin: 0; - overflow: hidden; - clip: rect(1px, 1px, 1px, 1px); - background: white; - padding: 16px; -} -body > a#skip-to-content:focus { - width: auto; height: auto; clip: auto; - z-index: 999; -} diff --git a/kittybox-rs/templates/build.rs b/kittybox-rs/templates/build.rs deleted file mode 
100644 index ccd5b19..0000000 --- a/kittybox-rs/templates/build.rs +++ /dev/null @@ -1,90 +0,0 @@ -use std::ffi::OsStr; - -use libflate::gzip::Encoder; -use walkdir::WalkDir; - -fn main() -> Result<(), std::io::Error> { - use std::env; - let out_dir = std::path::PathBuf::from(env::var("OUT_DIR").unwrap()); - - println!("cargo::rerun-if-changed=javascript/"); - if let Ok(exit) = std::process::Command::new("tsc") - .arg("--outDir") - .arg(&out_dir) - .current_dir("javascript") - .spawn()? - .wait() - { - if !exit.success() { - std::process::exit(exit.code().unwrap_or(1)) - } - } - - println!("cargo:rerun-if-changed=assets/"); - let assets_path = std::path::Path::new("assets"); - let mut assets = WalkDir::new(&assets_path) - .into_iter(); - while let Some(Ok(entry)) = assets.next() { - if entry.file_type().is_dir() { - if let Err(err) = std::fs::create_dir(&out_dir.join(entry.path())) { - if err.kind() != std::io::ErrorKind::AlreadyExists { - return Err(err) - } - } - } else { - std::fs::copy(entry.path(), &out_dir.join(entry.path().strip_prefix(assets_path).unwrap()))?; - } - } - - let walker = WalkDir::new(&out_dir) - .into_iter() - .map(Result::unwrap) - .filter(|e| { - e.file_type().is_file() && e.path().extension().unwrap() != "gz" - }); - for entry in walker { - let normal_path = entry.path(); - let gzip_path = normal_path.with_extension({ - let mut extension = normal_path - .extension() - .unwrap() - .to_owned(); - extension.push(OsStr::new(".gz")); - extension - }); - eprintln!( - "{} -> {}", - normal_path.strip_prefix(&out_dir).unwrap().display(), - gzip_path.strip_prefix(&out_dir).unwrap().display() - ); - { - let mut out_file = std::fs::OpenOptions::new() - .create(true) - .truncate(true) - .write(true) - .open(&gzip_path)?; - - let mut in_file = std::fs::File::open(&normal_path)?; - - let mut encoder = Encoder::new(&mut out_file)?; - std::io::copy(&mut in_file, &mut encoder)?; - encoder.finish().into_result()?; - } - - let normal_len: f64 = 
std::fs::metadata(&normal_path).unwrap().len() as f64; - let gzipped_len: f64 = std::fs::metadata(&gzip_path).unwrap().len() as f64; - let ratio = gzipped_len / normal_len; - eprintln!("Ratio: {}", ratio); - if ratio <= 0.9 { - std::fs::remove_file(&normal_path)? - } else { - println!( - "cargo:warning={} compression ratio is {} (> 0.9), leaving as is", - entry.path().display(), - ratio - ); - std::fs::remove_file(&gzip_path)? - } - } - Ok(()) -} diff --git a/kittybox-rs/templates/javascript/dist/indieauth.js b/kittybox-rs/templates/javascript/dist/indieauth.js deleted file mode 100644 index 297b4b5..0000000 --- a/kittybox-rs/templates/javascript/dist/indieauth.js +++ /dev/null @@ -1,118 +0,0 @@ -"use strict"; -const WEBAUTHN_TIMEOUT = 60 * 1000; -async function webauthn_create_credential() { - const response = await fetch("/.kittybox/webauthn/pre_register"); - const { challenge, rp, user } = await response.json(); - return await navigator.credentials.create({ - publicKey: { - challenge: Uint8Array.from(challenge, (c) => c.charCodeAt(0)), - rp: rp, - user: { - id: Uint8Array.from(user.cred_id, (c) => c.charCodeAt(0)), - name: user.name, - displayName: user.displayName - }, - pubKeyCredParams: [{ alg: -7, type: "public-key" }], - authenticatorSelection: {}, - timeout: WEBAUTHN_TIMEOUT, - attestation: "none" - } - }); -} -async function webauthn_authenticate() { - const response = await fetch("/.kittybox/webauthn/pre_auth"); - const { challenge, credentials } = await response.json(); - try { - return await navigator.credentials.get({ - publicKey: { - challenge: Uint8Array.from(challenge, (c) => c.charCodeAt(0)), - allowCredentials: credentials.map(cred => ({ - id: Uint8Array.from(cred.id, c => c.charCodeAt(0)), - type: cred.type - })), - timeout: WEBAUTHN_TIMEOUT - } - }); - } - catch (e) { - console.error("WebAuthn authentication failed:", e); - alert("Using your authenticator failed. 
(Check the DevTools for details)"); - throw e; - } -} -async function submit_handler(e) { - e.preventDefault(); - if (e.target != null && e.target instanceof HTMLFormElement) { - const form = e.target; - let scopes; - if (form.elements.namedItem("scope") === undefined) { - scopes = []; - } - else if (form.elements.namedItem("scope") instanceof Node) { - scopes = [form.elements.namedItem("scope")] - .filter((e) => e.checked) - .map((e) => e.value); - } - else { - scopes = Array.from(form.elements.namedItem("scope")) - .filter((e) => e.checked) - .map((e) => e.value); - } - const authorization_request = { - response_type: form.elements.namedItem("response_type").value, - client_id: form.elements.namedItem("client_id").value, - redirect_uri: form.elements.namedItem("redirect_uri").value, - state: form.elements.namedItem("state").value, - code_challenge: form.elements.namedItem("code_challenge").value, - code_challenge_method: form.elements.namedItem("code_challenge_method").value, - // I would love to leave that as a list, but such is the form of - // IndieAuth. application/x-www-form-urlencoded doesn't have - // lists, so scopes are space-separated instead. It is annoying. - scope: scopes.length > 0 ? 
scopes.join(" ") : undefined, - }; - let credential = null; - switch (form.elements.namedItem("auth_method").value) { - case "password": - credential = form.elements.namedItem("user_password").value; - if (credential.length == 0) { - alert("Please enter a password."); - return; - } - break; - case "webauthn": - // credential = await webauthn_authenticate(); - alert("WebAuthn isn't implemented yet!"); - return; - break; - default: - alert("Please choose an authentication method."); - return; - } - console.log("Authorization request:", authorization_request); - console.log("Authentication method:", credential); - const body = JSON.stringify({ - request: authorization_request, - authorization_method: credential - }); - console.log(body); - const response = await fetch(form.action, { - method: form.method, - body: body, - headers: { - "Content-Type": "application/json" - } - }); - if (response.ok) { - let location = response.headers.get("Location"); - if (location != null) { - window.location.href = location; - } - else { - throw "Error: didn't return a location"; - } - } - } - else { - return; - } -} diff --git a/kittybox-rs/templates/javascript/dist/webauthn/register.js b/kittybox-rs/templates/javascript/dist/webauthn/register.js deleted file mode 100644 index 3918c74..0000000 --- a/kittybox-rs/templates/javascript/dist/webauthn/register.js +++ /dev/null @@ -1 +0,0 @@ -"use strict"; diff --git a/kittybox-rs/templates/javascript/src/indieauth.ts b/kittybox-rs/templates/javascript/src/indieauth.ts deleted file mode 100644 index 01732b7..0000000 --- a/kittybox-rs/templates/javascript/src/indieauth.ts +++ /dev/null @@ -1,150 +0,0 @@ -import { unreachable } from "./lib.js"; - -const WEBAUTHN_TIMEOUT = 60 * 1000; - -interface KittyboxWebauthnPreRegistrationData { - challenge: string, - rp: PublicKeyCredentialRpEntity, - user: { - cred_id: string, - name: string, - displayName: string - } -} - -async function webauthn_create_credential() { - const response = await 
fetch("/.kittybox/webauthn/pre_register"); - const { challenge, rp, user }: KittyboxWebauthnPreRegistrationData = await response.json(); - - return await navigator.credentials.create({ - publicKey: { - challenge: Uint8Array.from(challenge, (c) => c.charCodeAt(0)), - rp: rp, - user: { - id: Uint8Array.from(user.cred_id, (c) => c.charCodeAt(0)), - name: user.name, - displayName: user.displayName - }, - pubKeyCredParams: [{alg: -7, type: "public-key"}], - authenticatorSelection: {}, - timeout: WEBAUTHN_TIMEOUT, - attestation: "none" - } - }); -} - -interface KittyboxWebauthnCredential { - id: string, - type: "public-key" -} - -interface KittyboxWebauthnPreAuthenticationData { - challenge: string, - credentials: KittyboxWebauthnCredential[] -} - -async function webauthn_authenticate() { - const response = await fetch("/.kittybox/webauthn/pre_auth"); - const { challenge, credentials } = await response.json() as unknown as KittyboxWebauthnPreAuthenticationData; - - try { - return await navigator.credentials.get({ - publicKey: { - challenge: Uint8Array.from(challenge, (c) => c.charCodeAt(0)), - allowCredentials: credentials.map(cred => ({ - id: Uint8Array.from(cred.id, c => c.charCodeAt(0)), - type: cred.type - })), - timeout: WEBAUTHN_TIMEOUT - } - }) - } catch (e) { - console.error("WebAuthn authentication failed:", e); - alert("Using your authenticator failed. 
(Check the DevTools for details)"); - throw e; - } -} - -export async function submit_handler(e: SubmitEvent) { - e.preventDefault(); - if (e.target != null && e.target instanceof HTMLFormElement) { - const form = e.target as HTMLFormElement; - - let scopes: Array; - let scope_elem = form.elements.namedItem("scope"); - if (scope_elem == null) { - scopes = [] - } else if (scope_elem instanceof Element) { - scopes = ([scope_elem] as Array) - .filter((e: HTMLInputElement) => e.checked) - .map((e: HTMLInputElement) => e.value); - } else if (scope_elem instanceof RadioNodeList) { - scopes = (Array.from(scope_elem) as Array) - .filter((e: HTMLInputElement) => e.checked) - .map((e: HTMLInputElement) => e.value); - } else { - unreachable("HTMLFormControlsCollection returned something that's not null, Element or RadioNodeList") - } - - const authorization_request = { - response_type: (form.elements.namedItem("response_type") as HTMLInputElement).value, - client_id: (form.elements.namedItem("client_id") as HTMLInputElement).value, - redirect_uri: (form.elements.namedItem("redirect_uri") as HTMLInputElement).value, - state: (form.elements.namedItem("state") as HTMLInputElement).value, - code_challenge: (form.elements.namedItem("code_challenge") as HTMLInputElement).value, - code_challenge_method: (form.elements.namedItem("code_challenge_method") as HTMLInputElement).value, - // I would love to leave that as a list, but such is the form of - // IndieAuth. application/x-www-form-urlencoded doesn't have - // lists, so scopes are space-separated instead. It is annoying. - scope: scopes.length > 0 ? 
scopes.join(" ") : undefined, - }; - - let credential = null; - switch ((form.elements.namedItem("auth_method") as HTMLInputElement).value) { - case "password": - credential = (form.elements.namedItem("user_password") as HTMLInputElement).value; - if (credential.length == 0) { - alert("Please enter a password.") - return - } - break; - case "webauthn": - // credential = await webauthn_authenticate(); - alert("WebAuthn isn't implemented yet!") - return - break - default: - alert("Please choose an authentication method.") - return - } - - console.log("Authorization request:", authorization_request); - console.log("Authentication method:", credential); - - const body = JSON.stringify({ - request: authorization_request, - authorization_method: credential - }); - console.log(body); - - const response = await fetch(form.action, { - method: form.method, - body: body, - headers: { - "Content-Type": "application/json" - } - }); - - if (response.ok) { - let location = response.headers.get("Location"); - if (location != null) { - window.location.href = location - } else { - throw "Error: didn't return a location" - } - } - } else { - return - } - -} diff --git a/kittybox-rs/templates/javascript/src/lib.ts b/kittybox-rs/templates/javascript/src/lib.ts deleted file mode 100644 index 38ba65b..0000000 --- a/kittybox-rs/templates/javascript/src/lib.ts +++ /dev/null @@ -1,3 +0,0 @@ -export function unreachable(msg: string): never { - throw new Error(msg); -} diff --git a/kittybox-rs/templates/javascript/src/onboarding.ts b/kittybox-rs/templates/javascript/src/onboarding.ts deleted file mode 100644 index 0b455eb..0000000 --- a/kittybox-rs/templates/javascript/src/onboarding.ts +++ /dev/null @@ -1,120 +0,0 @@ -const firstOnboardingCard = "intro"; - -function switchOnboardingCard(card: string) { - (Array.from(document.querySelectorAll("form.onboarding > fieldset")) as HTMLElement[]) - .map((node: HTMLElement) => { - if (node.id == card) { - node.style.display = "block"; - } else { - 
node.style.display = "none"; - } - }); - - (Array.from(document.querySelectorAll("form.onboarding > ul#progressbar > li")) as HTMLElement[]) - .map(node => { - if (node.id == card) { - node.classList.add("active") - } else { - node.classList.remove("active") - } - }); -}; - -interface Window { - kittybox_onboarding: { - switchOnboardingCard: (card: string) => void - } -} - -window.kittybox_onboarding = { - switchOnboardingCard -}; - -(document.querySelector("form.onboarding > ul#progressbar") as HTMLElement).style.display = ""; -switchOnboardingCard(firstOnboardingCard); - -function switchCardOnClick(event: MouseEvent) { - if (event.target instanceof HTMLElement) { - if (event.target.dataset.card !== undefined) { - switchOnboardingCard(event.target.dataset.card) - } - } -} - -function multiInputAddMore(event: (MouseEvent | { target: HTMLElement })) { - if (event.target instanceof HTMLElement) { - let parent = event.target.parentElement; - if (parent !== null) { - let template = (parent.querySelector("template") as HTMLTemplateElement).content.cloneNode(true); - parent.prepend(template); - } - } -} - -(Array.from( - document.querySelectorAll( - "form.onboarding > fieldset button.switch_card" - ) -) as HTMLButtonElement[]) - .map(button => { - button.addEventListener("click", switchCardOnClick) - }); - -(Array.from( - document.querySelectorAll( - "form.onboarding > fieldset div.multi_input > button.add_more" - ) -) as HTMLButtonElement[]) - .map(button => { - button.addEventListener("click", multiInputAddMore) - multiInputAddMore({ target: button }); - }); - -const form = document.querySelector("form.onboarding") as HTMLFormElement; -console.log(form); -form.onsubmit = async (event: SubmitEvent) => { - console.log(event); - event.preventDefault(); - const form = event.target as HTMLFormElement; - const json = { - user: { - type: ["h-card"], - properties: { - name: [(form.querySelector("#hcard_name") as HTMLInputElement).value], - pronoun: (Array.from( - 
form.querySelectorAll("#hcard_pronouns") - ) as HTMLInputElement[]) - .map(input => input.value).filter(i => i != ""), - url: (Array.from(form.querySelectorAll("#hcard_url")) as HTMLInputElement[]) - .map(input => input.value).filter(i => i != ""), - note: [(form.querySelector("#hcard_note") as HTMLInputElement).value] - } - }, - first_post: { - type: ["h-entry"], - properties: { - content: [(form.querySelector("#first_post_content") as HTMLTextAreaElement).value] - } - }, - blog_name: (form.querySelector("#blog_name") as HTMLInputElement).value, - feeds: (Array.from( - form.querySelectorAll(".multi_input#custom_feeds > fieldset.feed") - ) as HTMLElement[]) - .map(form => { - return { - name: (form.querySelector("#feed_name") as HTMLInputElement).value, - slug: (form.querySelector("#feed_slug") as HTMLInputElement).value - } - }).filter(feed => feed.name == "" || feed.slug == "") - }; - - await fetch("/.kittybox/onboarding", { - method: "POST", - body: JSON.stringify(json), - headers: { "Content-Type": "application/json" } - }).then(response => { - if (response.status == 201) { - window.location.href = window.location.href; - } - }) -} diff --git a/kittybox-rs/templates/javascript/src/webauthn/register.ts b/kittybox-rs/templates/javascript/src/webauthn/register.ts deleted file mode 100644 index e69de29..0000000 diff --git a/kittybox-rs/templates/javascript/tsconfig.json b/kittybox-rs/templates/javascript/tsconfig.json deleted file mode 100644 index 18b94c7..0000000 --- a/kittybox-rs/templates/javascript/tsconfig.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "compilerOptions": { - /* Visit https://aka.ms/tsconfig to read more about this file */ - - /* Projects */ - // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ - // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. 
*/ - // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ - // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ - // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ - // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ - - /* Language and Environment */ - "target": "es2022", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ - // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ - // "jsx": "preserve", /* Specify what JSX code is generated. */ - // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ - // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ - // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ - // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ - // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ - // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ - // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ - // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ - // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ - - /* Modules */ - "module": "es2022", /* Specify what module code is generated. 
*/ - // "rootDir": "./", /* Specify the root folder within your source files. */ - // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ - // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ - // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ - // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ - // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ - // "types": [], /* Specify type package names to be included without being referenced in a source file. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ - // "resolveJsonModule": true, /* Enable importing .json files. */ - // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ - - /* JavaScript Support */ - // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ - // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ - // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ - - /* Emit */ - // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ - // "declarationMap": true, /* Create sourcemaps for d.ts files. */ - // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ - // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ - // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. 
If 'declaration' is true, also designates a file that bundles all .d.ts output. */ - "outDir": "./dist", /* Specify an output folder for all emitted files. */ - // "removeComments": true, /* Disable emitting comments. */ - // "noEmit": true, /* Disable emitting files from a compilation. */ - // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ - // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ - // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ - // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ - // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ - // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ - // "newLine": "crlf", /* Set the newline character for emitting files. */ - // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ - // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ - // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ - // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ - // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ - // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. 
*/ - - /* Interop Constraints */ - // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ - // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ - "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ - // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ - "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ - - /* Type Checking */ - "strict": true, /* Enable all strict type-checking options. */ - // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ - // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ - // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ - // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ - // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ - // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ - // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ - // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ - // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ - // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ - // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. 
*/ - // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ - // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ - // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ - // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ - // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ - // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ - // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ - - /* Completeness */ - // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ - "skipLibCheck": true /* Skip type checking all .d.ts files. */ - }, - "include": ["src/**/*"] -} diff --git a/kittybox-rs/templates/src/indieauth.rs b/kittybox-rs/templates/src/indieauth.rs deleted file mode 100644 index 6a46773..0000000 --- a/kittybox-rs/templates/src/indieauth.rs +++ /dev/null @@ -1,201 +0,0 @@ -use kittybox_indieauth::{AuthorizationRequest, Scope}; -use kittybox_util::auth::EnrolledCredential; - -markup::define! { - AuthorizationRequestPage( - request: AuthorizationRequest, - credentials: Vec, - app: Option, - user: serde_json::Value - ) { - script[type="module"] { - @markup::raw(r#"import { submit_handler } from "/.kittybox/static/indieauth.js"; - -document.getElementById("indieauth_page").addEventListener("submit", submit_handler); -"#) - } - main { - form #indieauth_page[action="/.kittybox/indieauth/auth/confirm", method="POST"] { - noscript { - p {"I know how annoyed you can be about scripts." } - p { "But WebAuthn doesn't work without JavaScript. And passwords are horribly insecure, and everyone knows it deep inside their heart." 
} - p { b { "Please enable JavaScript for this page to work properly 😭" } } - } - div #introduction { - h1."mini-h-card" { - "Hi, " - @if let Some(photo) = user["properties"]["photo"][0].as_str() { - img.user_avatar[src=photo]; - } else if let Some(photo) = user["properties"]["photo"][0].as_object() { - img[ - src=photo["value"].as_str().unwrap(), - alt=photo["alt"].as_str().unwrap(), - loading="lazy" - ]; - } - @user["properties"]["name"][0].as_str().unwrap_or("administrator") - } - - p."mini-h-card" { - @if let Some(icon) = app - .as_ref() - .and_then(|app| app["properties"]["logo"][0].as_str()) - { - img.app_icon[src=icon]; - } else if let Some(icon) = app - .as_ref() - .and_then(|app| app["properties"]["logo"][0].as_object()) - { - img.app_icon[src=icon["src"].as_str().unwrap(), alt=icon["alt"].as_str().unwrap()]; - } - span { - a[href=app - .as_ref() - .and_then(|app| app["properties"]["url"][0].as_str()) - .unwrap_or_else(|| request.client_id.as_str()) - ] { - @app - .as_ref() - .and_then(|app| app["properties"]["name"][0].as_str()) - .unwrap_or_else(|| request.client_id.as_str()) - } - " wants to confirm your identity." - } - } - } - - @if request.scope.is_some() { - p { - "An application just requested access to your website. This can give access to your data, including private content." - } - - p { - "You can review the permissions the application requested below. You are free to not grant any permissions that the application requested if you don't trust it, at the cost of potentially reducing its functionality." 
- } - } - - fieldset #scopes { - legend { "Permissions to grant the app:" } - div { - input[type="checkbox", disabled="true", checked="true"]; - label[for="identify"] { - "Identify you as the owner of " - @user["properties"]["uid"][0].as_str().unwrap() - } - } - @if let Some(scopes) = &request.scope { - @for scope in scopes.iter() { - div { - input[type="checkbox", name="scope", id=scope.as_ref(), value=scope.as_ref()]; - label[for=scope.as_ref()] { - @match scope { - Scope::Profile => { - "Access your publicly visible profile information" - } - Scope::Email => { - "Access your email address" - } - Scope::Create => { - "Create new content on your website" - } - Scope::Update => { - "Modify content on your website" - } - Scope::Delete => { - "Delete content on your website" - } - Scope::Media => { - "Interact with your media storage" - } - other => { - @markup::raw(format!( - "(custom or unknown scope) {}", - other.as_ref() - )) - } - } - } - } - } - } - } - - fieldset { - legend { "Choose your preferred authentication method:" } - div { - input[type="radio", - name="auth_method", - id="auth_with_webauthn", - disabled=!credentials.iter().any(|e| *e == EnrolledCredential::WebAuthn), - checked=credentials.iter().any(|e| *e == EnrolledCredential::WebAuthn) - ]; - label[for="auth_with_webauthn"] { "Use an authenticator device to log in" } - } - div { - input[type="radio", - name="auth_method", value="password", - id="auth_with_password", - disabled=!credentials.iter().any(|e| *e == EnrolledCredential::Password), - checked=credentials.iter().all(|e| *e == EnrolledCredential::Password) - ]; - label[for="auth_with_password"] { "Password" } - br; - input[type="password", name="user_password", id="user_password"]; - } - } - - input[type="submit", value="Authenticate"]; - br; - - details { - summary { "View detailed data about this request" } - - p { - "More info about meanings of these fields can be found in " - 
a[href="https://indieauth.spec.indieweb.org/20220212/#authorization-request"] { - "the IndieAuth specification" - } ", which this webpage uses." - } - fieldset { - div { - label[for="response_type"] { "Response type (will most likely be \"code\")" } - br; - input[name="response_type", id="response_type", readonly, - value=request.response_type.as_str()]; - } - div { - label[for="state"] { "Request state" } - br; - input[name="state", id="state", readonly, - value=request.state.as_ref()]; - } - div { - label[for="client_id"] { "Client ID" } - br; - input[name="client_id", id="client_id", readonly, - value=request.client_id.as_str()]; - } - div { - label[for="redirect_uri"] { "Redirect URI" } - br; - input[name="redirect_uri", id="redirect_uri", readonly, - value=request.redirect_uri.as_str()]; - } - div { - label[for="code_challenge"] { "PKCE code challenge" } - br; - input[name="code_challenge", id="code_challenge", readonly, - value=request.code_challenge.as_str()]; - } - div { - label[for="code_challenge_method"] { "PKCE method (should be S256)" } - br; - input[name="code_challenge_method", id="code_challenge_method", readonly, - value=request.code_challenge.method().as_str()]; - } - } - } - } - } - } -} diff --git a/kittybox-rs/templates/src/lib.rs b/kittybox-rs/templates/src/lib.rs deleted file mode 100644 index 8d5d5fa..0000000 --- a/kittybox-rs/templates/src/lib.rs +++ /dev/null @@ -1,367 +0,0 @@ -mod templates; -pub use templates::{ErrorPage, MainPage, Template}; -mod onboarding; -pub use onboarding::OnboardingPage; -mod indieauth; -pub use indieauth::AuthorizationRequestPage; -mod login; -pub use login::LoginPage; -mod mf2; -pub use mf2::{Entry, VCard, Feed, Food, POSTS_PER_PAGE}; - -pub mod assets { - use axum::response::{IntoResponse, Response}; - use axum::extract::Path; - use axum::http::StatusCode; - use axum::http::header::{CONTENT_TYPE, CONTENT_ENCODING, CACHE_CONTROL}; - - const ASSETS: include_dir::Dir<'static> = 
include_dir::include_dir!("$OUT_DIR/"); - const CACHE_FOR_A_DAY: &str = "max-age=86400"; - const GZIP: &str = "gzip"; - - pub async fn statics( - Path(path): Path - ) -> Response { - let content_type: &'static str = if path.ends_with(".js") { - "application/javascript" - } else if path.ends_with(".css") { - "text/css" - } else if path.ends_with(".html") { - "text/html; charset=\"utf-8\"" - } else { - "application/octet-stream" - }; - - match ASSETS.get_file(path.clone() + ".gz") { - Some(file) => (StatusCode::OK, - [(CONTENT_TYPE, content_type), - (CONTENT_ENCODING, GZIP), - (CACHE_CONTROL, CACHE_FOR_A_DAY)], - file.contents()).into_response(), - None => match ASSETS.get_file(path) { - Some(file) => (StatusCode::OK, - [(CONTENT_TYPE, content_type), - (CACHE_CONTROL, CACHE_FOR_A_DAY)], - file.contents()).into_response(), - None => StatusCode::NOT_FOUND.into_response() - } - } - } -} - -#[cfg(test)] -mod tests { - use faker_rand::en_us::internet::Domain; - use faker_rand::lorem::Word; - use microformats::types::{Document, Item, PropertyValue, Url}; - use serde_json::json; - use std::cell::RefCell; - use std::rc::Rc; - - enum PostType { - Note, - Article, - ReplyTo(serde_json::Value), - ReplyToLink(String), - LikeOf(serde_json::Value), - LikeOfLink(String), - } - - fn gen_hcard(domain: &str) -> serde_json::Value { - use faker_rand::en_us::names::FirstName; - - json!({ - "type": ["h-card"], - "properties": { - "name": [rand::random::().to_string()], - "photo": [format!("https://{domain}/media/me.png")], - "uid": [format!("https://{domain}/")], - "url": [format!("https://{domain}/")] - } - }) - } - - fn gen_random_post(domain: &str, kind: PostType) -> serde_json::Value { - use faker_rand::lorem::{Paragraph, Sentence}; - - fn html(content: Paragraph) -> serde_json::Value { - json!({ - "html": format!("

{}

", content), - "value": content.to_string() - }) - } - - let uid = format!( - "https://{domain}/posts/{}-{}-{}", - rand::random::(), - rand::random::(), - rand::random::() - ); - let dt = chrono::offset::Local::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true); - - match kind { - PostType::Note => { - let content = rand::random::(); - - json!({ - "type": ["h-entry"], - "properties": { - "content": [html(content)], - "published": [dt], - "uid": [&uid], "url": [&uid], - "author": [gen_hcard(domain)] - } - }) - } - PostType::Article => { - let content = rand::random::(); - let name = rand::random::(); - - json!({ - "type": ["h-entry"], - "properties": { - "content": [html(content)], - "published": [dt], - "uid": [&uid], "url": [&uid], - "author": [gen_hcard(domain)], - "name": [name.to_string()] - } - }) - } - PostType::ReplyTo(ctx) => { - let content = rand::random::(); - - json!({ - "type": ["h-entry"], - "properties": { - "content": [html(content)], - "published": [dt], - "uid": [&uid], "url": [&uid], - "author": [gen_hcard(domain)], - "in-reply-to": [{ - "type": ["h-cite"], - "properties": ctx["properties"] - }] - } - }) - } - PostType::ReplyToLink(link) => { - let content = rand::random::(); - - json!({ - "type": ["h-entry"], - "properties": { - "content": [html(content)], - "published": [dt], - "uid": [&uid], "url": [&uid], - "author": [gen_hcard(domain)], - "in-reply-to": [link] - } - }) - } - PostType::LikeOf(ctx) => { - json!({ - "type": ["h-entry"], - "properties": { - "published": [dt], - "author": [gen_hcard(domain)], - "uid": [&uid], "url": [&uid], - "like-of": [{ - "type": ["h-cite"], - "properties": ctx["properties"] - }] - } - }) - } - PostType::LikeOfLink(link) => { - json!({ - "type": ["h-entry"], - "properties": { - "published": [dt], - "author": [gen_hcard(domain)], - "uid": [&uid], "url": [&uid], - "like-of": [link] - } - }) - } - } - } - - fn check_dt_published(mf2: &serde_json::Value, item: &Rc>) { - use 
microformats::types::temporal::Value as TemporalValue; - - let _item = item.borrow(); - let props = _item.properties.borrow(); - assert!(props.contains_key("published")); - - if let Some(PropertyValue::Temporal(TemporalValue::Timestamp(item))) = - props.get("published").and_then(|v| v.first()) - { - use chrono::{DateTime, FixedOffset, NaiveDateTime}; - - // Faithfully reconstruct the original datetime - // I wonder why not just have an Enum that would - // get you either date, time or a datetime, - // potentially with an offset? - let offset = item.as_offset().unwrap().data; - let ndt: NaiveDateTime = item.as_date().unwrap().data - .and_time(item.as_time().unwrap().data) - // subtract the offset here, since we will add it back - - offset; - let dt = DateTime::::from_utc(ndt, offset); - - let expected: DateTime = chrono::DateTime::parse_from_rfc3339( - mf2["properties"]["published"][0].as_str().unwrap(), - ) - .unwrap(); - - assert_eq!(dt, expected); - } else { - unreachable!() - } - } - - fn check_e_content(mf2: &serde_json::Value, item: &Rc>) { - let _item = item.borrow(); - let props = _item.properties.borrow(); - assert!(props.contains_key("content")); - - if let Some(PropertyValue::Fragment(content)) = props.get("content").and_then(|v| v.first()) - { - assert_eq!( - content.html, - mf2["properties"]["content"][0]["html"].as_str().unwrap() - ); - } else { - unreachable!() - } - } - - #[test] - #[ignore = "see https://gitlab.com/maxburon/microformats-parser/-/issues/7"] - fn test_note() { - let mf2 = gen_random_post(&rand::random::().to_string(), PostType::Note); - - let html = crate::mf2::Entry { post: &mf2 }.to_string(); - - let url: Url = mf2 - .pointer("/properties/uid/0") - .and_then(|i| i.as_str()) - .and_then(|u| u.parse().ok()) - .unwrap(); - let parsed: Document = microformats::from_html(&html, url.clone()).unwrap(); - - if let Some(PropertyValue::Item(item)) = parsed.get_item_by_url(&url) { - let _item = item.borrow(); - let props = 
_item.properties.borrow(); - - check_e_content(&mf2, &item); - check_dt_published(&mf2, &item); - assert!(props.contains_key("uid")); - assert!(props.contains_key("url")); - assert!(props - .get("url") - .unwrap() - .iter() - .any(|i| i == props.get("uid").and_then(|v| v.first()).unwrap())); - // XXX: fails because of https://gitlab.com/maxburon/microformats-parser/-/issues/7 - assert!(!props.contains_key("name")); - } else { - unreachable!() - } - } - - #[test] - fn test_article() { - let mf2 = gen_random_post(&rand::random::().to_string(), PostType::Article); - let html = crate::mf2::Entry { post: &mf2 }.to_string(); - let url: Url = mf2 - .pointer("/properties/uid/0") - .and_then(|i| i.as_str()) - .and_then(|u| u.parse().ok()) - .unwrap(); - let parsed: Document = microformats::from_html(&html, url.clone()).unwrap(); - - if let Some(PropertyValue::Item(item)) = parsed.get_item_by_url(&url) { - let _item = item.borrow(); - let props = _item.properties.borrow(); - - check_e_content(&mf2, &item); - check_dt_published(&mf2, &item); - assert!(props.contains_key("uid")); - assert!(props.contains_key("url")); - assert!(props - .get("url") - .unwrap() - .iter() - .any(|i| i == props.get("uid").and_then(|v| v.first()).unwrap())); - assert!(props.contains_key("name")); - if let Some(PropertyValue::Plain(name)) = props.get("name").and_then(|v| v.first()) { - assert_eq!( - name, - mf2.pointer("/properties/name/0") - .and_then(|v| v.as_str()) - .unwrap() - ); - } else { - panic!("Name wasn't a plain property!"); - } - } else { - unreachable!() - } - } - - #[test] - fn test_like_of() { - for likeof in [ - PostType::LikeOf(gen_random_post( - &rand::random::().to_string(), - PostType::Note, - )), - PostType::LikeOfLink(format!( - "https://{}/posts/{}-{}-{}", - &rand::random::(), - &rand::random::(), - &rand::random::(), - &rand::random::(), - )), - ] { - let mf2 = gen_random_post(&rand::random::().to_string(), likeof); - let url: Url = mf2 - .pointer("/properties/uid/0") - 
.and_then(|i| i.as_str()) - .and_then(|u| u.parse().ok()) - .unwrap(); - let html = crate::mf2::Entry { post: &mf2 }.to_string(); - let parsed: Document = microformats::from_html(&html, url.clone()).unwrap(); - - if let Some(item) = parsed.items.get(0) { - let _item = item.borrow(); - let props = _item.properties.borrow(); - - check_dt_published(&mf2, item); - assert!(props.contains_key("like-of")); - match props.get("like-of").and_then(|v| v.first()) { - Some(PropertyValue::Url(url)) => { - assert_eq!( - url, - &mf2.pointer("/properties/like-of/0") - .and_then(|i| i.as_str()) - .or_else(|| mf2 - .pointer("/properties/like-of/0/properties/uid/0") - .and_then(|i| i.as_str())) - .and_then(|u| u.parse::().ok()) - .unwrap() - ); - } - Some(PropertyValue::Item(_cite)) => { - todo!() - } - other => panic!("Unexpected value in like-of: {:?}", other), - } - } else { - unreachable!() - } - } - } -} diff --git a/kittybox-rs/templates/src/login.rs b/kittybox-rs/templates/src/login.rs deleted file mode 100644 index 042c308..0000000 --- a/kittybox-rs/templates/src/login.rs +++ /dev/null @@ -1,17 +0,0 @@ -markup::define! { - LoginPage { - form[method="POST"] { - h1 { "Sign in with your website" } - p { - "Signing in to Kittybox might allow you to view private content " - "intended for your eyes only." - } - - section { - label[for="url"] { "Your website URL" } - input[id="url", name="url", placeholder="https://example.com/"]; - input[type="submit"]; - } - } - } -} diff --git a/kittybox-rs/templates/src/mf2.rs b/kittybox-rs/templates/src/mf2.rs deleted file mode 100644 index 33cd84a..0000000 --- a/kittybox-rs/templates/src/mf2.rs +++ /dev/null @@ -1,478 +0,0 @@ -use ellipse::Ellipse; - -pub static POSTS_PER_PAGE: usize = 20; - -/// Return a pretty location specifier from a geo: URI. 
-fn decode_geo_uri(uri: &str) -> String { - if let Some(part) = uri.split(':').collect::>().get(1) { - if let Some(part) = part.split(';').next() { - let mut parts = part.split(','); - let lat = parts.next().unwrap(); - let lon = parts.next().unwrap(); - - // TODO - format them as proper latitude and longitude - format!("{}, {}", lat, lon) - } else { - uri.to_string() - } - } else { - uri.to_string() - } -} - -markup::define! { - Entry<'a>(post: &'a serde_json::Value) { - @if post.pointer("/properties/like-of").is_none() && post.pointer("/properties/bookmark-of").is_none() { - @FullEntry { post } - } else { - // Show a mini-post. - @MiniEntry { post } - } - } - MiniEntry<'a>(post: &'a serde_json::Value) { - article."h-entry mini-entry" { - @if let Some(author) = post["properties"]["author"][0].as_object() { - span."mini-h-card"."u-author" { - a."u-author"[href=author["properties"]["uid"][0].as_str().unwrap()] { - @if let Some(photo) = author["properties"]["photo"][0].as_str() { - img[src=photo, loading="lazy"]; - } else if author["properties"]["photo"][0].is_object() { - img[ - src=author["properties"]["photo"][0]["value"].as_str().unwrap(), - alt=author["properties"]["photo"][0]["alt"].as_str().unwrap(), - loading="lazy" - ]; - } - @author["properties"]["name"][0].as_str().unwrap() - } - } - @if let Some(likeof) = post["properties"]["like-of"][0].as_str() { - " " - span."like-icon"["aria-label"="liked"] { - span."like-icon-label"["aria-hidden"="true"] { - "❀️" - } - } - " " - a."u-like-of"[href=likeof] { @likeof } - } else if let Some(likeof) = post["properties"]["like-of"][0].as_object() { - a."u-like-of"[href=likeof["properties"]["url"][0].as_str().unwrap()] { - @likeof["properties"]["name"][0] - .as_str() - .unwrap_or_else(|| likeof["properties"]["url"][0].as_str().unwrap()) - } - } - @if let Some(bookmarkof) = post["properties"]["bookmark-of"][0].as_str() { - " πŸ”– " - a."u-bookmark-of"[href=bookmarkof] { @bookmarkof } - } else if let Some(bookmarkof) = 
post["properties"]["bookmark-of"][0].as_object() { - a."u-bookmark-of"[href=bookmarkof["properties"]["url"][0].as_str().unwrap()] { - @bookmarkof["properties"]["name"][0] - .as_str() - .unwrap_or_else(|| bookmarkof["properties"]["url"][0].as_str().unwrap()) - } - } - " " - a."u-url"."u-uid"[href=post["properties"]["uid"][0].as_str().unwrap()] { - @if let Some(published) = post["properties"]["published"][0].as_str() { - time."dt-published"[datetime=published] { - @chrono::DateTime::parse_from_rfc3339(published) - .map(|dt| dt.format("on %a %b %e %T %Y").to_string()) - .unwrap_or("sometime in the past".to_string()) - } - } else { - "sometime in the past" - } - } - } - } - } - FullEntry<'a>(post: &'a serde_json::Value) { - article."h-entry" { - header.metadata { - @if let Some(name) = post["properties"]["name"][0].as_str() { - h1."p-name" { @name } - } - @if let Some(author) = post["properties"]["author"][0].as_object() { - section."mini-h-card" { - a.larger."u-author"[href=author["properties"]["uid"][0].as_str().unwrap()] { - @if let Some(photo) = author["properties"]["photo"][0].as_str() { - img[src=photo, loading="lazy"]; - } else if let Some(photo) = author["properties"]["photo"][0].as_object() { - img[ - src=photo["value"].as_str().unwrap(), - alt=photo["alt"].as_str().unwrap(), - loading="lazy" - ]; - } - - @author["properties"]["name"][0].as_str().unwrap() - } - } - } - div { - span { - a."u-url"."u-uid"[href=post["properties"]["uid"][0].as_str().unwrap()] { - @if let Some(published) = post["properties"]["published"][0].as_str() { - time."dt-published"[datetime=published] { - @chrono::DateTime::parse_from_rfc3339(published) - .map(|dt| dt.format("%a %b %e %T %Y").to_string()) - .unwrap_or("sometime in the past".to_string()) - } - } - } - } - @if post["properties"]["visibility"][0].as_str().unwrap_or("public") != "public" { - span."p-visibility"[value=post["properties"]["visibility"][0].as_str().unwrap()] { - 
@post["properties"]["visibility"][0].as_str().unwrap() - } - } - @if post["properties"]["category"].is_array() { - span { - ul.categories { - "Tagged: " - @for cat in post["properties"]["category"].as_array().unwrap() { - li."p-category" { @cat.as_str().unwrap() } - } - } - } - } - @if post["properties"]["in-reply-to"].is_array() { - span { - "In reply to: " - ul.replyctx { - @for ctx in post["properties"]["in-reply-to"].as_array().unwrap() { - @if let Some(ctx) = ctx.as_str() { - li { - a."u-in-reply-to"[href=ctx] { - @ctx.truncate_ellipse(48).as_ref() - } - } - } else if let Some(ctx) = ctx.as_object() { - li { - a."u-in-reply-to"[href=ctx["properties"]["uid"][0] - .as_str() - .unwrap_or_else(|| ctx["properties"]["url"][0].as_str().unwrap())] - { - @ctx["properties"]["uid"][0] - .as_str() - .unwrap_or_else(|| ctx["properties"]["url"][0].as_str().unwrap()) - .truncate_ellipse(48) - .as_ref() - } - } - } - } - } - } - } - } - @if post["properties"]["url"].as_array().unwrap().len() > 1 { - hr; - ul { - "Pretty permalinks for this post:" - @for url in post["properties"]["url"].as_array().unwrap().iter().filter(|i| **i != post["properties"]["uid"][0]).map(|i| i.as_str().unwrap()) { - li { - a."u-url"[href=url] { @url } - } - } - } - } - @if let Some(links) = post["properties"]["syndication"].as_array() { - @if !links.is_empty() { - hr; - ul { - "Also published on:" - @for url in links.iter().filter_map(|i| i.as_str()) { - li { a."u-syndication"[href=url] { @url } } - } - } - } - } - @if post["properties"]["location"].is_array() || post["properties"]["checkin"].is_array() { - div { - @if post["properties"]["checkin"].is_array() { - span { - "Check-in to: " - @if post["properties"]["checkin"][0].is_string() { - // It's a URL - a."u-checkin"[href=post["properties"]["checkin"][0].as_str().unwrap()] { - @post["properties"]["checkin"][0].as_str().unwrap().truncate_ellipse(24).as_ref() - } - } else { - 
a."u-checkin"[href=post["properties"]["checkin"][0]["properties"]["uid"][0].as_str().unwrap()] { - @post["properties"]["checkin"][0]["properties"]["name"][0].as_str().unwrap() - } - } - } - } - @if post["properties"]["location"].is_array() { - span { - "Location: " - @if post["properties"]["location"][0].is_string() { - // It's a geo: URL - // We need to decode it - a."u-location"[href=post["properties"]["location"][0].as_str().unwrap()] { - @decode_geo_uri(post["properties"]["location"][0].as_str().unwrap()) - } - } else { - // It's an inner h-geo object - a."u-location"[href=post["properties"]["location"][0]["value"].as_str().map(|x| x.to_string()).unwrap_or(format!("geo:{},{}", post["properties"]["location"][0]["properties"]["latitude"][0].as_str().unwrap(), post["properties"]["location"][0]["properties"]["longitude"][0].as_str().unwrap()))] { - // I'm a lazy bitch - @decode_geo_uri(&post["properties"]["location"][0]["value"].as_str().map(|x| x.to_string()).unwrap_or(format!("geo:{},{}", post["properties"]["location"][0]["properties"]["latitude"][0].as_str().unwrap(), post["properties"]["location"][0]["properties"]["longitude"][0].as_str().unwrap()))) - } - } - } - } - } - } - @if post["properties"]["ate"].is_array() || post["properties"]["drank"].is_array() { - div { - @if post["properties"]["ate"].is_array() { - span { ul { - "Ate:" - @for food in post["properties"]["ate"].as_array().unwrap() { - li { - @if food.is_string() { - // If this is a string, it's a URL. - a."u-ate"[href=food.as_str().unwrap()] { - @food.as_str().unwrap().truncate_ellipse(24).as_ref() - } - } else { - // This is a rich food object (mm, sounds tasty! 
I wanna eat something tasty) - a."u-ate"[href=food["properties"]["uid"][0].as_str().unwrap_or("#")] { - @food["properties"]["name"][0].as_str() - .unwrap_or(food["properties"]["uid"][0].as_str().unwrap_or("#").truncate_ellipse(24).as_ref()) - } - } - } - } - } } - } - @if post["properties"]["drank"].is_array() { - span { ul { - "Drank:" - @for food in post["properties"]["drank"].as_array().unwrap() { - li { - @if food.is_string() { - // If this is a string, it's a URL. - a."u-drank"[href=food.as_str().unwrap()] { - @food.as_str().unwrap().truncate_ellipse(24).as_ref() - } - } else { - // This is a rich food object (mm, sounds tasty! I wanna eat something tasty) - a."u-drank"[href=food["properties"]["uid"][0].as_str().unwrap_or("#")] { - @food["properties"]["name"][0].as_str() - .unwrap_or(food["properties"]["uid"][0].as_str().unwrap_or("#").truncate_ellipse(24).as_ref()) - } - } - } - } - } } - } - } - } - } - @PhotoGallery { photos: post["properties"]["photo"].as_array() } - @if post["properties"]["content"][0]["html"].is_string() { - main."e-content" { - @markup::raw(post["properties"]["content"][0]["html"].as_str().unwrap().trim()) - } - } - @WebInteractions { post } - } - } - VCard<'a>(card: &'a serde_json::Value) { - article."h-card" { - @if card["properties"]["photo"][0].is_string() { - img."u-photo"[src=card["properties"]["photo"][0].as_str().unwrap()]; - } else if card["properties"]["photo"][0].is_object() { - img."u-photo"[ - src=card["properties"]["photo"][0]["value"].as_str().unwrap(), - alt=card["properties"]["photo"][0]["alt"].as_str().unwrap() - ]; - } - h1 { - a."u-url"."u-uid"."p-name"[href=card["properties"]["uid"][0].as_str().unwrap()] { - @card["properties"]["name"][0].as_str().unwrap() - } - } - @if card["properties"]["pronoun"].is_array() { - span { - "(" - @for (i, pronoun) in card["properties"]["pronoun"].as_array().unwrap().iter().filter_map(|v| v.as_str()).enumerate() { - span."p-pronoun" { - @pronoun - } - // Insert commas between multiple 
sets of pronouns - @if i < (card["properties"]["pronoun"].as_array().unwrap().len() - 1) {", "} - } - ")" - } - } - @if card["properties"]["note"].is_array() { - p."p-note" { - @card["properties"]["note"][0]["value"].as_str().unwrap_or_else(|| card["properties"]["note"][0].as_str().unwrap()) - } - } - @if card["properties"]["url"].is_array() { - ul { - "Can be found elsewhere at:" - @for url in card["properties"]["url"] - .as_array() - .unwrap() - .iter() - .filter_map(|v| v.as_str()) - .filter(|v| v != &card["properties"]["uid"][0].as_str().unwrap()) - .filter(|v| !card["properties"]["author"][0].as_str().is_some_and(|a| v.starts_with(a))) - { - li { a."u-url"[href=url, rel="me"] { @url } } - } - } - } - } - } - Food<'a>(food: &'a serde_json::Value) { - article."h-food" { - header.metadata { - h1 { - a."p-name"."u-url"[href=food["properties"]["url"][0].as_str().unwrap()] { - @food["properties"]["name"][0].as_str().unwrap() - } - } - } - @PhotoGallery { photos: food["properties"]["photo"].as_array() } - } - } - Feed<'a>(feed: &'a serde_json::Value, cursor: Option<&'a str>) { - div."h-feed" { - div.metadata { - @if feed["properties"]["name"][0].is_string() { - h1."p-name".titanic { - a[href=feed["properties"]["uid"][0].as_str().unwrap(), rel="feed"] { - @feed["properties"]["name"][0].as_str().unwrap() - } - } - } - } - @if feed["children"].is_array() { - @for child in feed["children"].as_array().unwrap() { - @match child["type"][0].as_str().unwrap() { - "h-entry" => { @Entry { post: child } } - "h-feed" => { @Feed { feed: child, cursor: None } } - "h-food" => { @Food { food: child } } - //"h-event" => { } - "h-card" => { @VCard { card: child } } - something_else => { - p { - "There's supposed to be an " - @something_else - " object here. But Kittybox can't render it right now." - small { "Sorry! 
TToTT" } - } - } - } - } - } - @if let Some(cursor) = cursor { - a[rel="prev", href=format!("{}?after={}", feed["properties"]["uid"][0].as_str().unwrap(), cursor)] { - "Older posts" - } - } else { - p { - "Looks like you reached the end. Wanna jump back to the " - a[href=feed["properties"]["uid"][0].as_str().unwrap()] { - "beginning" - } "?" - } - } - } - } - - //======================================= - // Components library - //======================================= - PhotoGallery<'a>(photos: Option<&'a Vec>) { - @if let Some(photos) = photos { - @for photo in photos.iter() { - @if let Some(photo) = photo.as_str() { - img."u-photo"[src=photo, loading="lazy"]; - } else if photo.is_object() { - @if let Some(thumbnail) = photo["thumbnail"].as_str() { - a."u-photo"[href=photo["value"].as_str().unwrap()] { - img[src=thumbnail, - loading="lazy", - alt=photo["alt"].as_str().unwrap_or("") - ]; - } - } else { - img."u-photo"[src=photo["value"].as_str().unwrap(), - loading="lazy", - alt=photo["alt"].as_str().unwrap_or("") - ]; - } - } - } - } - } - WebInteractions<'a>(post: &'a serde_json::Value) { - footer.webinteractions { - p[style="display: none", "aria-hidden"="false"] { - "Webmention counters:" - } - ul.counters { - li { - span."icon like-icon"["aria-label"="likes"] { - span."like-icon-label"["aria-hidden"="true"] { - "❀️" - } - } - span.counter { @post["properties"]["like"].as_array().map(|a| a.len()).unwrap_or(0) } - } - li { - span.icon["aria-label"="replies"] { "πŸ’¬" } - span.counter { @post["properties"]["comment"].as_array().map(|a| a.len()).unwrap_or(0) } - } - li { - span.icon["aria-label"="reposts"] { "πŸ”„" } - span.counter { @post["properties"]["repost"].as_array().map(|a| a.len()).unwrap_or(0) } - } - li { - span.icon["aria-label"="bookmarks"] { "πŸ”–" } - span.counter { @post["properties"]["bookmark"].as_array().map(|a| a.len()).unwrap_or(0) } - } - } - /*@if ( - post["properties"]["like"].as_array().map(|a| a.len()).unwrap_or(0) - + 
post["properties"]["bookmark"].as_array().map(|a| a.len()).unwrap_or(0) - + post["properties"]["repost"].as_array().map(|a| a.len()).unwrap_or(0) - + post["properties"]["comment"].as_array().map(|a| a.len()).unwrap_or(0) - ) > 0 { - details { - summary { "Show comments and reactions" } - // TODO actually render facepiles and comments - @if let Some(likes) = post["properties"]["like"].as_array() { - @if !likes.is_empty() { - // Show a facepile of likes for a post - } - } - @if let Some(bookmarks) = post["properties"]["bookmark"].as_array() { - @if !bookmarks.is_empty() { - // Show a facepile of bookmarks for a post - } - } - @if let Some(reposts) = post["properties"]["repost"].as_array() { - @if !reposts.is_empty() { - // Show a facepile of reposts for a post - } - } - @if let Some(comments) = post["properties"]["comment"].as_array() { - @for comment in comments.iter() { - // Show all the comments recursively (so we could do Salmention with them) - } - } - } - }*/ - } - } -} diff --git a/kittybox-rs/templates/src/onboarding.rs b/kittybox-rs/templates/src/onboarding.rs deleted file mode 100644 index e2f07df..0000000 --- a/kittybox-rs/templates/src/onboarding.rs +++ /dev/null @@ -1,196 +0,0 @@ -markup::define! { - OnboardingPage { - h1[style="text-align: center"] { - "Welcome to Kittybox" - } - script[type="module", src="/.kittybox/static/onboarding.js"] {} - link[rel="stylesheet", href="/.kittybox/static/onboarding.css"]; - form.onboarding[action="", method="POST"] { - noscript { - p { - "Ok, let's be honest. Most of this software doesn't require JS to be enabled " - "to view pages (and in some cases, even edit them if logged in)." - } - p { "This page is a little bit different. 
It uses JavaScript to provide interactive features, such as:" } - ul { - li { "Multiple-input questions" } - li { "Answers spanning multiple fields" } - li { "Preview of files being uploaded" } - li { "Pretty pagination so you won't feel overwhelmed" } - } - p { - "Sadly, it's very hard or even impossible to recreate this without any JavaScript. " - "Good news though - the code is " b { "open-source AND free software" } - " (under GNU AGPLv3) " - "and I promise to not obfuscate it or minify it. " - a[href="/.kittybox/static/onboarding.js"] { "Here" } - "'s the link - you can try reading it so you'll be 200% sure " - "it won't steal your cookies or turn your kitty into a soulless monster." - @markup::raw("") - } - hr; - p { - "In other words: " - b { "please enable JavaScript for this page to work properly." } - small { "sorry T__T" } - } - } - ul #progressbar[style="display: none"] { - li #intro { "Introduction" } - li #hcard { "Your profile" } - li #settings { "Your website" } - li #firstpost { "Your first post" } - } - fieldset #intro[style="display: none"] { - legend { "Introduction" } - p { - "Kittybox is a CMS that can act as a member of the IndieWeb. " - "IndieWeb is a global distributed social network built on top of open Web standards " - "and composed of blogs around the Internet supporting these standards." - } - p { "There is no registration or centralized database of any sort - everyone owns their data and is responsible for it." } - p { "If you're seeing this page, it looks like your configuration is correct and we can proceed with the setup." } - - div.switch_card_buttons { - button.switch_card.next_card[type="button", "data-card"="hcard"] { "Next" } - } - } - - fieldset #hcard[style="display: none"] { - legend { "Your profile" } - p { "An h-card is an IndieWeb social profile, and we're gonna make you one!" 
} - p { "Thanks to some clever markup, it will be readable by both humans and machines looking at your homepage."} - p { - "If you make a mistake, don't worry, you're gonna be able to edit this later." - "The only mandatory field is your name." - } - - div.form_group { - label[for="hcard_name"] { "Your name" } - input #hcard_name[name="hcard_name", placeholder="Your name"]; - small { - "No need to write the name as in your passport, this is not a legal document " - "- just write how you want to be called on the network. This name will be also " - "shown whenever you leave a comment on someone else's post using your website." - } - } - - div.form_group { - label[for="pronouns"] { "Your pronouns" } - div.multi_input #pronouns { - template { - input #hcard_pronouns[name="hcard_pronouns", placeholder="they/them?"]; - } - button.add_more[type="button", "aria-label"="Add more"] { "[+] Add more" } - } - small { - "Write which pronouns you use for yourself. It's a free-form field " - "so don't feel constrained - but keep it compact, as it'll be shown in a lot of places." - } - } - - div.form_group { - label[for="urls"] { "Links to other pages of you" } - div.multi_input #urls { - template { - input #hcard_url[name="hcard_url", placeholder="https://example.com/"]; - } - button.add_more[type="button", "aria-label"="Add more"] { "[+] Add more" } - } - small { - "These URLs will help your readers find you elsewhere and will help you that whoever owns these pages owns your website too" - " in case the links are mutual. So make sure to put a link to your site in your other social profiles!" - } - } - - div.form_group { - label[for="hcard_note"] { "A little about yourself" } - textarea #hcard_note[name="hcard_note", placeholder="Loves cooking, plants, cats, dogs and racoons."] {} - small { "A little bit of introduction. Just one paragraph, and note, you can't use HTML here (yet)." 
} - // TODO: HTML e-note instead of p-note - } - - // TODO: u-photo upload - needs media endpoint cooperation - - div.switch_card_buttons { - button.switch_card.prev_card[type="button", "data-card"="intro"] { "Previous" } - button.switch_card.next_card[type="button", "data-card"="settings"] { "Next" } - } - } - - fieldset #settings[style="display: none"] { - legend { "Your website" } - p { "Ok, it's nice to know you more. Tell me about what you'll be writing and how you want to name your blog." } - // TODO: site-name, saved to settings - - div.form_group { - label[for="blog_name"] { "Your website's name"} - input #blog_name[name="blog_name", placeholder="Kitty Box!"]; - small { "It'll get shown in the title of your blog, in the upper left corner!" } - } - - div.form_group { - label[for="custom_feeds"] { "Custom feeds" } - small { - p { - "You can set up custom feeds to post your stuff to. " - "This is a nice way to organize stuff into huge folders, like all your trips or your quantified-self data." - } - p { - "Feeds can be followed individually, which makes it easy for users who are interested in certain types " - "of content you produce to follow your adventures in certain areas of your life without cluttering their " - "readers." - } - p { - "We will automatically create some feeds for you aside from these so you won't have to - including a main feed, " - "address book (for venues you go to and people you talk about), a cookbook for your recipes and some more." - // TODO: Put a link to documentation explaining feeds in more detail. - } - } - div.multi_input #custom_feeds { - template { - fieldset.feed { - div.form_group { - label[for="feed_name"] { "Name" } - input #feed_name[name="feed_name", placeholder="My cool feed"]; - small { "This is a name that will identify this feed to the user. Make it short and descriptive!" 
} - } - div.form_group { - label[for="feed_slug"] { "Slug" } - input #feed_slug[name="feed_slug", placeholder="my-cool-feed"]; - small { "This will form a pretty URL for the feed. For example: https://example.com/feeds/my-cool-feed" } - } - } - } - button.add_more[type="button", "aria-label"="Add more"] { "[+] Add More" } - } - } - - div.switch_card_buttons { - button.switch_card.prev_card[type="button", "data-card"="hcard"] { "Previous" } - button.switch_card.next_card[type="button", "data-card"="firstpost"] { "Next" } - } - } - - fieldset #firstpost[style="display: none"] { - legend { "Your first post" } - p { "Maybe you should start writing your first posts now. How about a short note?" } - p { "A note is a short-form post (not unlike a tweet - but without the actual character limit) that doesn't bear a title." } - p { - "Consider telling more about yourself, your skills and interests in this note " - @markup::raw("—") - " though you're free to write anything you want. (By the way, you can use " - a[href="https://daringfireball.net/projects/markdown/syntax"] { "Markdown" } - " here to spice up your note!)" - } - - textarea #first_post_content[style="width: 100%; height: 8em", placeholder="Hello! I am really excited about #IndieWeb"] {} - - div.switch_card_buttons { - button.switch_card.prev_card[type="button", "data-card"="settings"] { "Previous" } - button[type="submit"] { "Finish" } - } - } - } - } -} diff --git a/kittybox-rs/templates/src/templates.rs b/kittybox-rs/templates/src/templates.rs deleted file mode 100644 index 63c67c5..0000000 --- a/kittybox-rs/templates/src/templates.rs +++ /dev/null @@ -1,168 +0,0 @@ -use http::StatusCode; -use kittybox_util::MicropubChannel; - -use crate::{Feed, VCard}; - -markup::define! 
{ - Template<'a>(title: &'a str, blog_name: &'a str, feeds: Vec, user: Option, content: String) { - @markup::doctype() - html { - head { - title { @title } - link[rel="preconnect", href="https://fonts.gstatic.com"]; - link[rel="stylesheet", href="/.kittybox/static/style.css"]; - meta[name="viewport", content="initial-scale=1, width=device-width"]; - - link[rel="micropub", href="/.kittybox/micropub"]; - link[rel="micropub_media", href="/.kittybox/media"]; - link[rel="indieauth_metadata", href="/.kittybox/indieauth/metadata"]; - link[rel="webmention", href="/.kittybox/webmention"]; - // legacy links for some dumb clients - link[rel="authorization_endpoint", href="/.kittybox/indieauth/auth"]; - link[rel="token_endpoint", href="/.kittybox/indieauth/token"]; - // LibreJS-compliant JS licensing info (because TypeScript is a bitch) - link[rel="jslicense", href="/.kittybox/static/jslicense.html"]; - /*@if let Some(endpoints) = endpoints { - @if let Some(webmention) = &endpoints.webmention { - link[rel="webmention", href=&webmention]; - } - @if let Some(microsub) = &endpoints.microsub { - link[rel="microsub", href=µsub]; - } - }*/ - } - body { - a[href="#main_content", id="skip-to-content"] { "Skip to content" } - // TODO Somehow compress headerbar into a menu when the screen space is tight - nav #headerbar { - ul { - li { a #homepage[href="/"] { @blog_name } } - @for feed in feeds.iter() { - li { a[href=&feed.uid] { @feed.name } } - } - li.shiftright { - @if user.is_none() { - a #login[href="/login"] { "Sign in" } - } else { - span { - @user.as_ref().unwrap() " - " a #logout[href="/logout"] { "Sign out" } - } - } - } - } - } - main #main_content { - @markup::raw(content) - } - footer { - p { - "Powered by " a[href="https://sr.ht/~vikanezrimaya/kittybox"] { - "Kittybox" - } - } - } - } - } - } - MainPage<'a>(feed: &'a serde_json::Value, card: &'a serde_json::Value, cursor: Option<&'a str>, webring: bool) { - .sidebyside { - @VCard { card } - #dynamicstuff { - div { - p { 
"This section will provide interesting statistics or tidbits about my life in this exact moment (with maybe a small delay)." } - p { "It will probably require JavaScript to self-update, but I promise to keep this widget lightweight and open-source!" } - p { small { - "JavaScript isn't a menace, stop fearing it or I will switch to WebAssembly " - "and knock your nico-nico-kneecaps so fast with its speed you won't even notice that... " - small { "omae ha mou shindeiru" } - @markup::raw("") - } } - } - @if *webring { - #webring { - a[href="https://xn--sr8hvo.ws/previous"] { "←" } - " An " - a[href="https://xn--sr8hvo.ws"] { - "IndieWeb Webring" - } - " πŸ•ΈπŸ’ " - a[href="https://xn--sr8hvo.ws/next"] { "β†’" } - } - } - } - } - @Feed { feed, cursor: *cursor } - } - ErrorPage(code: StatusCode, msg: Option) { - h1 { @format!("HTTP {code}") } - @match *code { - StatusCode::UNAUTHORIZED => { - p { "Looks like you need to authenticate yourself before seeing this page. Try logging in with IndieAuth using the Login button above!" } - } - StatusCode::FORBIDDEN => { - p { "Looks like you're forbidden from viewing this page." } - p { - "This might've been caused by being banned from viewing my website" - "or simply by trying to see what you're not supposed to see, " - "like a private post that's not intended for you. It's ok, it happens." - } - } - StatusCode::GONE => { - p { "Looks like the page you're trying to find is gone and is never coming back." } - } - StatusCode::UNAVAILABLE_FOR_LEGAL_REASONS => { - p { "The page is there, but I can't legally provide it to you because the censorship said so." } - } - StatusCode::NOT_FOUND => { - p { "Looks like there's no such page. Maybe you or someone else mistyped a URL or my database experienced data loss." } - } - StatusCode::IM_A_TEAPOT => { - p { "Wait, do you seriously expect my website to brew you coffee? It's not a coffee machine!" } - - p { - small { - "I could brew you some coffee tho if we meet one day... 
" - small { - i { - "i-it's nothing personal, I just like brewing coffee, b-baka!!!~ >. { - @match msg { - None => { - p { - "There was an undescribed error in your request. " - "Please try again later or with a different request." - } - } - Some(msg) => { - p { - "There was a following error in your request:" - } - blockquote { pre { @msg } } - } - } - } - StatusCode::INTERNAL_SERVER_ERROR => { - @match msg { - None => { - p { "It seems like you have found an error. Not to worry, it has already been logged." } - } - Some(msg) => { - p { "The server encountered an error while processing your request:" } - blockquote { @msg } - p { "Don't worry, it has already been logged." } - } - } - } - _ => { - p { "It seems like you have found an error. Not to worry, it has already been logged." } - } - } - P { "For now, may I suggest to visit " a[href="/"] {"the main page"} " of this website?" } - - } -} diff --git a/kittybox-rs/util/Cargo.toml b/kittybox-rs/util/Cargo.toml deleted file mode 100644 index 0425849..0000000 --- a/kittybox-rs/util/Cargo.toml +++ /dev/null @@ -1,29 +0,0 @@ -[package] -name = "kittybox-util" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[features] -fs = ["rand", "tokio", "tokio/fs"] - -[dependencies] -serde = { version = "^1.0.170", features = ["derive"] } -serde_json = "^1.0.64" -axum-core = "^0.3.4" -http = "^0.2.7" -async-trait = "^0.1.50" -futures-util = "^0.3.14" -uuid = "^1.3.3" -[dependencies.rand] -version = "^0.8.5" -optional = true -[dependencies.tokio] -version = "^1.16.1" -features = ["tracing"] -optional = true -[dependencies.sqlx] -version = "0.7" -features = ["json"] -optional = true \ No newline at end of file diff --git a/kittybox-rs/util/src/error.rs b/kittybox-rs/util/src/error.rs deleted file mode 100644 index 1c95020..0000000 --- a/kittybox-rs/util/src/error.rs +++ /dev/null @@ -1,95 +0,0 @@ -use serde::{Deserialize, Serialize}; -use 
http::StatusCode; -use axum_core::response::{Response, IntoResponse}; - -#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] -#[serde(rename_all = "snake_case")] -/// Kinds of errors that can happen within a Micropub operation. -pub enum ErrorType { - /// An erroneous attempt to create something that already exists. - AlreadyExists, - /// Current user is expressly forbidden from performing this action. - Forbidden, - /// The Micropub server experienced an internal error. - InternalServerError, - /// The request was invalid or malformed. - InvalidRequest, - /// The provided OAuth2 scopes were insufficient to allow performing this action. - InvalidScope, - /// There was no token or other means of authorization in the request. - NotAuthorized, - /// Whatever was requested was not found. - NotFound, - /// The request payload was of a type unsupported by the Micropub endpoint. - UnsupportedMediaType, -} - -/// Representation of the Micropub API error. -#[derive(Serialize, Deserialize, Debug)] -pub struct MicropubError { - /// General kind of an error that occured. - pub error: ErrorType, - /// A human-readable error description intended for application developers. - // TODO use Cow<'static, str> to save on heap allocations - pub error_description: String, -} - -impl std::error::Error for MicropubError {} - -impl std::fmt::Display for MicropubError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str("Micropub error: ")?; - f.write_str(&self.error_description) - } -} - -impl From for MicropubError { - fn from(err: serde_json::Error) -> Self { - use ErrorType::*; - Self { - error: InvalidRequest, - error_description: err.to_string(), - } - } -} - -impl MicropubError { - /// Create a new Micropub error. 
- pub fn new(error: ErrorType, error_description: &str) -> Self { - Self { - error, - error_description: error_description.to_owned(), - } - } -} - -impl From<&MicropubError> for StatusCode { - fn from(err: &MicropubError) -> Self { - use ErrorType::*; - match err.error { - AlreadyExists => StatusCode::CONFLICT, - Forbidden => StatusCode::FORBIDDEN, - InternalServerError => StatusCode::INTERNAL_SERVER_ERROR, - InvalidRequest => StatusCode::BAD_REQUEST, - InvalidScope => StatusCode::UNAUTHORIZED, - NotAuthorized => StatusCode::UNAUTHORIZED, - NotFound => StatusCode::NOT_FOUND, - UnsupportedMediaType => StatusCode::UNSUPPORTED_MEDIA_TYPE, - } - } -} -impl From for StatusCode { - fn from(err: MicropubError) -> Self { - (&err).into() - } -} - -impl IntoResponse for MicropubError { - fn into_response(self) -> Response { - IntoResponse::into_response(( - StatusCode::from(&self), - [("Content-Type", "application/json")], - serde_json::to_string(&self).unwrap(), - )) - } -} diff --git a/kittybox-rs/util/src/lib.rs b/kittybox-rs/util/src/lib.rs deleted file mode 100644 index c49bdf5..0000000 --- a/kittybox-rs/util/src/lib.rs +++ /dev/null @@ -1,123 +0,0 @@ -#![warn(missing_docs)] -//! Small things that couldn't fit elsewhere in Kittybox, yet may be -//! useful on their own or in multiple Kittybox crates. -//! -//! Some things are gated behind features, namely: -//! - `fs` - enables use of filesystem-related utilities -use serde::{Deserialize, Serialize}; - -#[derive(Clone, Serialize, Deserialize)] -pub struct IndiewebEndpoints { - pub authorization_endpoint: String, - pub token_endpoint: String, - pub webmention: Option, - pub microsub: Option, -} - -/// Data structure representing a Micropub channel in the ?q=channels output. -#[derive(Serialize, Deserialize, PartialEq, Debug)] -#[cfg_attr(feature = "sqlx", derive(sqlx::FromRow))] -pub struct MicropubChannel { - /// The channel's UID. It is usually also a publically accessible permalink URL. 
- pub uid: String, - /// The channel's user-friendly name used to recognize it in lists. - pub name: String, -} - -#[derive(Debug, Default)] -/// Common types of webmentions. -pub enum MentionType { - /// Corresponds to a `u-in-reply-to` link. - Reply, - /// Corresponds to a `u-like-of` link. - Like, - /// Corresponds to a `u-repost-of` link. - Repost, - /// Corresponds to a `u-bookmark-of` link. - Bookmark, - /// A plain link without MF2 annotations. - #[default] - Mention -} - -/// Common errors from the IndieWeb protocols that can be reused between modules. -pub mod error; -pub use error::{ErrorType, MicropubError}; - -/// Common data-types useful in creating smart authentication systems. -pub mod auth { - #[derive(PartialEq, Eq, Hash, Clone, Copy)] - pub enum EnrolledCredential { - /// An indicator that a password is enrolled. Passwords can be - /// used to recover from a lost token. - Password, - /// An indicator that one or more WebAuthn credentials were - /// enrolled. - WebAuthn - } -} - -/// A collection of traits for implementing a robust job queue. -pub mod queue; - -#[cfg(feature = "fs")] -/// Commonly-used operations with the file system in Kittybox's -/// underlying storage mechanisms. -pub mod fs { - use std::io::{self, Result}; - use std::path::{Path, PathBuf}; - use rand::{Rng, distributions::Alphanumeric}; - use tokio::fs; - - /// Create a temporary file named `temp.[a-zA-Z0-9]{length}` in - /// the given location and immediately open it. Returns the - /// filename and the corresponding file handle. It is the caller's - /// responsibility to clean up the temporary file when it is no - /// longer needed. - /// - /// Uses [`OpenOptions::create_new`][fs::OpenOptions::create_new] - /// to detect filename collisions, in which case it will - /// automatically retry until the operation succeeds. - /// - /// # Errors - /// - /// Returns the underlying [`io::Error`] if the operation fails - /// due to reasons other than filename collision. 
- pub async fn mktemp(dir: T, basename: B, length: usize) -> Result<(PathBuf, fs::File)> - where - T: AsRef, - B: Into> - { - let dir = dir.as_ref(); - let basename = basename.into().unwrap_or(""); - fs::create_dir_all(dir).await?; - - loop { - let filename = dir.join(format!( - "{}{}{}", - basename, - if basename.is_empty() { "" } else { "." }, - { - let string = rand::thread_rng() - .sample_iter(&Alphanumeric) - .take(length) - .collect::>(); - String::from_utf8(string).unwrap() - } - )); - - match fs::OpenOptions::new() - .create_new(true) - .write(true) - .open(&filename) - .await - { - Ok(file) => return Ok((filename, file)), - Err(err) => match err.kind() { - io::ErrorKind::AlreadyExists => continue, - _ => return Err(err) - } - } - } - } -} diff --git a/kittybox-rs/util/src/queue.rs b/kittybox-rs/util/src/queue.rs deleted file mode 100644 index c880597..0000000 --- a/kittybox-rs/util/src/queue.rs +++ /dev/null @@ -1,66 +0,0 @@ -use futures_util::Stream; -use std::pin::Pin; -use uuid::Uuid; - -#[async_trait::async_trait] -/// A job queue that can store and return jobs. -pub trait JobQueue: Send + Sync + Sized + Clone + 'static { - /// A type of job object that will be returned by the queue. - type Job: Job; - /// Error type that the queue can produce in its work. - type Error: std::error::Error + Send + Sync + Sized; - - /// Get one item from the job queue, if the job queue has pending - /// items available. - /// - /// # Errors - /// - /// Returns an error if a job queue failed in some way. Having no - /// items is not a failure, in which case `Ok(None)` is returned. - async fn get_one(&self) -> Result, Self::Error>; - /// Put an item into a job queue, returning its UUID. - async fn put(&self, item: &T) -> Result; - - /* - /// Check the amount of pending and stuck items in the job queue. - async fn len(&self) -> Result<(usize, usize), Self::Error>; - /// Returns whether the job queue has some pending items. 
- async fn is_empty(&self) -> Result { - Ok(self.len().await?.0 == 0) - } - /// Returns whether the job queue has some stuck items that - /// require manual cleanup. - async fn has_stuck(&self) -> Result { - Ok(self.len().await?.1 > 0) - } - */ - - /// Consume the job queue object and return a stream of unstuck - /// items from the job queue. - /// - /// Note that one item may be returned several times if it is not - /// marked as done. - async fn into_stream(self) -> Result> + Send>>, Self::Error>; -} - -#[async_trait::async_trait] -/// A job description yielded from a job queue. -/// -/// # Implementors -/// -/// On [`Drop`], the job should be returned to a job queue. If your -/// job queue tracks attempts, the counter should be incremented by -/// one. -/// -/// Figuring how to do this asynchronously from a synchronous trait -/// is left as an exercise to the reader. -pub trait Job>: Send + Sync + Sized { - /// Get the object describing the task itself. - fn job(&self) -> &T; - /// Mark the job as done and remove it from the job queue. - async fn done(self) -> Result<(), Q::Error>; -} - -/// An object describing the job itself, returned as part of a -/// [`Job`]. -pub trait JobItem: Send + Sync + Sized + std::fmt::Debug {} diff --git a/kittybox.nix b/kittybox.nix index 22e774c..1d66fc7 100644 --- a/kittybox.nix +++ b/kittybox.nix @@ -13,7 +13,7 @@ naersk.buildPackage rec { pname = "kittybox"; version = "0.1.0"; - src = lib.sources.sourceFilesBySuffices ./kittybox-rs [".rs" ".toml" ".sql" ".ts" ".css" ".html" ".lock" ".json"]; + src = lib.sources.sourceFilesBySuffices ./. 
[".rs" ".toml" ".sql" ".ts" ".css" ".html" ".lock" ".json"]; doCheck = stdenv.hostPlatform == stdenv.targetPlatform; cargoBuildOptions = x: x ++ [ diff --git a/migrations/0001_init.sql b/migrations/0001_init.sql new file mode 100644 index 0000000..c9915eb --- /dev/null +++ b/migrations/0001_init.sql @@ -0,0 +1,62 @@ +CREATE SCHEMA IF NOT EXISTS kittybox; + +CREATE TABLE kittybox.users ( + user_domain TEXT NOT NULL PRIMARY KEY, + site_name JSONB NOT NULL DEFAULT '"Kittybox"'::jsonb, + webring JSONB NOT NULL DEFAULT 'false'::jsonb +); + +CREATE TABLE kittybox.mf2_json ( + uid TEXT NOT NULL PRIMARY KEY, + mf2 JSONB NOT NULL, + owner TEXT NOT NULL -- REFERENCES kittybox.users(user_domain) +); + +CREATE INDEX mf2props ON kittybox.mf2_json USING GIN (mf2); +CREATE INDEX published_date ON kittybox.mf2_json ((mf2 #>> '{properties,published,0}')); + +CREATE TABLE kittybox.children ( + parent TEXT NOT NULL REFERENCES kittybox.mf2_json(uid) ON DELETE CASCADE, + child TEXT NOT NULL REFERENCES kittybox.mf2_json(uid) ON DELETE CASCADE, + UNIQUE(parent, child) +); + +CREATE INDEX fulltext ON kittybox.mf2_json USING GIN ( + to_tsvector('english', mf2['properties']['content']) +); + +CREATE FUNCTION kittybox.set_setting(user_domain text, setting text, val anyelement) RETURNS void AS $$ +BEGIN +EXECUTE format('INSERT INTO kittybox.users (user_domain, %I) VALUES ($1, $2) ON CONFLICT (user_domain) DO UPDATE SET %I = $2', setting, setting) + USING user_domain, val; + RETURN; +END; +$$ LANGUAGE plpgsql; + +CREATE FUNCTION kittybox.get_setting(user_domain text, setting text) RETURNS jsonb AS $$ +DECLARE + val jsonb; +BEGIN +EXECUTE format('SELECT %I FROM kittybox.users WHERE user_domain = $1', setting) USING user_domain INTO val; + +RETURN val; +END; +$$ LANGUAGE plpgsql; + +CREATE FUNCTION kittybox.hydrate_author(mf2 jsonb) RETURNS jsonb AS $$ +DECLARE + author jsonb; + author_uid text; +BEGIN + +author_uid := mf2 #>> '{properties,author,0}'; +IF NOT (author_uid IS NULL) THEN + SELECT 
mf2_json.mf2 INTO author FROM kittybox.mf2_json WHERE uid = author_uid; +END IF; +IF NOT FOUND THEN + RETURN mf2; +ELSE + RETURN jsonb_set(mf2, '{properties,author,0}', author); +END IF; +END; +$$ LANGUAGE plpgsql; diff --git a/migrations/webmention/0001_init.sql b/migrations/webmention/0001_init.sql new file mode 100644 index 0000000..9e7a192 --- /dev/null +++ b/migrations/webmention/0001_init.sql @@ -0,0 +1,15 @@ +CREATE SCHEMA IF NOT EXISTS kittybox_webmention; + +CREATE TABLE kittybox_webmention.incoming_webmention_queue ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + source TEXT NOT NULL, + target TEXT NOT NULL, + recv_timestamp TIMESTAMPTZ NOT NULL DEFAULT now(), + attempts INTEGER NOT NULL DEFAULT 0 +); + +CREATE INDEX webmention_jobs_by_attempts ON kittybox_webmention.incoming_webmention_queue (attempts); + +CREATE RULE notify_incoming_webmention AS +ON INSERT TO kittybox_webmention.incoming_webmention_queue +DO ALSO NOTIFY incoming_webmention; diff --git a/src/bin/kittybox-check-webmention.rs b/src/bin/kittybox-check-webmention.rs new file mode 100644 index 0000000..f02032c --- /dev/null +++ b/src/bin/kittybox-check-webmention.rs @@ -0,0 +1,152 @@ +use std::cell::{RefCell, Ref}; +use std::rc::Rc; + +use clap::Parser; +use microformats::types::PropertyValue; +use microformats::html5ever; +use microformats::html5ever::tendril::TendrilSink; + +#[derive(thiserror::Error, Debug)] +enum Error { + #[error("http request error: {0}")] + Http(#[from] reqwest::Error), + #[error("microformats error: {0}")] + Microformats(#[from] microformats::Error), + #[error("json error: {0}")] + Json(#[from] serde_json::Error), + #[error("url parse error: {0}")] + UrlParse(#[from] url::ParseError), +} + +use kittybox_util::MentionType; + +fn check_mention(document: impl AsRef, base_url: &url::Url, link: &url::Url) -> Result, Error> { + // First, check the document for MF2 markup + let document = microformats::from_html(document.as_ref(), base_url.clone())?; + + // Get an 
iterator of all items + let items_iter = document.items.iter() + .map(AsRef::as_ref) + .map(RefCell::borrow); + + for item in items_iter { + let props = item.properties.borrow(); + for (prop, interaction_type) in [ + ("in-reply-to", MentionType::Reply), ("like-of", MentionType::Like), + ("bookmark-of", MentionType::Bookmark), ("repost-of", MentionType::Repost) + ] { + if let Some(propvals) = props.get(prop) { + for val in propvals { + if let PropertyValue::Url(url) = val { + if url == link { + return Ok(Some(interaction_type)) + } + } + } + } + } + // Process `content` + if let Some(PropertyValue::Fragment(content)) = props.get("content") + .map(Vec::as_slice) + .unwrap_or_default() + .first() + { + let root = html5ever::parse_document(html5ever::rcdom::RcDom::default(), Default::default()) + .from_utf8() + .one(content.html.to_owned().as_bytes()) + .document; + + // This is a trick to unwrap recursion into a loop + // + // A list of unprocessed node is made. Then, in each + // iteration, the list is "taken" and replaced with an + // empty list, which is populated with nodes for the next + // iteration of the loop. + // + // Empty list means all nodes were processed. + let mut unprocessed_nodes: Vec> = root.children.borrow().iter().cloned().collect(); + while unprocessed_nodes.len() > 0 { + // "Take" the list out of its memory slot, replace it with an empty list + let nodes = std::mem::take(&mut unprocessed_nodes); + 'nodes_loop: for node in nodes.into_iter() { + // Add children nodes to the list for the next iteration + unprocessed_nodes.extend(node.children.borrow().iter().cloned()); + + if let html5ever::rcdom::NodeData::Element { ref name, ref attrs, .. } = node.data { + // If it's not `
`, skip it + if name.local != *"a" { continue; } + let mut is_mention: bool = false; + for attr in attrs.borrow().iter() { + if attr.name.local == *"rel" { + // Don't count `rel="nofollow"` links β€” a web crawler should ignore them + // and so for purposes of driving visitors they are useless + if attr.value + .as_ref() + .split([',', ' ']) + .any(|v| v == "nofollow") + { + // Skip the entire node. + continue 'nodes_loop; + } + } + // if it's not ``, skip it + if attr.name.local != *"href" { continue; } + // Be forgiving in parsing URLs, and resolve them against the base URL + if let Ok(url) = base_url.join(attr.value.as_ref()) { + if &url == link { + is_mention = true; + } + } + } + if is_mention { + return Ok(Some(MentionType::Mention)); + } + } + } + } + + } + } + + Ok(None) +} + +#[derive(Parser, Debug)] +#[clap( + name = "kittybox-check-webmention", + author = "Vika ", + version = env!("CARGO_PKG_VERSION"), + about = "Verify an incoming webmention" +)] +struct Args { + #[clap(value_parser)] + url: url::Url, + #[clap(value_parser)] + link: url::Url +} + +#[tokio::main] +async fn main() -> Result<(), self::Error> { + let args = Args::parse(); + + let http: reqwest::Client = { + #[allow(unused_mut)] + let mut builder = reqwest::Client::builder() + .user_agent(concat!( + env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION") + )); + + builder.build().unwrap() + }; + + let response = http.get(args.url.clone()).send().await?; + let text = response.text().await?; + + if let Some(mention_type) = check_mention(text, &args.url, &args.link)? 
{ + println!("{:?}", mention_type); + + Ok(()) + } else { + std::process::exit(1) + } +} diff --git a/src/bin/kittybox-indieauth-helper.rs b/src/bin/kittybox-indieauth-helper.rs new file mode 100644 index 0000000..3377ec3 --- /dev/null +++ b/src/bin/kittybox-indieauth-helper.rs @@ -0,0 +1,233 @@ +use kittybox_indieauth::{ + AuthorizationRequest, PKCEVerifier, + PKCEChallenge, PKCEMethod, GrantRequest, Scope, + AuthorizationResponse, TokenData, GrantResponse +}; +use clap::Parser; +use std::{borrow::Cow, io::Write}; + +const DEFAULT_CLIENT_ID: &str = "https://kittybox.fireburn.ru/indieauth-helper.html"; +const DEFAULT_REDIRECT_URI: &str = "http://localhost:60000/callback"; + +#[derive(Debug, thiserror::Error)] +enum Error { + #[error("i/o error: {0}")] + IO(#[from] std::io::Error), + #[error("http request error: {0}")] + HTTP(#[from] reqwest::Error), + #[error("urlencoded encoding error: {0}")] + UrlencodedEncoding(#[from] serde_urlencoded::ser::Error), + #[error("url parsing error: {0}")] + UrlParse(#[from] url::ParseError), + #[error("indieauth flow error: {0}")] + IndieAuth(Cow<'static, str>) +} + +#[derive(Parser, Debug)] +#[clap( + name = "kittybox-indieauth-helper", + author = "Vika ", + version = env!("CARGO_PKG_VERSION"), + about = "Retrieve an IndieAuth token for debugging", + long_about = None +)] +struct Args { + /// Profile URL to use for initiating IndieAuth metadata discovery. + #[clap(value_parser)] + me: url::Url, + /// Scopes to request for the token. + /// + /// All IndieAuth scopes are supported, including arbitrary custom scopes. + #[clap(short, long)] + scope: Vec, + /// Client ID to use when requesting a token. + #[clap(short, long, value_parser, default_value = DEFAULT_CLIENT_ID)] + client_id: url::Url, + /// Redirect URI to declare. Note: This will break the flow, use only for testing UI. 
+ #[clap(long, value_parser)] + redirect_uri: Option +} + +fn append_query_string( + url: &url::Url, + query: T +) -> Result { + let mut new_url = url.clone(); + let mut query = serde_urlencoded::to_string(query)?; + if let Some(old_query) = url.query() { + query.push('&'); + query.push_str(old_query); + } + new_url.set_query(Some(&query)); + + Ok(new_url) +} + +#[tokio::main] +async fn main() -> Result<(), Error> { + let args = Args::parse(); + + let http: reqwest::Client = { + #[allow(unused_mut)] + let mut builder = reqwest::Client::builder() + .user_agent(concat!( + env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION") + )); + + builder.build().unwrap() + }; + + let redirect_uri: url::Url = args.redirect_uri + .clone() + .unwrap_or_else(|| DEFAULT_REDIRECT_URI.parse().unwrap()); + + eprintln!("Checking .well-known for metadata..."); + let metadata = http.get(args.me.join("/.well-known/oauth-authorization-server")?) + .header("Accept", "application/json") + .send() + .await? + .json::() + .await?; + + let verifier = PKCEVerifier::new(); + + let authorization_request = AuthorizationRequest { + response_type: kittybox_indieauth::ResponseType::Code, + client_id: args.client_id.clone(), + redirect_uri: redirect_uri.clone(), + state: kittybox_indieauth::State::new(), + code_challenge: PKCEChallenge::new(&verifier, PKCEMethod::default()), + scope: Some(kittybox_indieauth::Scopes::new(args.scope)), + me: Some(args.me) + }; + + let indieauth_url = append_query_string( + &metadata.authorization_endpoint, + authorization_request + )?; + + eprintln!("Please visit the following URL in your browser:\n\n {}\n", indieauth_url.as_str()); + + if args.redirect_uri.is_some() { + eprintln!("Custom redirect URI specified, won't be able to catch authorization response."); + std::process::exit(0); + } + + // Prepare a callback + let (tx, rx) = tokio::sync::oneshot::channel::(); + let server = { + use axum::{routing::get, extract::Query, response::IntoResponse}; + + let tx = 
std::sync::Arc::new(tokio::sync::Mutex::new(Some(tx))); + + let router = axum::Router::new() + .route("/callback", axum::routing::get( + move |query: Option>| async move { + if let Some(Query(response)) = query { + if let Some(tx) = tx.lock_owned().await.take() { + tx.send(response).unwrap(); + + (axum::http::StatusCode::OK, + [("Content-Type", "text/plain")], + "Thank you! This window can now be closed.") + .into_response() + } else { + (axum::http::StatusCode::BAD_REQUEST, + [("Content-Type", "text/plain")], + "Oops. The callback was already received. Did you click twice?") + .into_response() + } + } else { + axum::http::StatusCode::BAD_REQUEST.into_response() + } + } + )); + + use std::net::{SocketAddr, IpAddr, Ipv4Addr}; + + let server = hyper::server::Server::bind( + &SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST),60000) + ) + .serve(router.into_make_service()); + + tokio::task::spawn(server) + }; + + let authorization_response = rx.await.unwrap(); + + // Clean up after the server + tokio::task::spawn(async move { + // Wait for the server to settle -- it might need to send its response + tokio::time::sleep(tokio::time::Duration::from_secs(1)).await; + // Abort the future -- this should kill the server + server.abort(); + }); + + eprintln!("Got authorization response: {:#?}", authorization_response); + eprint!("Checking issuer field..."); + std::io::stderr().lock().flush()?; + + if dbg!(authorization_response.iss.as_str()) == dbg!(metadata.issuer.as_str()) { + eprintln!(" Done"); + } else { + eprintln!(" Failed"); + #[cfg(not(debug_assertions))] + std::process::exit(1); + } + let grant_response: GrantResponse = http.post(metadata.token_endpoint) + .form(&GrantRequest::AuthorizationCode { + code: authorization_response.code, + client_id: args.client_id, + redirect_uri, + code_verifier: verifier + }) + .header("Accept", "application/json") + .send() + .await? 
+ .json() + .await?; + + if let GrantResponse::AccessToken { + me, + profile, + access_token, + expires_in, + refresh_token, + token_type, + scope + } = grant_response { + eprintln!("Congratulations, {}, access token is ready! {}", + me.as_str(), + if let Some(exp) = expires_in { + format!("It expires in {exp} seconds.") + } else { + format!("It seems to have unlimited duration.") + } + ); + println!("{}", access_token); + if let Some(refresh_token) = refresh_token { + eprintln!("Save this refresh token, it will come in handy:"); + println!("{}", refresh_token); + }; + + if let Some(profile) = profile { + eprintln!("\nThe token endpoint returned some profile information:"); + if let Some(name) = profile.name { + eprintln!(" - Name: {name}") + } + if let Some(url) = profile.url { + eprintln!(" - URL: {url}") + } + if let Some(photo) = profile.photo { + eprintln!(" - Photo: {photo}") + } + if let Some(email) = profile.email { + eprintln!(" - Email: {email}") + } + } + + Ok(()) + } else { + return Err(Error::IndieAuth(Cow::Borrowed("IndieAuth token endpoint did not return an access token grant."))); + } +} diff --git a/src/bin/kittybox-mf2.rs b/src/bin/kittybox-mf2.rs new file mode 100644 index 0000000..4366cb8 --- /dev/null +++ b/src/bin/kittybox-mf2.rs @@ -0,0 +1,49 @@ +use clap::Parser; + +#[derive(Parser, Debug)] +#[clap( + name = "kittybox-mf2", + author = "Vika ", + version = env!("CARGO_PKG_VERSION"), + about = "Fetch HTML and turn it into MF2-JSON" +)] +struct Args { + #[clap(value_parser)] + url: url::Url, +} + +#[derive(thiserror::Error, Debug)] +enum Error { + #[error("http request error: {0}")] + Http(#[from] reqwest::Error), + #[error("microformats error: {0}")] + Microformats(#[from] microformats::Error), + #[error("json error: {0}")] + Json(#[from] serde_json::Error), + #[error("url parse error: {0}")] + UrlParse(#[from] url::ParseError), +} + +#[tokio::main] +async fn main() -> Result<(), Error> { + let args = Args::parse(); + + let http: 
reqwest::Client = { + #[allow(unused_mut)] + let mut builder = reqwest::Client::builder() + .user_agent(concat!( + env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION") + )); + + builder.build().unwrap() + }; + + let response = http.get(args.url.clone()).send().await?; + let text = response.text().await?; + + let mf2 = microformats::from_html(text.as_ref(), args.url)?; + + println!("{}", serde_json::to_string_pretty(&mf2)?); + + Ok(()) +} diff --git a/src/bin/kittybox_bulk_import.rs b/src/bin/kittybox_bulk_import.rs new file mode 100644 index 0000000..7e1f6af --- /dev/null +++ b/src/bin/kittybox_bulk_import.rs @@ -0,0 +1,66 @@ +use anyhow::{anyhow, bail, Context, Result}; +use std::fs::File; +use std::io; + +#[async_std::main] +async fn main() -> Result<()> { + let args = std::env::args().collect::>(); + if args.iter().skip(1).any(|s| s == "--help") { + println!("Usage: {} [file]", args[0]); + println!("\nIf launched with no arguments, reads from stdin."); + println!( + "\nUse KITTYBOX_AUTH_TOKEN environment variable to authorize to the Micropub endpoint." + ); + std::process::exit(0); + } + + let token = std::env::var("KITTYBOX_AUTH_TOKEN") + .map_err(|_| anyhow!("No auth token found! Use KITTYBOX_AUTH_TOKEN env variable."))?; + let data: Vec = (if args.len() == 2 || (args.len() == 3 && args[2] == "-") { + serde_json::from_reader(io::stdin()) + } else if args.len() == 3 { + serde_json::from_reader(File::open(&args[2]).with_context(|| "Error opening input file")?) 
+ } else { + bail!("See `{} --help` for usage.", args[0]); + }) + .with_context(|| "Error while loading the input file")?; + + let url = surf::Url::parse(&args[1])?; + let client = surf::Client::new(); + + let iter = data.into_iter(); + + for post in iter { + println!( + "Processing {}...", + post["properties"]["url"][0] + .as_str() + .or_else(|| post["properties"]["published"][0] + .as_str() + .or_else(|| post["properties"]["name"][0] + .as_str() + .or(Some("")))) + .unwrap() + ); + match client + .post(&url) + .body(surf::http::Body::from_string(serde_json::to_string(&post)?)) + .header("Content-Type", "application/json") + .header("Authorization", format!("Bearer {}", &token)) + .send() + .await + { + Ok(mut response) => { + if response.status() == 201 || response.status() == 202 { + println!("Posted at {}", response.header("location").unwrap().last()); + } else { + println!("Error: {:?}", response.body_string().await); + } + } + Err(err) => { + println!("{}", err); + } + } + } + Ok(()) +} diff --git a/src/bin/kittybox_database_converter.rs b/src/bin/kittybox_database_converter.rs new file mode 100644 index 0000000..bc355c9 --- /dev/null +++ b/src/bin/kittybox_database_converter.rs @@ -0,0 +1,106 @@ +use anyhow::{anyhow, Context}; +use kittybox::database::FileStorage; +use kittybox::database::Storage; +use redis::{self, AsyncCommands}; +use std::collections::HashMap; + +/// Convert from a Redis storage to a new storage new_storage. 
+async fn convert_from_redis(from: String, new_storage: S) -> anyhow::Result<()> { + let db = redis::Client::open(from).context("Failed to open the Redis connection")?; + + let mut conn = db + .get_async_std_connection() + .await + .context("Failed to connect to Redis")?; + + // Rebinding to convince the borrow checker we're not smuggling stuff outta scope + let storage = &new_storage; + + let mut stream = conn.hscan::<_, String>("posts").await?; + + while let Some(key) = stream.next_item().await { + let value = serde_json::from_str::( + &stream + .next_item() + .await + .ok_or(anyhow!("Failed to find a corresponding value for the key"))?, + )?; + + println!("{}, {:?}", key, value); + + if value["see_other"].is_string() { + continue; + } + + let user = &(url::Url::parse(value["properties"]["uid"][0].as_str().unwrap()) + .unwrap() + .origin() + .ascii_serialization() + .clone() + + "/"); + if let Err(err) = storage.clone().put_post(&value, user).await { + eprintln!("Error saving post: {}", err); + } + } + + let mut stream: redis::AsyncIter = conn.scan_match("settings_*").await?; + while let Some(key) = stream.next_item().await { + let mut conn = db + .get_async_std_connection() + .await + .context("Failed to connect to Redis")?; + let user = key.strip_prefix("settings_").unwrap(); + match conn + .hgetall::<&str, HashMap>(&key) + .await + .context(format!("Failed getting settings from key {}", key)) + { + Ok(settings) => { + for (k, v) in settings.iter() { + if let Err(e) = storage + .set_setting(k, user, v) + .await + .with_context(|| format!("Failed setting {} for {}", k, user)) + { + eprintln!("{}", e); + } + } + } + Err(e) => { + eprintln!("{}", e); + } + } + } + + Ok(()) +} + +#[async_std::main] +async fn main() -> anyhow::Result<()> { + let mut args = std::env::args(); + args.next(); // skip argv[0] + let old_uri = args + .next() + .ok_or_else(|| anyhow!("No import source is provided."))?; + let new_uri = args + .next() + .ok_or_else(|| anyhow!("No import 
destination is provided."))?; + + let storage = if new_uri.starts_with("file:") { + let folder = new_uri.strip_prefix("file://").unwrap(); + let path = std::path::PathBuf::from(folder); + Box::new( + FileStorage::new(path) + .await + .context("Failed to construct the file storage")?, + ) + } else { + anyhow::bail!("Cannot construct the storage abstraction for destination storage. Check the storage type?"); + }; + + if old_uri.starts_with("redis") { + convert_from_redis(old_uri, *storage).await? + } + + Ok(()) +} diff --git a/src/database/file/mod.rs b/src/database/file/mod.rs new file mode 100644 index 0000000..27d3da1 --- /dev/null +++ b/src/database/file/mod.rs @@ -0,0 +1,733 @@ +//#![warn(clippy::unwrap_used)] +use crate::database::{ErrorKind, Result, settings, Storage, StorageError}; +use crate::micropub::{MicropubUpdate, MicropubPropertyDeletion}; +use async_trait::async_trait; +use futures::{stream, StreamExt, TryStreamExt}; +use kittybox_util::MentionType; +use serde_json::json; +use std::borrow::Cow; +use std::collections::HashMap; +use std::io::ErrorKind as IOErrorKind; +use std::path::{Path, PathBuf}; +use tokio::fs::{File, OpenOptions}; +use tokio::io::{AsyncReadExt, AsyncWriteExt}; +use tokio::task::spawn_blocking; +use tracing::{debug, error}; + +impl From for StorageError { + fn from(source: std::io::Error) -> Self { + Self::with_source( + match source.kind() { + IOErrorKind::NotFound => ErrorKind::NotFound, + IOErrorKind::AlreadyExists => ErrorKind::Conflict, + _ => ErrorKind::Backend, + }, + Cow::Owned(format!("file I/O error: {}", &source)), + Box::new(source), + ) + } +} + +impl From for StorageError { + fn from(source: tokio::time::error::Elapsed) -> Self { + Self::with_source( + ErrorKind::Backend, + Cow::Borrowed("timeout on I/O operation"), + Box::new(source), + ) + } +} + +// Copied from https://stackoverflow.com/questions/39340924 +// This routine is adapted from the *old* Path's `path_relative_from` +// function, which works differently 
from the new `relative_from` function. +// In particular, this handles the case on unix where both paths are +// absolute but with only the root as the common directory. +fn path_relative_from(path: &Path, base: &Path) -> Option { + use std::path::Component; + + if path.is_absolute() != base.is_absolute() { + if path.is_absolute() { + Some(PathBuf::from(path)) + } else { + None + } + } else { + let mut ita = path.components(); + let mut itb = base.components(); + let mut comps: Vec = vec![]; + loop { + match (ita.next(), itb.next()) { + (None, None) => break, + (Some(a), None) => { + comps.push(a); + comps.extend(ita.by_ref()); + break; + } + (None, _) => comps.push(Component::ParentDir), + (Some(a), Some(b)) if comps.is_empty() && a == b => (), + (Some(a), Some(b)) if b == Component::CurDir => comps.push(a), + (Some(_), Some(b)) if b == Component::ParentDir => return None, + (Some(a), Some(_)) => { + comps.push(Component::ParentDir); + for _ in itb { + comps.push(Component::ParentDir); + } + comps.push(a); + comps.extend(ita.by_ref()); + break; + } + } + } + Some(comps.iter().map(|c| c.as_os_str()).collect()) + } +} + +#[allow(clippy::unwrap_used, clippy::expect_used)] +#[cfg(test)] +mod tests { + #[test] + fn test_relative_path_resolving() { + let path1 = std::path::Path::new("/home/vika/Projects/kittybox"); + let path2 = std::path::Path::new("/home/vika/Projects/nixpkgs"); + let relative_path = super::path_relative_from(path2, path1).unwrap(); + + assert_eq!(relative_path, std::path::Path::new("../nixpkgs")) + } +} + +// TODO: Check that the path ACTUALLY IS INSIDE THE ROOT FOLDER +// This could be checked by completely resolving the path +// and checking if it has a common prefix +fn url_to_path(root: &Path, url: &str) -> PathBuf { + let path = url_to_relative_path(url).to_logical_path(root); + if !path.starts_with(root) { + // TODO: handle more gracefully + panic!("Security error: {:?} is not a prefix of {:?}", path, root) + } else { + path + } +} + +fn 
url_to_relative_path(url: &str) -> relative_path::RelativePathBuf { + let url = url::Url::try_from(url).expect("Couldn't parse a URL"); + let mut path = relative_path::RelativePathBuf::new(); + let user_domain = format!( + "{}{}", + url.host_str().unwrap(), + url.port() + .map(|port| format!(":{}", port)) + .unwrap_or_default() + ); + path.push(user_domain + url.path() + ".json"); + + path +} + +fn modify_post(post: &serde_json::Value, update: MicropubUpdate) -> Result { + let mut post = post.clone(); + + let mut add_keys: HashMap> = HashMap::new(); + let mut remove_keys: Vec = vec![]; + let mut remove_values: HashMap> = HashMap::new(); + + if let Some(MicropubPropertyDeletion::Properties(delete)) = update.delete { + remove_keys.extend(delete.iter().cloned()); + } else if let Some(MicropubPropertyDeletion::Values(delete)) = update.delete { + for (k, v) in delete { + remove_values + .entry(k.to_string()) + .or_default() + .extend(v.clone()); + } + } + if let Some(add) = update.add { + for (k, v) in add { + add_keys.insert(k.to_string(), v.clone()); + } + } + if let Some(replace) = update.replace { + for (k, v) in replace { + remove_keys.push(k.to_string()); + add_keys.insert(k.to_string(), v.clone()); + } + } + + if let Some(props) = post["properties"].as_object_mut() { + for k in remove_keys { + props.remove(&k); + } + } + for (k, v) in remove_values { + let k = &k; + let props = if k == "children" { + &mut post + } else { + &mut post["properties"] + }; + v.iter().for_each(|v| { + if let Some(vec) = props[k].as_array_mut() { + if let Some(index) = vec.iter().position(|w| w == v) { + vec.remove(index); + } + } + }); + } + for (k, v) in add_keys { + tracing::debug!("Adding k/v to post: {} => {:?}", k, v); + let props = if k == "children" { + &mut post + } else { + &mut post["properties"] + }; + if let Some(prop) = props[&k].as_array_mut() { + if k == "children" { + v.into_iter().rev().for_each(|v| prop.insert(0, v)); + } else { + prop.extend(v.into_iter()); + } + } 
else { + props[&k] = serde_json::Value::Array(v) + } + } + Ok(post) +} + +#[derive(Clone, Debug)] +/// A backend using a folder with JSON files as a backing store. +/// Uses symbolic links to represent a many-to-one mapping of URLs to a post. +pub struct FileStorage { + root_dir: PathBuf, +} + +impl FileStorage { + /// Create a new storage wrapping a folder specified by root_dir. + pub async fn new(root_dir: PathBuf) -> Result { + // TODO check if the dir is writable + Ok(Self { root_dir }) + } +} + +async fn hydrate_author( + feed: &mut serde_json::Value, + user: &'_ Option, + storage: &S, +) { + let url = feed["properties"]["uid"][0] + .as_str() + .expect("MF2 value should have a UID set! Check if you used normalize_mf2 before recording the post!"); + if let Some(author) = feed["properties"]["author"].as_array().cloned() { + if !feed["type"] + .as_array() + .expect("MF2 value should have a type set!") + .iter() + .any(|i| i == "h-card") + { + let author_list: Vec = stream::iter(author.iter()) + .then(|i| async move { + if let Some(i) = i.as_str() { + match storage.get_post(i).await { + Ok(post) => match post { + Some(post) => post, + None => json!(i), + }, + Err(e) => { + error!("Error while hydrating post {}: {}", url, e); + json!(i) + } + } + } else { + i.clone() + } + }) + .collect::>() + .await; + if let Some(props) = feed["properties"].as_object_mut() { + props["author"] = json!(author_list); + } else { + feed["properties"] = json!({ "author": author_list }); + } + } + } +} + +#[async_trait] +impl Storage for FileStorage { + #[tracing::instrument(skip(self))] + async fn post_exists(&self, url: &str) -> Result { + let path = url_to_path(&self.root_dir, url); + debug!("Checking if {:?} exists...", path); + /*let result = match tokio::fs::metadata(path).await { + Ok(metadata) => { + Ok(true) + }, + Err(err) => { + if err.kind() == IOErrorKind::NotFound { + Ok(false) + } else { + Err(err.into()) + } + } + };*/ + #[allow(clippy::unwrap_used)] // JoinHandle 
captures panics, this closure shouldn't panic + Ok(spawn_blocking(move || path.is_file()).await.unwrap()) + } + + #[tracing::instrument(skip(self))] + async fn get_post(&self, url: &str) -> Result> { + let path = url_to_path(&self.root_dir, url); + // TODO: check that the path actually belongs to the dir of user who requested it + // it's not like you CAN access someone else's private posts with it + // so it's not exactly a security issue, but it's still not good + debug!("Opening {:?}", path); + + match File::open(&path).await { + Ok(mut file) => { + let mut content = String::new(); + // Typechecks because OS magic acts on references + // to FDs as if they were behind a mutex + AsyncReadExt::read_to_string(&mut file, &mut content).await?; + debug!( + "Read {} bytes successfully from {:?}", + content.as_bytes().len(), + &path + ); + Ok(Some(serde_json::from_str(&content)?)) + } + Err(err) => { + if err.kind() == IOErrorKind::NotFound { + Ok(None) + } else { + Err(err.into()) + } + } + } + } + + #[tracing::instrument(skip(self))] + async fn put_post(&self, post: &'_ serde_json::Value, user: &'_ str) -> Result<()> { + let key = post["properties"]["uid"][0] + .as_str() + .expect("Tried to save a post without UID"); + let path = url_to_path(&self.root_dir, key); + let tempfile = (&path).with_extension("tmp"); + debug!("Creating {:?}", path); + + let parent = path + .parent() + .expect("Parent for this directory should always exist") + .to_owned(); + tokio::fs::create_dir_all(&parent).await?; + + let mut file = tokio::fs::OpenOptions::new() + .write(true) + .create_new(true) + .open(&tempfile) + .await?; + + file.write_all(post.to_string().as_bytes()).await?; + file.flush().await?; + file.sync_all().await?; + drop(file); + tokio::fs::rename(&tempfile, &path).await?; + tokio::fs::File::open(path.parent().unwrap()).await?.sync_all().await?; + + if let Some(urls) = post["properties"]["url"].as_array() { + for url in urls.iter().map(|i| i.as_str().unwrap()) { + let 
url_domain = { + let url = url::Url::parse(url).unwrap(); + format!( + "{}{}", + url.host_str().unwrap(), + url.port() + .map(|port| format!(":{}", port)) + .unwrap_or_default() + ) + }; + if url != key && url_domain == user { + let link = url_to_path(&self.root_dir, url); + debug!("Creating a symlink at {:?}", link); + let orig = path.clone(); + // We're supposed to have a parent here. + let basedir = link.parent().ok_or_else(|| { + StorageError::from_static( + ErrorKind::Backend, + "Failed to calculate parent directory when creating a symlink", + ) + })?; + let relative = path_relative_from(&orig, basedir).unwrap(); + println!("{:?} - {:?} = {:?}", &orig, &basedir, &relative); + tokio::fs::symlink(relative, link).await?; + } + } + } + + if post["type"] + .as_array() + .unwrap() + .iter() + .any(|s| s.as_str() == Some("h-feed")) + { + tracing::debug!("Adding to channel list..."); + // Add the h-feed to the channel list + let path = { + let mut path = relative_path::RelativePathBuf::new(); + path.push(user); + path.push("channels"); + + path.to_path(&self.root_dir) + }; + tokio::fs::create_dir_all(path.parent().unwrap()).await?; + tracing::debug!("Channels file path: {}", path.display()); + let tempfilename = path.with_extension("tmp"); + let channel_name = post["properties"]["name"][0] + .as_str() + .map(|s| s.to_string()) + .unwrap_or_else(String::default); + let key = key.to_string(); + tracing::debug!("Opening temporary file to modify chnanels..."); + let mut tempfile = OpenOptions::new() + .write(true) + .create_new(true) + .open(&tempfilename) + .await?; + tracing::debug!("Opening real channel file..."); + let mut channels: Vec = { + match OpenOptions::new() + .read(true) + .write(false) + .truncate(false) + .create(false) + .open(&path) + .await + { + Err(err) if err.kind() == std::io::ErrorKind::NotFound => { + Vec::default() + } + Err(err) => { + // Propagate the error upwards + return Err(err.into()); + } + Ok(mut file) => { + let mut content = 
String::new(); + file.read_to_string(&mut content).await?; + drop(file); + + if !content.is_empty() { + serde_json::from_str(&content)? + } else { + Vec::default() + } + } + } + }; + + channels.push(super::MicropubChannel { + uid: key.to_string(), + name: channel_name, + }); + + tempfile + .write_all(serde_json::to_string(&channels)?.as_bytes()) + .await?; + tempfile.flush().await?; + tempfile.sync_all().await?; + drop(tempfile); + tokio::fs::rename(tempfilename, &path).await?; + tokio::fs::File::open(path.parent().unwrap()).await?.sync_all().await?; + } + Ok(()) + } + + #[tracing::instrument(skip(self))] + async fn update_post(&self, url: &str, update: MicropubUpdate) -> Result<()> { + let path = url_to_path(&self.root_dir, url); + let tempfilename = path.with_extension("tmp"); + #[allow(unused_variables)] + let (old_json, new_json) = { + let mut temp = OpenOptions::new() + .write(true) + .create_new(true) + .open(&tempfilename) + .await?; + let mut file = OpenOptions::new().read(true).open(&path).await?; + + let mut content = String::new(); + file.read_to_string(&mut content).await?; + let json: serde_json::Value = serde_json::from_str(&content)?; + drop(file); + // Apply the editing algorithms + let new_json = modify_post(&json, update)?; + + temp.write_all(new_json.to_string().as_bytes()).await?; + temp.flush().await?; + temp.sync_all().await?; + drop(temp); + tokio::fs::rename(tempfilename, &path).await?; + tokio::fs::File::open(path.parent().unwrap()).await?.sync_all().await?; + + (json, new_json) + }; + // TODO check if URLs changed between old and new JSON + Ok(()) + } + + #[tracing::instrument(skip(self))] + async fn get_channels(&self, user: &'_ str) -> Result> { + let mut path = relative_path::RelativePathBuf::new(); + path.push(user); + path.push("channels"); + + let path = path.to_path(&self.root_dir); + tracing::debug!("Channels file path: {}", path.display()); + + match File::open(&path).await { + Ok(mut f) => { + let mut content = String::new(); + 
f.read_to_string(&mut content).await?; + // This should not happen, but if it does, handle it gracefully + if content.is_empty() { + return Ok(vec![]); + } + let channels: Vec = serde_json::from_str(&content)?; + Ok(channels) + } + Err(e) => { + if e.kind() == IOErrorKind::NotFound { + Ok(vec![]) + } else { + Err(e.into()) + } + } + } + } + + async fn read_feed_with_cursor( + &self, + url: &'_ str, + cursor: Option<&'_ str>, + limit: usize, + user: Option<&'_ str> + ) -> Result)>> { + Ok(self.read_feed_with_limit( + url, + &cursor.map(|v| v.to_owned()), + limit, + &user.map(|v| v.to_owned()) + ).await? + .map(|feed| { + tracing::debug!("Feed: {:#}", serde_json::Value::Array( + feed["children"] + .as_array() + .map(|v| v.as_slice()) + .unwrap_or_default() + .iter() + .map(|mf2| mf2["properties"]["uid"][0].clone()) + .collect::>() + )); + let cursor: Option = feed["children"] + .as_array() + .map(|v| v.as_slice()) + .unwrap_or_default() + .last() + .map(|v| v["properties"]["uid"][0].as_str().unwrap().to_owned()); + tracing::debug!("Extracted the cursor: {:?}", cursor); + (feed, cursor) + }) + ) + } + + #[tracing::instrument(skip(self))] + async fn read_feed_with_limit( + &self, + url: &'_ str, + after: &'_ Option, + limit: usize, + user: &'_ Option, + ) -> Result> { + if let Some(mut feed) = self.get_post(url).await? 
{ + if feed["children"].is_array() { + // Take this out of the MF2-JSON document to save memory + // + // This uses a clever match with enum destructuring + // to extract the underlying Vec without cloning it + let children: Vec = match feed["children"].take() { + serde_json::Value::Array(children) => children, + // We've already checked it's an array + _ => unreachable!() + }; + tracing::debug!("Full children array: {:#}", serde_json::Value::Array(children.clone())); + let mut posts_iter = children + .into_iter() + .map(|s: serde_json::Value| s.as_str().unwrap().to_string()); + // Note: we can't actually use `skip_while` here because we end up emitting `after`. + // This imperative snippet consumes after instead of emitting it, allowing the + // stream of posts to return only those items that truly come *after* that one. + // If I would implement an Iter combinator like this, I would call it `skip_until` + if let Some(after) = after { + for s in posts_iter.by_ref() { + if &s == after { + break; + } + } + }; + let posts = stream::iter(posts_iter) + .map(|url: String| async move { self.get_post(&url).await }) + .buffered(std::cmp::min(3, limit)) + // Hack to unwrap the Option and sieve out broken links + // Broken links return None, and Stream::filter_map skips Nones. 
+ .try_filter_map(|post: Option| async move { Ok(post) }) + .and_then(|mut post| async move { + hydrate_author(&mut post, user, self).await; + Ok(post) + }) + .take(limit); + + match posts.try_collect::>().await { + Ok(posts) => feed["children"] = serde_json::json!(posts), + Err(err) => { + return Err(StorageError::with_source( + ErrorKind::Other, + Cow::Owned(format!("Feed assembly error: {}", &err)), + Box::new(err), + )); + } + } + } + hydrate_author(&mut feed, user, self).await; + Ok(Some(feed)) + } else { + Ok(None) + } + } + + #[tracing::instrument(skip(self))] + async fn delete_post(&self, url: &'_ str) -> Result<()> { + let path = url_to_path(&self.root_dir, url); + if let Err(e) = tokio::fs::remove_file(path).await { + Err(e.into()) + } else { + // TODO check for dangling references in the channel list + Ok(()) + } + } + + #[tracing::instrument(skip(self))] + async fn get_setting, 'a>(&self, user: &'_ str) -> Result { + debug!("User for getting settings: {}", user); + let mut path = relative_path::RelativePathBuf::new(); + path.push(user); + path.push("settings"); + + let path = path.to_path(&self.root_dir); + debug!("Getting settings from {:?}", &path); + + let mut file = File::open(path).await?; + let mut content = String::new(); + file.read_to_string(&mut content).await?; + + let settings: HashMap<&str, serde_json::Value> = serde_json::from_str(&content)?; + match settings.get(S::ID) { + Some(value) => Ok(serde_json::from_value::(value.clone())?), + None => Err(StorageError::from_static(ErrorKind::Backend, "Setting not set")) + } + } + + #[tracing::instrument(skip(self))] + async fn set_setting + 'a, 'a>(&self, user: &'a str, value: S::Data) -> Result<()> { + let mut path = relative_path::RelativePathBuf::new(); + path.push(user); + path.push("settings"); + + let path = path.to_path(&self.root_dir); + let temppath = path.with_extension("tmp"); + + let parent = path.parent().unwrap().to_owned(); + tokio::fs::create_dir_all(&parent).await?; + + let mut 
tempfile = OpenOptions::new() + .write(true) + .create_new(true) + .open(&temppath) + .await?; + + let mut settings: HashMap = match File::open(&path).await { + Ok(mut f) => { + let mut content = String::new(); + f.read_to_string(&mut content).await?; + if content.is_empty() { + Default::default() + } else { + serde_json::from_str(&content)? + } + } + Err(err) => { + if err.kind() == IOErrorKind::NotFound { + Default::default() + } else { + return Err(err.into()); + } + } + }; + settings.insert(S::ID.to_owned(), serde_json::to_value(S::new(value))?); + + tempfile + .write_all(serde_json::to_string(&settings)?.as_bytes()) + .await?; + tempfile.flush().await?; + tempfile.sync_all().await?; + drop(tempfile); + tokio::fs::rename(temppath, &path).await?; + tokio::fs::File::open(path.parent().unwrap()).await?.sync_all().await?; + Ok(()) + } + + #[tracing::instrument(skip(self))] + async fn add_or_update_webmention(&self, target: &str, mention_type: MentionType, mention: serde_json::Value) -> Result<()> { + let path = url_to_path(&self.root_dir, target); + let tempfilename = path.with_extension("tmp"); + + let mut temp = OpenOptions::new() + .write(true) + .create_new(true) + .open(&tempfilename) + .await?; + let mut file = OpenOptions::new().read(true).open(&path).await?; + + let mut post: serde_json::Value = { + let mut content = String::new(); + file.read_to_string(&mut content).await?; + drop(file); + + serde_json::from_str(&content)? 
+ }; + + let key: &'static str = match mention_type { + MentionType::Reply => "comment", + MentionType::Like => "like", + MentionType::Repost => "repost", + MentionType::Bookmark => "bookmark", + MentionType::Mention => "mention", + }; + let mention_uid = mention["properties"]["uid"][0].clone(); + if let Some(values) = post["properties"][key].as_array_mut() { + for value in values.iter_mut() { + if value["properties"]["uid"][0] == mention_uid { + *value = mention; + break; + } + } + } else { + post["properties"][key] = serde_json::Value::Array(vec![mention]); + } + + temp.write_all(post.to_string().as_bytes()).await?; + temp.flush().await?; + temp.sync_all().await?; + drop(temp); + tokio::fs::rename(tempfilename, &path).await?; + tokio::fs::File::open(path.parent().unwrap()).await?.sync_all().await?; + + Ok(()) + } +} diff --git a/src/database/memory.rs b/src/database/memory.rs new file mode 100644 index 0000000..6339e7a --- /dev/null +++ b/src/database/memory.rs @@ -0,0 +1,249 @@ +#![allow(clippy::todo)] +use async_trait::async_trait; +use futures_util::FutureExt; +use serde_json::json; +use std::collections::HashMap; +use std::sync::Arc; +use tokio::sync::RwLock; + +use crate::database::{ErrorKind, MicropubChannel, Result, settings, Storage, StorageError}; + +#[derive(Clone, Debug)] +pub struct MemoryStorage { + pub mapping: Arc>>, + pub channels: Arc>>>, +} + +#[async_trait] +impl Storage for MemoryStorage { + async fn post_exists(&self, url: &str) -> Result { + return Ok(self.mapping.read().await.contains_key(url)); + } + + async fn get_post(&self, url: &str) -> Result> { + let mapping = self.mapping.read().await; + match mapping.get(url) { + Some(val) => { + if let Some(new_url) = val["see_other"].as_str() { + match mapping.get(new_url) { + Some(val) => Ok(Some(val.clone())), + None => { + drop(mapping); + self.mapping.write().await.remove(url); + Ok(None) + } + } + } else { + Ok(Some(val.clone())) + } + } + _ => Ok(None), + } + } + + async fn put_post(&self, 
post: &'_ serde_json::Value, _user: &'_ str) -> Result<()> { + let mapping = &mut self.mapping.write().await; + let key: &str = match post["properties"]["uid"][0].as_str() { + Some(uid) => uid, + None => { + return Err(StorageError::from_static( + ErrorKind::Other, + "post doesn't have a UID", + )) + } + }; + mapping.insert(key.to_string(), post.clone()); + if post["properties"]["url"].is_array() { + for url in post["properties"]["url"] + .as_array() + .unwrap() + .iter() + .map(|i| i.as_str().unwrap().to_string()) + { + if url != key { + mapping.insert(url, json!({ "see_other": key })); + } + } + } + if post["type"] + .as_array() + .unwrap() + .iter() + .any(|i| i == "h-feed") + { + // This is a feed. Add it to the channels array if it's not already there. + println!("{:#}", post); + self.channels + .write() + .await + .entry( + post["properties"]["author"][0] + .as_str() + .unwrap() + .to_string(), + ) + .or_insert_with(Vec::new) + .push(key.to_string()) + } + Ok(()) + } + + async fn update_post(&self, url: &'_ str, update: crate::micropub::MicropubUpdate) -> Result<()> { + let mut guard = self.mapping.write().await; + let mut post = guard.get_mut(url).ok_or(StorageError::from_static(ErrorKind::NotFound, "The specified post wasn't found in the database."))?; + + use crate::micropub::MicropubPropertyDeletion; + + let mut add_keys: HashMap> = HashMap::new(); + let mut remove_keys: Vec = vec![]; + let mut remove_values: HashMap> = HashMap::new(); + + if let Some(MicropubPropertyDeletion::Properties(delete)) = update.delete { + remove_keys.extend(delete.iter().cloned()); + } else if let Some(MicropubPropertyDeletion::Values(delete)) = update.delete { + for (k, v) in delete { + remove_values + .entry(k.to_string()) + .or_default() + .extend(v.clone()); + } + } + if let Some(add) = update.add { + for (k, v) in add { + add_keys.insert(k.to_string(), v.clone()); + } + } + if let Some(replace) = update.replace { + for (k, v) in replace { + remove_keys.push(k.to_string()); 
+ add_keys.insert(k.to_string(), v.clone()); + } + } + + if let Some(props) = post["properties"].as_object_mut() { + for k in remove_keys { + props.remove(&k); + } + } + for (k, v) in remove_values { + let k = &k; + let props = if k == "children" { + &mut post + } else { + &mut post["properties"] + }; + v.iter().for_each(|v| { + if let Some(vec) = props[k].as_array_mut() { + if let Some(index) = vec.iter().position(|w| w == v) { + vec.remove(index); + } + } + }); + } + for (k, v) in add_keys { + tracing::debug!("Adding k/v to post: {} => {:?}", k, v); + let props = if k == "children" { + &mut post + } else { + &mut post["properties"] + }; + if let Some(prop) = props[&k].as_array_mut() { + if k == "children" { + v.into_iter().rev().for_each(|v| prop.insert(0, v)); + } else { + prop.extend(v.into_iter()); + } + } else { + props[&k] = serde_json::Value::Array(v) + } + } + + Ok(()) + } + + async fn get_channels(&self, user: &'_ str) -> Result> { + match self.channels.read().await.get(user) { + Some(channels) => Ok(futures_util::future::join_all( + channels + .iter() + .map(|channel| { + self.get_post(channel).map(|result| result.unwrap()).map( + |post: Option| { + post.map(|post| MicropubChannel { + uid: post["properties"]["uid"][0].as_str().unwrap().to_string(), + name: post["properties"]["name"][0] + .as_str() + .unwrap() + .to_string(), + }) + }, + ) + }) + .collect::>(), + ) + .await + .into_iter() + .flatten() + .collect::>()), + None => Ok(vec![]), + } + } + + #[allow(unused_variables)] + async fn read_feed_with_limit( + &self, + url: &'_ str, + after: &'_ Option, + limit: usize, + user: &'_ Option, + ) -> Result> { + todo!() + } + + #[allow(unused_variables)] + async fn read_feed_with_cursor( + &self, + url: &'_ str, + cursor: Option<&'_ str>, + limit: usize, + user: Option<&'_ str> + ) -> Result)>> { + todo!() + } + + async fn delete_post(&self, url: &'_ str) -> Result<()> { + self.mapping.write().await.remove(url); + Ok(()) + } + + #[allow(unused_variables)] + 
async fn get_setting, 'a>(&'_ self, user: &'_ str) -> Result { + todo!() + } + + #[allow(unused_variables)] + async fn set_setting + 'a, 'a>(&self, user: &'a str, value: S::Data) -> Result<()> { + todo!() + } + + #[allow(unused_variables)] + async fn add_or_update_webmention(&self, target: &str, mention_type: kittybox_util::MentionType, mention: serde_json::Value) -> Result<()> { + todo!() + } + +} + +impl Default for MemoryStorage { + fn default() -> Self { + Self::new() + } +} + +impl MemoryStorage { + pub fn new() -> Self { + Self { + mapping: Arc::new(RwLock::new(HashMap::new())), + channels: Arc::new(RwLock::new(HashMap::new())), + } + } +} diff --git a/src/database/mod.rs b/src/database/mod.rs new file mode 100644 index 0000000..b4b70b2 --- /dev/null +++ b/src/database/mod.rs @@ -0,0 +1,793 @@ +#![warn(missing_docs)] +use std::borrow::Cow; + +use async_trait::async_trait; +use kittybox_util::MentionType; + +mod file; +pub use crate::database::file::FileStorage; +use crate::micropub::MicropubUpdate; +#[cfg(feature = "postgres")] +mod postgres; +#[cfg(feature = "postgres")] +pub use postgres::PostgresStorage; + +#[cfg(test)] +mod memory; +#[cfg(test)] +pub use crate::database::memory::MemoryStorage; + +pub use kittybox_util::MicropubChannel; + +use self::settings::Setting; + +/// Enum representing different errors that might occur during the database query. +#[derive(Debug, Clone, Copy)] +pub enum ErrorKind { + /// Backend error (e.g. database connection error) + Backend, + /// Error due to insufficient contextual permissions for the query + PermissionDenied, + /// Error due to the database being unable to parse JSON returned from the backing storage. + /// Usually indicative of someone fiddling with the database manually instead of using proper tools. + JsonParsing, + /// - ErrorKind::NotFound - equivalent to a 404 error. Note, some requests return an Option, + /// in which case None is also equivalent to a 404. 
+ NotFound, + /// The user's query or request to the database was malformed. Used whenever the database processes + /// the user's query directly, such as when editing posts inside of the database (e.g. Redis backend) + BadRequest, + /// the user's query collided with an in-flight request and needs to be retried + Conflict, + /// - ErrorKind::Other - when something so weird happens that it becomes undescribable. + Other, +} + +/// Settings that can be stored in the database. +pub mod settings { + mod private { + pub trait Sealed {} + } + + /// A trait for various settings that should be contained here. + /// + /// **Note**: this trait is sealed to prevent external + /// implementations, as it wouldn't make sense to add new settings + /// that aren't used by Kittybox itself. + pub trait Setting<'de>: private::Sealed + std::fmt::Debug + Default + Clone + serde::Serialize + serde::de::DeserializeOwned + /*From +*/ Send + Sync { + type Data: std::fmt::Debug + Send + Sync; + const ID: &'static str; + + /// Unwrap the setting type, returning owned data contained within. + fn into_inner(self) -> Self::Data; + /// Create a new instance of this type containing certain data. + fn new(data: Self::Data) -> Self; + } + + /// A website's title, shown in the header. 
+ #[derive(Debug, serde::Deserialize, serde::Serialize, Clone, PartialEq, Eq)] + pub struct SiteName(String); + impl Default for SiteName { + fn default() -> Self { + Self("Kittybox".to_string()) + } + } + impl AsRef for SiteName { + fn as_ref(&self) -> &str { + self.0.as_str() + } + } + impl private::Sealed for SiteName {} + impl Setting<'_> for SiteName { + type Data = String; + const ID: &'static str = "site_name"; + + fn into_inner(self) -> String { + self.0 + } + fn new(data: Self::Data) -> Self { + Self(data) + } + } + impl SiteName { + fn from_str(data: &str) -> Self { + Self(data.to_owned()) + } + } + + /// Participation status in the IndieWeb Webring: https://πŸ•ΈπŸ’.ws/dashboard + #[derive(Debug, Default, serde::Deserialize, serde::Serialize, Clone, Copy, PartialEq, Eq)] + pub struct Webring(bool); + impl private::Sealed for Webring {} + impl Setting<'_> for Webring { + type Data = bool; + const ID: &'static str = "webring"; + + fn into_inner(self) -> Self::Data { + self.0 + } + + fn new(data: Self::Data) -> Self { + Self(data) + } + } +} + +/// Error signalled from the database. 
+#[derive(Debug)] +pub struct StorageError { + msg: std::borrow::Cow<'static, str>, + source: Option>, + kind: ErrorKind, +} + +impl std::error::Error for StorageError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + self.source + .as_ref() + .map(|e| e.as_ref() as &dyn std::error::Error) + } +} +impl From for StorageError { + fn from(err: serde_json::Error) -> Self { + Self { + msg: std::borrow::Cow::Owned(format!("{}", err)), + source: Some(Box::new(err)), + kind: ErrorKind::JsonParsing, + } + } +} +impl std::fmt::Display for StorageError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}: {}", + match self.kind { + ErrorKind::Backend => "backend error", + ErrorKind::JsonParsing => "JSON parsing error", + ErrorKind::PermissionDenied => "permission denied", + ErrorKind::NotFound => "not found", + ErrorKind::BadRequest => "bad request", + ErrorKind::Conflict => "conflict with an in-flight request or existing data", + ErrorKind::Other => "generic storage layer error", + }, + self.msg + ) + } +} +impl serde::Serialize for StorageError { + fn serialize( + &self, + serializer: S, + ) -> std::result::Result { + serializer.serialize_str(&self.to_string()) + } +} +impl StorageError { + /// Create a new StorageError of an ErrorKind with a message. + pub fn new(kind: ErrorKind, msg: String) -> Self { + Self { + msg: Cow::Owned(msg), + source: None, + kind, + } + } + /// Create a new StorageError of an ErrorKind with a message from + /// a static string. + /// + /// This saves an allocation for a new string and is the preferred + /// way in case the error message doesn't change. + pub fn from_static(kind: ErrorKind, msg: &'static str) -> Self { + Self { + msg: Cow::Borrowed(msg), + source: None, + kind + } + } + /// Create a StorageError using another arbitrary Error as a source. 
+ pub fn with_source( + kind: ErrorKind, + msg: std::borrow::Cow<'static, str>, + source: Box, + ) -> Self { + Self { + msg, + source: Some(source), + kind, + } + } + /// Get the kind of an error. + pub fn kind(&self) -> ErrorKind { + self.kind + } + /// Get the message as a string slice. + pub fn msg(&self) -> &str { + &self.msg + } +} + +/// A special Result type for the Micropub backing storage. +pub type Result = std::result::Result; + +/// A storage backend for the Micropub server. +/// +/// Implementations should note that all methods listed on this trait MUST be fully atomic +/// or lock the database so that write conflicts or reading half-written data should not occur. +#[async_trait] +pub trait Storage: std::fmt::Debug + Clone + Send + Sync { + /// Check if a post exists in the database. + async fn post_exists(&self, url: &str) -> Result; + + /// Load a post from the database in MF2-JSON format, deserialized from JSON. + async fn get_post(&self, url: &str) -> Result>; + + /// Save a post to the database as an MF2-JSON structure. + /// + /// Note that the `post` object MUST have `post["properties"]["uid"][0]` defined. + async fn put_post(&self, post: &'_ serde_json::Value, user: &'_ str) -> Result<()>; + + /// Add post to feed. Some database implementations might have optimized ways to do this. + #[tracing::instrument(skip(self))] + async fn add_to_feed(&self, feed: &'_ str, post: &'_ str) -> Result<()> { + tracing::debug!("Inserting {} into {} using `update_post`", post, feed); + self.update_post(feed, serde_json::from_value( + serde_json::json!({"add": {"children": [post]}})).unwrap() + ).await + } + /// Remove post from feed. Some database implementations might have optimized ways to do this. 
+ #[tracing::instrument(skip(self))] + async fn remove_from_feed(&self, feed: &'_ str, post: &'_ str) -> Result<()> { + tracing::debug!("Removing {} into {} using `update_post`", post, feed); + self.update_post(feed, serde_json::from_value( + serde_json::json!({"delete": {"children": [post]}})).unwrap() + ).await + } + + /// Modify a post using an update object as defined in the + /// Micropub spec. + /// + /// Note to implementors: the update operation MUST be atomic and + /// SHOULD lock the database to prevent two clients overwriting + /// each other's changes or simply corrupting something. Rejecting + /// is allowed in case of concurrent updates if waiting for a lock + /// cannot be done. + async fn update_post(&self, url: &str, update: MicropubUpdate) -> Result<()>; + + /// Get a list of channels available for the user represented by + /// the `user` domain to write to. + async fn get_channels(&self, user: &'_ str) -> Result>; + + /// Fetch a feed at `url` and return an h-feed object containing + /// `limit` posts after a post by url `after`, filtering the content + /// in context of a user specified by `user` (or an anonymous user). + /// + /// This method MUST hydrate the `author` property with an h-card + /// from the database by replacing URLs with corresponding h-cards. + /// + /// When encountering posts which the `user` is not authorized to + /// access, this method MUST elide such posts (as an optimization + /// for the frontend) and not return them, but still return up to + /// `limit` posts (to not reveal the hidden posts' presence). + /// + /// Note for implementors: if you use streams to fetch posts in + /// parallel from the database, preferably make this method use a + /// connection pool to reduce overhead of creating a database + /// connection per post for parallel fetching. 
+ async fn read_feed_with_limit( + &self, + url: &'_ str, + after: &'_ Option, + limit: usize, + user: &'_ Option, + ) -> Result>; + + /// Fetch a feed at `url` and return an h-feed object containing + /// `limit` posts after a `cursor` (filtering the content in + /// context of a user specified by `user`, or an anonymous user), + /// as well as a new cursor to paginate with. + /// + /// This method MUST hydrate the `author` property with an h-card + /// from the database by replacing URLs with corresponding h-cards. + /// + /// When encountering posts which the `user` is not authorized to + /// access, this method MUST elide such posts (as an optimization + /// for the frontend) and not return them, but still return an + /// amount of posts as close to `limit` as possible (to avoid + /// revealing the existence of the hidden post). + /// + /// Note for implementors: if you use streams to fetch posts in + /// parallel from the database, preferably make this method use a + /// connection pool to reduce overhead of creating a database + /// connection per post for parallel fetching. + async fn read_feed_with_cursor( + &self, + url: &'_ str, + cursor: Option<&'_ str>, + limit: usize, + user: Option<&'_ str> + ) -> Result)>>; + + /// Deletes a post from the database irreversibly. Must be idempotent. + async fn delete_post(&self, url: &'_ str) -> Result<()>; + + /// Gets a setting from the setting store and passes the result. + async fn get_setting, 'a>(&'_ self, user: &'_ str) -> Result; + + /// Commits a setting to the setting store. + async fn set_setting + 'a, 'a>(&self, user: &'a str, value: S::Data) -> Result<()>; + + /// Add (or update) a webmention on a certian post. + /// + /// The MF2 object describing the webmention content will always + /// be of type `h-cite`, and the `uid` property on the object will + /// always be set. + /// + /// The rationale for this function is as follows: webmentions + /// might be duplicated, and we need to deduplicate them first. 
As + /// we lack support for transactions and locking posts on the + /// database, the only way is to implement the operation on the + /// database itself. + /// + /// Besides, it may even allow for nice tricks like storing the + /// webmentions separately and rehydrating them on feed reads. + async fn add_or_update_webmention(&self, target: &str, mention_type: MentionType, mention: serde_json::Value) -> Result<()>; +} + +#[cfg(test)] +mod tests { + use super::settings; + + use super::{MicropubChannel, Storage}; + use kittybox_util::MentionType; + use serde_json::json; + + async fn test_basic_operations(backend: Backend) { + let post: serde_json::Value = json!({ + "type": ["h-entry"], + "properties": { + "content": ["Test content"], + "author": ["https://fireburn.ru/"], + "uid": ["https://fireburn.ru/posts/hello"], + "url": ["https://fireburn.ru/posts/hello", "https://fireburn.ru/posts/test"] + } + }); + let key = post["properties"]["uid"][0].as_str().unwrap().to_string(); + let alt_url = post["properties"]["url"][1].as_str().unwrap().to_string(); + + // Reading and writing + backend + .put_post(&post, "fireburn.ru") + .await + .unwrap(); + if let Some(returned_post) = backend.get_post(&key).await.unwrap() { + assert!(returned_post.is_object()); + assert_eq!( + returned_post["type"].as_array().unwrap().len(), + post["type"].as_array().unwrap().len() + ); + assert_eq!( + returned_post["type"].as_array().unwrap(), + post["type"].as_array().unwrap() + ); + let props: &serde_json::Map = + post["properties"].as_object().unwrap(); + for key in props.keys() { + assert_eq!( + returned_post["properties"][key].as_array().unwrap(), + post["properties"][key].as_array().unwrap() + ) + } + } else { + panic!("For some reason the backend did not return the post.") + } + // Check the alternative URL - it should return the same post + if let Ok(Some(returned_post)) = backend.get_post(&alt_url).await { + assert!(returned_post.is_object()); + assert_eq!( + 
returned_post["type"].as_array().unwrap().len(), + post["type"].as_array().unwrap().len() + ); + assert_eq!( + returned_post["type"].as_array().unwrap(), + post["type"].as_array().unwrap() + ); + let props: &serde_json::Map = + post["properties"].as_object().unwrap(); + for key in props.keys() { + assert_eq!( + returned_post["properties"][key].as_array().unwrap(), + post["properties"][key].as_array().unwrap() + ) + } + } else { + panic!("For some reason the backend did not return the post.") + } + } + + /// Note: this is merely a smoke check and is in no way comprehensive. + // TODO updates for feeds must update children using special logic + async fn test_update(backend: Backend) { + let post: serde_json::Value = json!({ + "type": ["h-entry"], + "properties": { + "content": ["Test content"], + "author": ["https://fireburn.ru/"], + "uid": ["https://fireburn.ru/posts/hello"], + "url": ["https://fireburn.ru/posts/hello", "https://fireburn.ru/posts/test"] + } + }); + let key = post["properties"]["uid"][0].as_str().unwrap().to_string(); + + // Reading and writing + backend + .put_post(&post, "fireburn.ru") + .await + .unwrap(); + + backend + .update_post( + &key, + serde_json::from_value(json!({ + "url": &key, + "add": { + "category": ["testing"], + }, + "replace": { + "content": ["Different test content"] + } + })).unwrap(), + ) + .await + .unwrap(); + + match backend.get_post(&key).await { + Ok(Some(returned_post)) => { + assert!(returned_post.is_object()); + assert_eq!( + returned_post["type"].as_array().unwrap().len(), + post["type"].as_array().unwrap().len() + ); + assert_eq!( + returned_post["type"].as_array().unwrap(), + post["type"].as_array().unwrap() + ); + assert_eq!( + returned_post["properties"]["content"][0].as_str().unwrap(), + "Different test content" + ); + assert_eq!( + returned_post["properties"]["category"].as_array().unwrap(), + &vec![json!("testing")] + ); + } + something_else => { + something_else + .expect("Shouldn't error") + .expect("Should 
have the post"); + } + } + } + + async fn test_get_channel_list(backend: Backend) { + let feed = json!({ + "type": ["h-feed"], + "properties": { + "name": ["Main Page"], + "author": ["https://fireburn.ru/"], + "uid": ["https://fireburn.ru/feeds/main"] + }, + "children": [] + }); + backend + .put_post(&feed, "fireburn.ru") + .await + .unwrap(); + let chans = backend.get_channels("fireburn.ru").await.unwrap(); + assert_eq!(chans.len(), 1); + assert_eq!( + chans[0], + MicropubChannel { + uid: "https://fireburn.ru/feeds/main".to_string(), + name: "Main Page".to_string() + } + ); + } + + async fn test_settings(backend: Backend) { + backend + .set_setting::( + "https://fireburn.ru/", + "Vika's Hideout".to_owned() + ) + .await + .unwrap(); + assert_eq!( + backend + .get_setting::("https://fireburn.ru/") + .await + .unwrap() + .as_ref(), + "Vika's Hideout" + ); + } + + fn gen_random_post(domain: &str) -> serde_json::Value { + use faker_rand::lorem::{Paragraphs, Word}; + + let uid = format!( + "https://{domain}/posts/{}-{}-{}", + rand::random::(), + rand::random::(), + rand::random::() + ); + + let time = chrono::Local::now().to_rfc3339(); + let post = json!({ + "type": ["h-entry"], + "properties": { + "content": [rand::random::().to_string()], + "uid": [&uid], + "url": [&uid], + "published": [&time] + } + }); + + post + } + + fn gen_random_mention(domain: &str, mention_type: MentionType, url: &str) -> serde_json::Value { + use faker_rand::lorem::{Paragraphs, Word}; + + let uid = format!( + "https://{domain}/posts/{}-{}-{}", + rand::random::(), + rand::random::(), + rand::random::() + ); + + let time = chrono::Local::now().to_rfc3339(); + let post = json!({ + "type": ["h-cite"], + "properties": { + "content": [rand::random::().to_string()], + "uid": [&uid], + "url": [&uid], + "published": [&time], + (match mention_type { + MentionType::Reply => "in-reply-to", + MentionType::Like => "like-of", + MentionType::Repost => "repost-of", + MentionType::Bookmark => "bookmark-of", + 
MentionType::Mention => unimplemented!(), + }): [url] + } + }); + + post + } + + async fn test_feed_pagination(backend: Backend) { + let posts = { + let mut posts = std::iter::from_fn( + || Some(gen_random_post("fireburn.ru")) + ) + .take(40) + .collect::>(); + + // Reverse the array so it's in reverse-chronological order + posts.reverse(); + + posts + }; + + let feed = json!({ + "type": ["h-feed"], + "properties": { + "name": ["Main Page"], + "author": ["https://fireburn.ru/"], + "uid": ["https://fireburn.ru/feeds/main"] + }, + }); + let key = feed["properties"]["uid"][0].as_str().unwrap(); + + backend + .put_post(&feed, "fireburn.ru") + .await + .unwrap(); + + for (i, post) in posts.iter().rev().enumerate() { + backend + .put_post(post, "fireburn.ru") + .await + .unwrap(); + backend.add_to_feed(key, post["properties"]["uid"][0].as_str().unwrap()).await.unwrap(); + } + + let limit: usize = 10; + + tracing::debug!("Starting feed reading..."); + let (result, cursor) = backend + .read_feed_with_cursor(key, None, limit, None) + .await + .unwrap() + .unwrap(); + + assert_eq!(result["children"].as_array().unwrap().len(), limit); + assert_eq!( + result["children"] + .as_array() + .unwrap() + .iter() + .map(|post| post["properties"]["uid"][0].as_str().unwrap()) + .collect::>() + [0..10], + posts + .iter() + .map(|post| post["properties"]["uid"][0].as_str().unwrap()) + .collect::>() + [0..10] + ); + + tracing::debug!("Continuing with cursor: {:?}", cursor); + let (result2, cursor2) = backend + .read_feed_with_cursor( + key, + cursor.as_deref(), + limit, + None, + ) + .await + .unwrap() + .unwrap(); + + assert_eq!( + result2["children"].as_array().unwrap()[0..10], + posts[10..20] + ); + + tracing::debug!("Continuing with cursor: {:?}", cursor); + let (result3, cursor3) = backend + .read_feed_with_cursor( + key, + cursor2.as_deref(), + limit, + None, + ) + .await + .unwrap() + .unwrap(); + + assert_eq!( + result3["children"].as_array().unwrap()[0..10], + posts[20..30] + ); + 
+ tracing::debug!("Continuing with cursor: {:?}", cursor); + let (result4, _) = backend + .read_feed_with_cursor( + key, + cursor3.as_deref(), + limit, + None, + ) + .await + .unwrap() + .unwrap(); + + assert_eq!( + result4["children"].as_array().unwrap()[0..10], + posts[30..40] + ); + + // Regression test for #4 + // + // Results for a bogus cursor are undefined, so we aren't + // checking them. But the function at least shouldn't hang. + let nonsense_after = Some("1010101010"); + let _ = tokio::time::timeout(tokio::time::Duration::from_secs(10), async move { + backend + .read_feed_with_cursor(key, nonsense_after, limit, None) + .await + }) + .await + .expect("Operation should not hang: see https://gitlab.com/kittybox/kittybox/-/issues/4"); + } + + async fn test_webmention_addition(db: Backend) { + let post = gen_random_post("fireburn.ru"); + + db.put_post(&post, "fireburn.ru").await.unwrap(); + const TYPE: MentionType = MentionType::Reply; + + let target = post["properties"]["uid"][0].as_str().unwrap(); + let mut reply = gen_random_mention("aaronparecki.com", TYPE, target); + + let (read_post, _) = db.read_feed_with_cursor(target, None, 20, None).await.unwrap().unwrap(); + assert_eq!(post, read_post); + + db.add_or_update_webmention(target, TYPE, reply.clone()).await.unwrap(); + + let (read_post, _) = db.read_feed_with_cursor(target, None, 20, None).await.unwrap().unwrap(); + assert_eq!(read_post["properties"]["comment"][0], reply); + + reply["properties"]["content"][0] = json!(rand::random::().to_string()); + + db.add_or_update_webmention(target, TYPE, reply.clone()).await.unwrap(); + let (read_post, _) = db.read_feed_with_cursor(target, None, 20, None).await.unwrap().unwrap(); + assert_eq!(read_post["properties"]["comment"][0], reply); + } + + async fn test_pretty_permalinks(db: Backend) { + const PERMALINK: &str = "https://fireburn.ru/posts/pretty-permalink"; + + let post = { + let mut post = gen_random_post("fireburn.ru"); + let urls = 
post["properties"]["url"].as_array_mut().unwrap(); + urls.push(serde_json::Value::String( + PERMALINK.to_owned() + )); + + post + }; + db.put_post(&post, "fireburn.ru").await.unwrap(); + + for i in post["properties"]["url"].as_array().unwrap() { + let (read_post, _) = db.read_feed_with_cursor(i.as_str().unwrap(), None, 20, None).await.unwrap().unwrap(); + assert_eq!(read_post, post); + } + } + /// Automatically generates a test suite for + macro_rules! test_all { + ($func_name:ident, $mod_name:ident) => { + mod $mod_name { + $func_name!(test_basic_operations); + $func_name!(test_get_channel_list); + $func_name!(test_settings); + $func_name!(test_update); + $func_name!(test_feed_pagination); + $func_name!(test_webmention_addition); + $func_name!(test_pretty_permalinks); + } + }; + } + macro_rules! file_test { + ($func_name:ident) => { + #[tokio::test] + #[tracing_test::traced_test] + async fn $func_name() { + let tempdir = tempfile::tempdir().expect("Failed to create tempdir"); + let backend = super::super::FileStorage::new( + tempdir.path().to_path_buf() + ) + .await + .unwrap(); + super::$func_name(backend).await + } + }; + } + + macro_rules! postgres_test { + ($func_name:ident) => { + #[cfg(feature = "sqlx")] + #[sqlx::test] + #[tracing_test::traced_test] + async fn $func_name( + pool_opts: sqlx::postgres::PgPoolOptions, + connect_opts: sqlx::postgres::PgConnectOptions + ) -> Result<(), sqlx::Error> { + let db = { + //use sqlx::ConnectOptions; + //connect_opts.log_statements(log::LevelFilter::Debug); + + pool_opts.connect_with(connect_opts).await? 
+ }; + let backend = super::super::PostgresStorage::from_pool(db).await.unwrap(); + + Ok(super::$func_name(backend).await) + } + }; + } + + test_all!(file_test, file); + test_all!(postgres_test, postgres); +} diff --git a/src/database/postgres/mod.rs b/src/database/postgres/mod.rs new file mode 100644 index 0000000..9176d12 --- /dev/null +++ b/src/database/postgres/mod.rs @@ -0,0 +1,416 @@ +#![allow(unused_variables)] +use std::borrow::Cow; +use std::str::FromStr; + +use kittybox_util::{MicropubChannel, MentionType}; +use sqlx::{PgPool, Executor}; +use crate::micropub::{MicropubUpdate, MicropubPropertyDeletion}; + +use super::settings::Setting; +use super::{Storage, Result, StorageError, ErrorKind}; + +static MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!(); + +impl From for StorageError { + fn from(value: sqlx::Error) -> Self { + Self::with_source( + super::ErrorKind::Backend, + Cow::Owned(format!("sqlx error: {}", &value)), + Box::new(value) + ) + } +} + +impl From for StorageError { + fn from(value: sqlx::migrate::MigrateError) -> Self { + Self::with_source( + super::ErrorKind::Backend, + Cow::Owned(format!("sqlx migration error: {}", &value)), + Box::new(value) + ) + } +} + +#[derive(Debug, Clone)] +pub struct PostgresStorage { + db: PgPool +} + +impl PostgresStorage { + /// Construct a new [`PostgresStorage`] from an URI string and run + /// migrations on the database. + /// + /// If `PGPASS_FILE` environment variable is defined, read the + /// password from the file at the specified path. If, instead, + /// the `PGPASS` environment variable is present, read the + /// password from it. + pub async fn new(uri: &str) -> Result { + tracing::debug!("Postgres URL: {uri}"); + let mut options = sqlx::postgres::PgConnectOptions::from_str(uri)? 
+ .options([("search_path", "kittybox")]); + if let Ok(password_file) = std::env::var("PGPASS_FILE") { + let password = tokio::fs::read_to_string(password_file).await.unwrap(); + options = options.password(&password); + } else if let Ok(password) = std::env::var("PGPASS") { + options = options.password(&password) + } + Self::from_pool( + sqlx::postgres::PgPoolOptions::new() + .max_connections(50) + .connect_with(options) + .await? + ).await + + } + + /// Construct a [`PostgresStorage`] from a [`sqlx::PgPool`], + /// running appropriate migrations. + pub async fn from_pool(db: sqlx::PgPool) -> Result { + db.execute(sqlx::query("CREATE SCHEMA IF NOT EXISTS kittybox")).await?; + MIGRATOR.run(&db).await?; + Ok(Self { db }) + } +} + +#[async_trait::async_trait] +impl Storage for PostgresStorage { + #[tracing::instrument(skip(self))] + async fn post_exists(&self, url: &str) -> Result { + sqlx::query_as::<_, (bool,)>("SELECT exists(SELECT 1 FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? $1)") + .bind(url) + .fetch_one(&self.db) + .await + .map(|v| v.0) + .map_err(|err| err.into()) + } + + #[tracing::instrument(skip(self))] + async fn get_post(&self, url: &str) -> Result> { + sqlx::query_as::<_, (serde_json::Value,)>("SELECT mf2 FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? 
$1") + .bind(url) + .fetch_optional(&self.db) + .await + .map(|v| v.map(|v| v.0)) + .map_err(|err| err.into()) + + } + + #[tracing::instrument(skip(self))] + async fn put_post(&self, post: &'_ serde_json::Value, user: &'_ str) -> Result<()> { + tracing::debug!("New post: {}", post); + sqlx::query("INSERT INTO kittybox.mf2_json (uid, mf2, owner) VALUES ($1 #>> '{properties,uid,0}', $1, $2)") + .bind(post) + .bind(user) + .execute(&self.db) + .await + .map(|_| ()) + .map_err(Into::into) + } + + #[tracing::instrument(skip(self))] + async fn add_to_feed(&self, feed: &'_ str, post: &'_ str) -> Result<()> { + tracing::debug!("Inserting {} into {}", post, feed); + sqlx::query("INSERT INTO kittybox.children (parent, child) VALUES ($1, $2) ON CONFLICT DO NOTHING") + .bind(feed) + .bind(post) + .execute(&self.db) + .await + .map(|_| ()) + .map_err(Into::into) + } + + #[tracing::instrument(skip(self))] + async fn remove_from_feed(&self, feed: &'_ str, post: &'_ str) -> Result<()> { + sqlx::query("DELETE FROM kittybox.children WHERE parent = $1 AND child = $2") + .bind(feed) + .bind(post) + .execute(&self.db) + .await + .map_err(Into::into) + .map(|_| ()) + } + + #[tracing::instrument(skip(self))] + async fn add_or_update_webmention(&self, target: &str, mention_type: MentionType, mention: serde_json::Value) -> Result<()> { + let mut txn = self.db.begin().await?; + + let (uid, mut post) = sqlx::query_as::<_, (String, serde_json::Value)>("SELECT uid, mf2 FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? $1 FOR UPDATE") + .bind(target) + .fetch_optional(&mut *txn) + .await? + .ok_or(StorageError::from_static( + ErrorKind::NotFound, + "The specified post wasn't found in the database." 
+ ))?; + + tracing::debug!("Loaded post for target {} with uid {}", target, uid); + + let key: &'static str = match mention_type { + MentionType::Reply => "comment", + MentionType::Like => "like", + MentionType::Repost => "repost", + MentionType::Bookmark => "bookmark", + MentionType::Mention => "mention", + }; + + tracing::debug!("Mention type -> key: {}", key); + + let mention_uid = mention["properties"]["uid"][0].clone(); + if let Some(values) = post["properties"][key].as_array_mut() { + for value in values.iter_mut() { + if value["properties"]["uid"][0] == mention_uid { + *value = mention; + break; + } + } + } else { + post["properties"][key] = serde_json::Value::Array(vec![mention]); + } + + sqlx::query("UPDATE kittybox.mf2_json SET mf2 = $2 WHERE uid = $1") + .bind(uid) + .bind(post) + .execute(&mut *txn) + .await?; + + txn.commit().await.map_err(Into::into) + } + #[tracing::instrument(skip(self))] + async fn update_post(&self, url: &'_ str, update: MicropubUpdate) -> Result<()> { + tracing::debug!("Updating post {}", url); + let mut txn = self.db.begin().await?; + let (uid, mut post) = sqlx::query_as::<_, (String, serde_json::Value)>("SELECT uid, mf2 FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? $1 FOR UPDATE") + .bind(url) + .fetch_optional(&mut *txn) + .await? + .ok_or(StorageError::from_static( + ErrorKind::NotFound, + "The specified post wasn't found in the database." 
+ ))?; + + if let Some(MicropubPropertyDeletion::Properties(ref delete)) = update.delete { + if let Some(props) = post["properties"].as_object_mut() { + for key in delete { + props.remove(key); + } + } + } else if let Some(MicropubPropertyDeletion::Values(ref delete)) = update.delete { + if let Some(props) = post["properties"].as_object_mut() { + for (key, values) in delete { + if let Some(prop) = props.get_mut(key).and_then(serde_json::Value::as_array_mut) { + prop.retain(|v| { values.iter().all(|i| i != v) }) + } + } + } + } + if let Some(replace) = update.replace { + if let Some(props) = post["properties"].as_object_mut() { + for (key, value) in replace { + props.insert(key, serde_json::Value::Array(value)); + } + } + } + if let Some(add) = update.add { + if let Some(props) = post["properties"].as_object_mut() { + for (key, value) in add { + if let Some(prop) = props.get_mut(&key).and_then(serde_json::Value::as_array_mut) { + prop.extend_from_slice(value.as_slice()); + } else { + props.insert(key, serde_json::Value::Array(value)); + } + } + } + } + + sqlx::query("UPDATE kittybox.mf2_json SET mf2 = $2 WHERE uid = $1") + .bind(uid) + .bind(post) + .execute(&mut *txn) + .await?; + + txn.commit().await.map_err(Into::into) + } + + #[tracing::instrument(skip(self))] + async fn get_channels(&self, user: &'_ str) -> Result> { + /*sqlx::query_as::<_, MicropubChannel>("SELECT name, uid FROM kittybox.channels WHERE owner = $1") + .bind(user) + .fetch_all(&self.db) + .await + .map_err(|err| err.into())*/ + sqlx::query_as::<_, MicropubChannel>(r#"SELECT mf2 #>> '{properties,name,0}' as name, uid FROM kittybox.mf2_json WHERE '["h-feed"]'::jsonb @> mf2['type'] AND owner = $1"#) + .bind(user) + .fetch_all(&self.db) + .await + .map_err(|err| err.into()) + } + + #[tracing::instrument(skip(self))] + async fn read_feed_with_limit( + &self, + url: &'_ str, + after: &'_ Option, + limit: usize, + user: &'_ Option, + ) -> Result> { + let mut feed = match sqlx::query_as::<_, 
(serde_json::Value,)>(" +SELECT jsonb_set( + mf2, + '{properties,author,0}', + (SELECT mf2 FROM kittybox.mf2_json + WHERE uid = mf2 #>> '{properties,author,0}') +) FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? $1 +") + .bind(url) + .fetch_optional(&self.db) + .await? + .map(|v| v.0) + { + Some(feed) => feed, + None => return Ok(None) + }; + + let posts: Vec = { + let mut posts_iter = feed["children"] + .as_array() + .cloned() + .unwrap_or_default() + .into_iter() + .map(|s| s.as_str().unwrap().to_string()); + if let Some(after) = after { + for s in posts_iter.by_ref() { + if &s == after { + break; + } + } + }; + + posts_iter.take(limit).collect::>() + }; + feed["children"] = serde_json::Value::Array( + sqlx::query_as::<_, (serde_json::Value,)>(" +SELECT jsonb_set( + mf2, + '{properties,author,0}', + (SELECT mf2 FROM kittybox.mf2_json + WHERE uid = mf2 #>> '{properties,author,0}') +) FROM kittybox.mf2_json +WHERE uid = ANY($1) +ORDER BY mf2 #>> '{properties,published,0}' DESC +") + .bind(&posts[..]) + .fetch_all(&self.db) + .await? + .into_iter() + .map(|v| v.0) + .collect::>() + ); + + Ok(Some(feed)) + + } + + #[tracing::instrument(skip(self))] + async fn read_feed_with_cursor( + &self, + url: &'_ str, + cursor: Option<&'_ str>, + limit: usize, + user: Option<&'_ str> + ) -> Result)>> { + let mut txn = self.db.begin().await?; + sqlx::query("SET TRANSACTION ISOLATION LEVEL REPEATABLE READ, READ ONLY") + .execute(&mut *txn) + .await?; + tracing::debug!("Started txn: {:?}", txn); + let mut feed = match sqlx::query_scalar::<_, serde_json::Value>(" +SELECT kittybox.hydrate_author(mf2) FROM kittybox.mf2_json WHERE uid = $1 OR mf2['properties']['url'] ? $1 +") + .bind(url) + .fetch_optional(&mut *txn) + .await? + { + Some(feed) => feed, + None => return Ok(None) + }; + + // Don't query for children if this isn't a feed. + // + // The second query is very long and will probably be extremely + // expensive. 
It's best to skip it on types where it doesn't make sense + // (Kittybox doesn't support rendering children on non-feeds) + if !feed["type"].as_array().unwrap().iter().any(|t| *t == serde_json::json!("h-feed")) { + return Ok(Some((feed, None))); + } + + feed["children"] = sqlx::query_scalar::<_, serde_json::Value>(" +SELECT kittybox.hydrate_author(mf2) FROM kittybox.mf2_json +INNER JOIN kittybox.children +ON mf2_json.uid = children.child +WHERE + children.parent = $1 + AND ( + ( + (mf2 #>> '{properties,visibility,0}') = 'public' + OR + NOT (mf2['properties'] ? 'visibility') + ) + OR + ( + $3 != null AND ( + mf2['properties']['audience'] ? $3 + OR mf2['properties']['author'] ? $3 + ) + ) + ) + AND ($4 IS NULL OR ((mf2_json.mf2 #>> '{properties,published,0}') < $4)) +ORDER BY (mf2_json.mf2 #>> '{properties,published,0}') DESC +LIMIT $2" + ) + .bind(url) + .bind(limit as i64) + .bind(user) + .bind(cursor) + .fetch_all(&mut *txn) + .await + .map(serde_json::Value::Array)?; + + let new_cursor = feed["children"].as_array().unwrap() + .last() + .map(|v| v["properties"]["published"][0].as_str().unwrap().to_owned()); + + txn.commit().await?; + + Ok(Some((feed, new_cursor))) + } + + #[tracing::instrument(skip(self))] + async fn delete_post(&self, url: &'_ str) -> Result<()> { + todo!() + } + + #[tracing::instrument(skip(self))] + async fn get_setting, 'a>(&'_ self, user: &'_ str) -> Result { + match sqlx::query_as::<_, (serde_json::Value,)>("SELECT kittybox.get_setting($1, $2)") + .bind(user) + .bind(S::ID) + .fetch_one(&self.db) + .await + { + Ok((value,)) => Ok(serde_json::from_value(value)?), + Err(err) => Err(err.into()) + } + } + + #[tracing::instrument(skip(self))] + async fn set_setting + 'a, 'a>(&self, user: &'a str, value: S::Data) -> Result<()> { + sqlx::query("SELECT kittybox.set_setting($1, $2, $3)") + .bind(user) + .bind(S::ID) + .bind(serde_json::to_value(S::new(value)).unwrap()) + .execute(&self.db) + .await + .map_err(Into::into) + .map(|_| ()) + } +} diff 
--git a/src/database/redis/edit_post.lua b/src/database/redis/edit_post.lua new file mode 100644 index 0000000..a398f8d --- /dev/null +++ b/src/database/redis/edit_post.lua @@ -0,0 +1,93 @@ +local posts = KEYS[1] +local update_desc = cjson.decode(ARGV[2]) +local post = cjson.decode(redis.call("HGET", posts, ARGV[1])) + +local delete_keys = {} +local delete_kvs = {} +local add_keys = {} + +if update_desc.replace ~= nil then + for k, v in pairs(update_desc.replace) do + table.insert(delete_keys, k) + add_keys[k] = v + end +end +if update_desc.delete ~= nil then + if update_desc.delete[0] == nil then + -- Table has string keys. Probably! + for k, v in pairs(update_desc.delete) do + delete_kvs[k] = v + end + else + -- Table has numeric keys. Probably! + for i, v in ipairs(update_desc.delete) do + table.insert(delete_keys, v) + end + end +end +if update_desc.add ~= nil then + for k, v in pairs(update_desc.add) do + add_keys[k] = v + end +end + +for i, v in ipairs(delete_keys) do + post["properties"][v] = nil + -- TODO delete URL links +end + +for k, v in pairs(delete_kvs) do + local index = -1 + if k == "children" then + for j, w in ipairs(post[k]) do + if w == v then + index = j + break + end + end + if index > -1 then + table.remove(post[k], index) + end + else + for j, w in ipairs(post["properties"][k]) do + if w == v then + index = j + break + end + end + if index > -1 then + table.remove(post["properties"][k], index) + -- TODO delete URL links + end + end +end + +for k, v in pairs(add_keys) do + if k == "children" then + if post["children"] == nil then + post["children"] = {} + end + for i, w in ipairs(v) do + table.insert(post["children"], 1, w) + end + else + if post["properties"][k] == nil then + post["properties"][k] = {} + end + for i, w in ipairs(v) do + table.insert(post["properties"][k], w) + end + if k == "url" then + redis.call("HSET", posts, v, cjson.encode({ see_other = post["properties"]["uid"][1] })) + elseif k == "channel" then + local feed = 
cjson.decode(redis.call("HGET", posts, v)) + table.insert(feed["children"], 1, post["properties"]["uid"][1]) + redis.call("HSET", posts, v, cjson.encode(feed)) + end + end +end + +local encoded = cjson.encode(post) +redis.call("SET", "debug", encoded) +redis.call("HSET", posts, post["properties"]["uid"][1], encoded) +return \ No newline at end of file diff --git a/src/database/redis/mod.rs b/src/database/redis/mod.rs new file mode 100644 index 0000000..39ee852 --- /dev/null +++ b/src/database/redis/mod.rs @@ -0,0 +1,398 @@ +use async_trait::async_trait; +use futures::stream; +use futures_util::FutureExt; +use futures_util::StreamExt; +use futures_util::TryStream; +use futures_util::TryStreamExt; +use lazy_static::lazy_static; +use log::error; +use mobc::Pool; +use mobc_redis::redis; +use mobc_redis::redis::AsyncCommands; +use mobc_redis::RedisConnectionManager; +use serde_json::json; +use std::time::Duration; + +use crate::database::{ErrorKind, MicropubChannel, Result, Storage, StorageError, filter_post}; +use crate::indieauth::User; + +struct RedisScripts { + edit_post: redis::Script, +} + +impl From for StorageError { + fn from(err: mobc_redis::redis::RedisError) -> Self { + Self { + msg: format!("{}", err), + source: Some(Box::new(err)), + kind: ErrorKind::Backend, + } + } +} +impl From> for StorageError { + fn from(err: mobc::Error) -> Self { + Self { + msg: format!("{}", err), + source: Some(Box::new(err)), + kind: ErrorKind::Backend, + } + } +} + +lazy_static! 
{ + static ref SCRIPTS: RedisScripts = RedisScripts { + edit_post: redis::Script::new(include_str!("./edit_post.lua")) + }; +} +/*#[cfg(feature(lazy_cell))] +static SCRIPTS_CELL: std::cell::LazyCell = std::cell::LazyCell::new(|| { + RedisScripts { + edit_post: redis::Script::new(include_str!("./edit_post.lua")) + } +});*/ + +#[derive(Clone)] +pub struct RedisStorage { + // note to future Vika: + // mobc::Pool is actually a fancy name for an Arc + // around a shared connection pool with a manager + // which makes it safe to implement [`Clone`] and + // not worry about new pools being suddenly made + // + // stop worrying and start coding, you dum-dum + redis: mobc::Pool, +} + +#[async_trait] +impl Storage for RedisStorage { + async fn get_setting<'a>(&self, setting: &'a str, user: &'a str) -> Result { + let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; + Ok(conn + .hget::(format!("settings_{}", user), setting) + .await?) + } + + async fn set_setting<'a>(&self, setting: &'a str, user: &'a str, value: &'a str) -> Result<()> { + let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; + Ok(conn + .hset::(format!("settings_{}", user), setting, value) + .await?) + } + + async fn delete_post<'a>(&self, url: &'a str) -> Result<()> { + let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; + Ok(conn.hdel::<&str, &str, ()>("posts", url).await?) + } + + async fn post_exists(&self, url: &str) -> Result { + let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; + Ok(conn.hexists::<&str, &str, bool>("posts", url).await?) 
+ } + + async fn get_post(&self, url: &str) -> Result> { + let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; + match conn + .hget::<&str, &str, Option>("posts", url) + .await? + { + Some(val) => { + let parsed = serde_json::from_str::(&val)?; + if let Some(new_url) = parsed["see_other"].as_str() { + match conn + .hget::<&str, &str, Option>("posts", new_url) + .await? + { + Some(val) => Ok(Some(serde_json::from_str::(&val)?)), + None => Ok(None), + } + } else { + Ok(Some(parsed)) + } + } + None => Ok(None), + } + } + + async fn get_channels(&self, user: &User) -> Result> { + let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; + let channels = conn + .smembers::>("channels_".to_string() + user.me.as_str()) + .await?; + // TODO: use streams here instead of this weird thing... how did I even write this?! 
+ Ok(futures_util::future::join_all( + channels + .iter() + .map(|channel| { + self.get_post(channel).map(|result| result.unwrap()).map( + |post: Option| { + post.map(|post| MicropubChannel { + uid: post["properties"]["uid"][0].as_str().unwrap().to_string(), + name: post["properties"]["name"][0].as_str().unwrap().to_string(), + }) + }, + ) + }) + .collect::>(), + ) + .await + .into_iter() + .flatten() + .collect::>()) + } + + async fn put_post<'a>(&self, post: &'a serde_json::Value, user: &'a str) -> Result<()> { + let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; + let key: &str; + match post["properties"]["uid"][0].as_str() { + Some(uid) => key = uid, + None => { + return Err(StorageError::new( + ErrorKind::BadRequest, + "post doesn't have a UID", + )) + } + } + conn.hset::<&str, &str, String, ()>("posts", key, post.to_string()) + .await?; + if post["properties"]["url"].is_array() { + for url in post["properties"]["url"] + .as_array() + .unwrap() + .iter() + .map(|i| i.as_str().unwrap().to_string()) + { + if url != key && url.starts_with(user) { + conn.hset::<&str, &str, String, ()>( + "posts", + &url, + json!({ "see_other": key }).to_string(), + ) + .await?; + } + } + } + if post["type"] + .as_array() + .unwrap() + .iter() + .any(|i| i == "h-feed") + { + // This is a feed. Add it to the channels array if it's not already there. + conn.sadd::( + "channels_".to_string() + post["properties"]["author"][0].as_str().unwrap(), + key, + ) + .await? + } + Ok(()) + } + + async fn read_feed_with_limit<'a>( + &self, + url: &'a str, + after: &'a Option, + limit: usize, + user: &'a Option, + ) -> Result> { + let mut conn = self.redis.get().await?; + let mut feed; + match conn + .hget::<&str, &str, Option>("posts", url) + .await + .map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))? 
+ { + Some(post) => feed = serde_json::from_str::(&post)?, + None => return Ok(None), + } + if feed["see_other"].is_string() { + match conn + .hget::<&str, &str, Option>("posts", feed["see_other"].as_str().unwrap()) + .await? + { + Some(post) => feed = serde_json::from_str::(&post)?, + None => return Ok(None), + } + } + if let Some(post) = filter_post(feed, user) { + feed = post + } else { + return Err(StorageError::new( + ErrorKind::PermissionDenied, + "specified user cannot access this post", + )); + } + if feed["children"].is_array() { + let children = feed["children"].as_array().unwrap(); + let mut posts_iter = children.iter().map(|i| i.as_str().unwrap().to_string()); + if after.is_some() { + loop { + let i = posts_iter.next(); + if &i == after { + break; + } + } + } + async fn fetch_post_for_feed(url: String) -> Option { + return Some(serde_json::json!({})); + } + let posts = stream::iter(posts_iter) + .map(|url: String| async move { + return Ok(fetch_post_for_feed(url).await); + /*match self.redis.get().await { + Ok(mut conn) => { + match conn.hget::<&str, &str, Option>("posts", &url).await { + Ok(post) => match post { + Some(post) => { + Ok(Some(serde_json::from_str(&post)?)) + } + // Happens because of a broken link (result of an improper deletion?) 
+ None => Ok(None), + }, + Err(err) => Err(StorageError::with_source(ErrorKind::Backend, "Error executing a Redis command", Box::new(err))) + } + } + Err(err) => Err(StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(err))) + }*/ + }) + // TODO: determine the optimal value for this buffer + // It will probably depend on how often can you encounter a private post on the page + // It shouldn't be too large, or we'll start fetching too many posts from the database + // It MUST NOT be larger than the typical page size + // It MUST NOT be a significant amount of the connection pool size + //.buffered(std::cmp::min(3, limit)) + // Hack to unwrap the Option and sieve out broken links + // Broken links return None, and Stream::filter_map skips all Nones. + // I wonder if one can use try_flatten() here somehow akin to iters + .try_filter_map(|post| async move { Ok(post) }) + .try_filter_map(|post| async move { + Ok(filter_post(post, user)) + }) + .take(limit); + match posts.try_collect::>().await { + Ok(posts) => feed["children"] = json!(posts), + Err(err) => { + let e = StorageError::with_source( + ErrorKind::Other, + "An error was encountered while processing the feed", + Box::new(err) + ); + error!("Error while assembling feed: {}", e); + return Err(e); + } + } + } + return Ok(Some(feed)); + } + + async fn update_post<'a>(&self, mut url: &'a str, update: serde_json::Value) -> Result<()> { + let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?; + if !conn + .hexists::<&str, &str, bool>("posts", url) + .await + .unwrap() + { + return Err(StorageError::new( + ErrorKind::NotFound, + "can't edit a non-existent post", + )); + } + let post: serde_json::Value = + serde_json::from_str(&conn.hget::<&str, &str, String>("posts", url).await?)?; + if let Some(new_url) = post["see_other"].as_str() { + url = new_url + } + Ok(SCRIPTS + 
.edit_post + .key("posts") + .arg(url) + .arg(update.to_string()) + .invoke_async::<_, ()>(&mut conn as &mut redis::aio::Connection) + .await?) + } +} + +impl RedisStorage { + /// Create a new RedisDatabase that will connect to Redis at `redis_uri` to store data. + pub async fn new(redis_uri: String) -> Result { + match redis::Client::open(redis_uri) { + Ok(client) => Ok(Self { + redis: Pool::builder() + .max_open(20) + .max_idle(5) + .get_timeout(Some(Duration::from_secs(3))) + .max_lifetime(Some(Duration::from_secs(120))) + .build(RedisConnectionManager::new(client)), + }), + Err(e) => Err(e.into()), + } + } + + pub async fn conn(&self) -> Result> { + self.redis.get().await.map_err(|e| StorageError::with_source( + ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e) + )) + } +} + +#[cfg(test)] +pub mod tests { + use mobc_redis::redis; + use std::process; + use std::time::Duration; + + pub struct RedisInstance { + // We just need to hold on to it so it won't get dropped and remove the socket + _tempdir: tempdir::TempDir, + uri: String, + child: std::process::Child, + } + impl Drop for RedisInstance { + fn drop(&mut self) { + self.child.kill().expect("Failed to kill the child!"); + } + } + impl RedisInstance { + pub fn uri(&self) -> &str { + &self.uri + } + } + + pub async fn get_redis_instance() -> RedisInstance { + let tempdir = tempdir::TempDir::new("redis").expect("failed to create tempdir"); + let socket = tempdir.path().join("redis.sock"); + let redis_child = process::Command::new("redis-server") + .current_dir(&tempdir) + .arg("--port") + .arg("0") + .arg("--unixsocket") + .arg(&socket) + .stdout(process::Stdio::null()) + .stderr(process::Stdio::null()) + .spawn() + .expect("Failed to spawn Redis"); + println!("redis+unix:///{}", socket.to_str().unwrap()); + let uri = format!("redis+unix:///{}", socket.to_str().unwrap()); + // There should be a slight delay, we need to wait for Redis to spin up + let client = 
redis::Client::open(uri.clone()).unwrap(); + let millisecond = Duration::from_millis(1); + let mut retries: usize = 0; + const MAX_RETRIES: usize = 60 * 1000/*ms*/; + while let Err(err) = client.get_connection() { + if err.is_connection_refusal() { + async_std::task::sleep(millisecond).await; + retries += 1; + if retries > MAX_RETRIES { + panic!("Timeout waiting for Redis, last error: {}", err); + } + } else { + panic!("Could not connect: {}", err); + } + } + + RedisInstance { + uri, + child: redis_child, + _tempdir: tempdir, + } + } +} diff --git a/src/frontend/login.rs b/src/frontend/login.rs new file mode 100644 index 0000000..c693899 --- /dev/null +++ b/src/frontend/login.rs @@ -0,0 +1,333 @@ +use http_types::Mime; +use log::{debug, error}; +use rand::Rng; +use serde::{Deserialize, Serialize}; +use sha2::{Digest, Sha256}; +use std::convert::TryInto; +use std::str::FromStr; + +use crate::frontend::templates::Template; +use crate::frontend::{FrontendError, IndiewebEndpoints}; +use crate::{database::Storage, ApplicationState}; +use kittybox_frontend_renderer::LoginPage; + +pub async fn form(req: Request>) -> Result { + let owner = req.url().origin().ascii_serialization() + "/"; + let storage = &req.state().storage; + let authorization_endpoint = req.state().authorization_endpoint.to_string(); + let token_endpoint = req.state().token_endpoint.to_string(); + let blog_name = storage + .get_setting("site_name", &owner) + .await + .unwrap_or_else(|_| "Kitty Box!".to_string()); + let feeds = storage.get_channels(&owner).await.unwrap_or_default(); + + Ok(Response::builder(200) + .body( + Template { + title: "Sign in with IndieAuth", + blog_name: &blog_name, + endpoints: IndiewebEndpoints { + authorization_endpoint, + token_endpoint, + webmention: None, + microsub: None, + }, + feeds, + user: req.session().get("user"), + content: LoginPage {}.to_string(), + } + .to_string(), + ) + .content_type("text/html; charset=utf-8") + .build()) +} + +#[derive(Serialize, 
Deserialize)] +struct LoginForm { + url: String, +} + +#[derive(Serialize, Deserialize)] +struct IndieAuthClientState { + /// A random value to protect from CSRF attacks. + nonce: String, + /// The user's initial "me" value. + me: String, + /// Authorization endpoint used. + authorization_endpoint: String, +} + +#[derive(Serialize, Deserialize)] +struct IndieAuthRequestParams { + response_type: String, // can only have "code". TODO make an enum + client_id: String, // always a URL. TODO consider making a URL + redirect_uri: surf::Url, // callback URI for IndieAuth + state: String, // CSRF protection, should include randomness and be passed through + code_challenge: String, // base64-encoded PKCE challenge + code_challenge_method: String, // usually "S256". TODO make an enum + scope: Option, // oAuth2 scopes to grant, + me: surf::Url, // User's entered profile URL +} + +/// Handle login requests. Find the IndieAuth authorization endpoint and redirect to it. +pub async fn handler(mut req: Request>) -> Result { + let content_type = req.content_type(); + if content_type.is_none() { + return Err(FrontendError::with_code(400, "Use the login form, Luke.").into()); + } + if content_type.unwrap() != Mime::from_str("application/x-www-form-urlencoded").unwrap() { + return Err( + FrontendError::with_code(400, "Login form results must be a urlencoded form").into(), + ); + } + + let form = req.body_form::().await?; // FIXME check if it returns 400 or 500 on error + let homepage_uri = surf::Url::parse(&form.url)?; + let http = &req.state().http_client; + + let mut fetch_response = http.get(&homepage_uri).send().await?; + if fetch_response.status() != 200 { + return Err(FrontendError::with_code( + 500, + "Error fetching your authorization endpoint. 
Check if your website's okay.", + ) + .into()); + } + + let mut authorization_endpoint: Option = None; + if let Some(links) = fetch_response.header("Link") { + // NOTE: this is the same Link header parser used in src/micropub/post.rs:459. + // One should refactor it to a function to use independently and improve later + for link in links.iter().flat_map(|i| i.as_str().split(',')) { + debug!("Trying to match {} as authorization_endpoint", link); + let mut split_link = link.split(';'); + + match split_link.next() { + Some(uri) => { + if let Some(uri) = uri.strip_prefix('<').and_then(|uri| uri.strip_suffix('>')) { + debug!("uri: {}", uri); + for prop in split_link { + debug!("prop: {}", prop); + let lowercased = prop.to_ascii_lowercase(); + let trimmed = lowercased.trim(); + if trimmed == "rel=\"authorization_endpoint\"" + || trimmed == "rel=authorization_endpoint" + { + if let Ok(endpoint) = homepage_uri.join(uri) { + debug!( + "Found authorization endpoint {} for user {}", + endpoint, + homepage_uri.as_str() + ); + authorization_endpoint = Some(endpoint); + break; + } + } + } + } + } + None => continue, + } + } + } + // If the authorization_endpoint is still not found after the Link parsing gauntlet, + // bring out the big guns and parse HTML to find it. + if authorization_endpoint.is_none() { + let body = fetch_response.body_string().await?; + let pattern = + easy_scraper::Pattern::new(r#""#) + .expect("Cannot parse the pattern for authorization_endpoint"); + let matches = pattern.matches(&body); + debug!("Matches for authorization_endpoint in HTML: {:?}", matches); + if !matches.is_empty() { + if let Ok(endpoint) = homepage_uri.join(&matches[0]["url"]) { + debug!( + "Found authorization endpoint {} for user {}", + endpoint, + homepage_uri.as_str() + ); + authorization_endpoint = Some(endpoint) + } + } + }; + // If even after this the authorization endpoint is still not found, bail out. 
+ if authorization_endpoint.is_none() { + error!( + "Couldn't find authorization_endpoint for {}", + homepage_uri.as_str() + ); + return Err(FrontendError::with_code( + 400, + "Your website doesn't support the IndieAuth protocol.", + ) + .into()); + } + let mut authorization_endpoint: surf::Url = authorization_endpoint.unwrap(); + let mut rng = rand::thread_rng(); + let state: String = data_encoding::BASE64URL.encode( + serde_urlencoded::to_string(IndieAuthClientState { + nonce: (0..8) + .map(|_| { + let idx = rng.gen_range(0..INDIEAUTH_PKCE_CHARSET.len()); + INDIEAUTH_PKCE_CHARSET[idx] as char + }) + .collect(), + me: homepage_uri.to_string(), + authorization_endpoint: authorization_endpoint.to_string(), + })? + .as_bytes(), + ); + // PKCE code generation + let code_verifier: String = (0..128) + .map(|_| { + let idx = rng.gen_range(0..INDIEAUTH_PKCE_CHARSET.len()); + INDIEAUTH_PKCE_CHARSET[idx] as char + }) + .collect(); + let mut hasher = Sha256::new(); + hasher.update(code_verifier.as_bytes()); + let code_challenge: String = data_encoding::BASE64URL.encode(&hasher.finalize()); + + authorization_endpoint.set_query(Some(&serde_urlencoded::to_string( + IndieAuthRequestParams { + response_type: "code".to_string(), + client_id: req.url().origin().ascii_serialization(), + redirect_uri: req.url().join("login/callback")?, + state: state.clone(), + code_challenge, + code_challenge_method: "S256".to_string(), + scope: Some("profile".to_string()), + me: homepage_uri, + }, + )?)); + + let cookies = vec![ + format!( + r#"indieauth_state="{}"; Same-Site: None; Secure; Max-Age: 600"#, + state + ), + format!( + r#"indieauth_code_verifier="{}"; Same-Site: None; Secure; Max-Age: 600"#, + code_verifier + ), + ]; + + let cookie_header = cookies + .iter() + .map(|i| -> http_types::headers::HeaderValue { (i as &str).try_into().unwrap() }) + .collect::>(); + + Ok(Response::builder(302) + .header("Location", authorization_endpoint.to_string()) + .header("Set-Cookie", &*cookie_header) + 
.build()) +} + +const INDIEAUTH_PKCE_CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\ + abcdefghijklmnopqrstuvwxyz\ + 1234567890-._~"; + +#[derive(Deserialize)] +struct IndieAuthCallbackResponse { + code: Option, + error: Option, + error_description: Option, + #[allow(dead_code)] + error_uri: Option, + // This needs to be further decoded to receive state back and will always be present + state: String, +} + +impl IndieAuthCallbackResponse { + fn is_successful(&self) -> bool { + self.code.is_some() + } +} + +#[derive(Serialize, Deserialize)] +struct IndieAuthCodeRedeem { + grant_type: String, + code: String, + client_id: String, + redirect_uri: String, + code_verifier: String, +} + +#[derive(Serialize, Deserialize)] +struct IndieWebProfile { + name: Option, + url: Option, + email: Option, + photo: Option, +} + +#[derive(Serialize, Deserialize)] +struct IndieAuthResponse { + me: String, + scope: Option, + access_token: Option, + token_type: Option, + profile: Option, +} + +/// Handle IndieAuth parameters, fetch the final h-card and redirect the user to the homepage. +pub async fn callback(mut req: Request>) -> Result { + let params: IndieAuthCallbackResponse = req.query()?; + let http: &surf::Client = &req.state().http_client; + let origin = req.url().origin().ascii_serialization(); + + if req.cookie("indieauth_state").unwrap().value() != params.state { + return Err(FrontendError::with_code(400, "The state doesn't match. A possible CSRF attack was prevented. 
Please try again later.").into()); + } + let state: IndieAuthClientState = + serde_urlencoded::from_bytes(&data_encoding::BASE64URL.decode(params.state.as_bytes())?)?; + + if !params.is_successful() { + return Err(FrontendError::with_code( + 400, + &format!( + "The authorization endpoint indicated a following error: {:?}: {:?}", + ¶ms.error, ¶ms.error_description + ), + ) + .into()); + } + + let authorization_endpoint = surf::Url::parse(&state.authorization_endpoint).unwrap(); + let mut code_response = http + .post(authorization_endpoint) + .body_string(serde_urlencoded::to_string(IndieAuthCodeRedeem { + grant_type: "authorization_code".to_string(), + code: params.code.unwrap().to_string(), + client_id: origin.to_string(), + redirect_uri: origin + "/login/callback", + code_verifier: req + .cookie("indieauth_code_verifier") + .unwrap() + .value() + .to_string(), + })?) + .header("Content-Type", "application/x-www-form-urlencoded") + .header("Accept", "application/json") + .send() + .await?; + + if code_response.status() != 200 { + return Err(FrontendError::with_code( + code_response.status(), + &format!( + "Authorization endpoint returned an error when redeeming the code: {}", + code_response.body_string().await? 
+ ), + ) + .into()); + } + + let json: IndieAuthResponse = code_response.body_json().await?; + let session = req.session_mut(); + session.insert("user", &json.me)?; + + // TODO redirect to the page user came from + Ok(Response::builder(302).header("Location", "/").build()) +} diff --git a/src/frontend/mod.rs b/src/frontend/mod.rs new file mode 100644 index 0000000..7a43532 --- /dev/null +++ b/src/frontend/mod.rs @@ -0,0 +1,404 @@ +use crate::database::{Storage, StorageError}; +use axum::{ + extract::{Host, Path, Query}, + http::{StatusCode, Uri}, + response::IntoResponse, + Extension, +}; +use futures_util::FutureExt; +use serde::Deserialize; +use std::convert::TryInto; +use tracing::{debug, error}; +//pub mod login; +pub mod onboarding; + +use kittybox_frontend_renderer::{ + Entry, Feed, VCard, + ErrorPage, Template, MainPage, + POSTS_PER_PAGE +}; +pub use kittybox_frontend_renderer::assets::statics; + +#[derive(Debug, Deserialize)] +pub struct QueryParams { + after: Option, +} + +#[derive(Debug)] +struct FrontendError { + msg: String, + source: Option>, + code: StatusCode, +} + +impl FrontendError { + pub fn with_code(code: C, msg: &str) -> Self + where + C: TryInto, + { + Self { + msg: msg.to_string(), + source: None, + code: code.try_into().unwrap_or(StatusCode::INTERNAL_SERVER_ERROR), + } + } + pub fn msg(&self) -> &str { + &self.msg + } + pub fn code(&self) -> StatusCode { + self.code + } +} + +impl From for FrontendError { + fn from(err: StorageError) -> Self { + Self { + msg: "Database error".to_string(), + source: Some(Box::new(err)), + code: StatusCode::INTERNAL_SERVER_ERROR, + } + } +} + +impl std::error::Error for FrontendError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + self.source + .as_ref() + .map(|e| e.as_ref() as &(dyn std::error::Error + 'static)) + } +} + +impl std::fmt::Display for FrontendError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.msg)?; + if let Some(err) = 
std::error::Error::source(&self) { + write!(f, ": {}", err)?; + } + + Ok(()) + } +} + +/// Filter the post according to the value of `user`. +/// +/// Anonymous users cannot view private posts and protected locations; +/// Logged-in users can only view private posts targeted at them; +/// Logged-in users can't view private location data +#[tracing::instrument(skip(post), fields(post = %post))] +pub fn filter_post( + mut post: serde_json::Value, + user: Option<&str>, +) -> Option { + if post["properties"]["deleted"][0].is_string() { + tracing::debug!("Deleted post; returning tombstone instead"); + return Some(serde_json::json!({ + "type": post["type"], + "properties": { + "deleted": post["properties"]["deleted"] + } + })); + } + let empty_vec: Vec = vec![]; + let author_list = post["properties"]["author"] + .as_array() + .unwrap_or(&empty_vec) + .iter() + .map(|i| -> &str { + match i { + serde_json::Value::String(ref author) => author.as_str(), + mf2 => mf2["properties"]["uid"][0].as_str().unwrap() + } + }).collect::>(); + let visibility = post["properties"]["visibility"][0] + .as_str() + .unwrap_or("public"); + let audience = { + let mut audience = author_list.clone(); + audience.extend(post["properties"]["audience"] + .as_array() + .unwrap_or(&empty_vec) + .iter() + .map(|i| i.as_str().unwrap())); + + audience + }; + tracing::debug!("post audience = {:?}", audience); + if (visibility == "private" && !audience.iter().any(|i| Some(*i) == user)) + || (visibility == "protected" && user.is_none()) + { + return None; + } + if post["properties"]["location"].is_array() { + let location_visibility = post["properties"]["location-visibility"][0] + .as_str() + .unwrap_or("private"); + tracing::debug!("Post contains location, location privacy = {}", location_visibility); + let mut author = post["properties"]["author"] + .as_array() + .unwrap_or(&empty_vec) + .iter() + .map(|i| i.as_str().unwrap()); + if (location_visibility == "private" && !author.any(|i| Some(i) == user)) + 
|| (location_visibility == "protected" && user.is_none()) + { + post["properties"] + .as_object_mut() + .unwrap() + .remove("location"); + } + } + + match post["properties"]["author"].take() { + serde_json::Value::Array(children) => { + post["properties"]["author"] = serde_json::Value::Array( + children + .into_iter() + .filter_map(|post| if post.is_string() { + Some(post) + } else { + filter_post(post, user) + }) + .collect::>() + ); + }, + serde_json::Value::Null => {}, + other => post["properties"]["author"] = other + } + + match post["children"].take() { + serde_json::Value::Array(children) => { + post["children"] = serde_json::Value::Array( + children + .into_iter() + .filter_map(|post| filter_post(post, user)) + .collect::>() + ); + }, + serde_json::Value::Null => {}, + other => post["children"] = other + } + Some(post) +} + +async fn get_post_from_database( + db: &S, + url: &str, + after: Option, + user: &Option, +) -> std::result::Result<(serde_json::Value, Option), FrontendError> { + match db + .read_feed_with_cursor(url, after.as_deref(), POSTS_PER_PAGE, user.as_deref()) + .await + { + Ok(result) => match result { + Some((post, cursor)) => match filter_post(post, user.as_deref()) { + Some(post) => Ok((post, cursor)), + None => { + // TODO: Authentication + if user.is_some() { + Err(FrontendError::with_code( + StatusCode::FORBIDDEN, + "User authenticated AND forbidden to access this resource", + )) + } else { + Err(FrontendError::with_code( + StatusCode::UNAUTHORIZED, + "User needs to authenticate themselves", + )) + } + } + } + None => Err(FrontendError::with_code( + StatusCode::NOT_FOUND, + "Post not found in the database", + )), + }, + Err(err) => match err.kind() { + crate::database::ErrorKind::PermissionDenied => { + // TODO: Authentication + if user.is_some() { + Err(FrontendError::with_code( + StatusCode::FORBIDDEN, + "User authenticated AND forbidden to access this resource", + )) + } else { + Err(FrontendError::with_code( + 
StatusCode::UNAUTHORIZED, + "User needs to authenticate themselves", + )) + } + } + _ => Err(err.into()), + }, + } +} + +#[tracing::instrument(skip(db))] +pub async fn homepage( + Host(host): Host, + Query(query): Query, + Extension(db): Extension, +) -> impl IntoResponse { + let user = None; // TODO authentication + let path = format!("https://{}/", host); + let feed_path = format!("https://{}/feeds/main", host); + + match tokio::try_join!( + get_post_from_database(&db, &path, None, &user), + get_post_from_database(&db, &feed_path, query.after, &user) + ) { + Ok(((hcard, _), (hfeed, cursor))) => { + // Here, we know those operations can't really fail + // (or it'll be a transient failure that will show up on + // other requests anyway if it's serious...) + // + // btw is it more efficient to fetch these in parallel? + let (blogname, webring, channels) = tokio::join!( + db.get_setting::(&host) + .map(Result::unwrap_or_default), + + db.get_setting::(&host) + .map(Result::unwrap_or_default), + + db.get_channels(&host).map(|i| i.unwrap_or_default()) + ); + // Render the homepage + ( + StatusCode::OK, + [( + axum::http::header::CONTENT_TYPE, + r#"text/html; charset="utf-8""#, + )], + Template { + title: blogname.as_ref(), + blog_name: blogname.as_ref(), + feeds: channels, + user, + content: MainPage { + feed: &hfeed, + card: &hcard, + cursor: cursor.as_deref(), + webring: crate::database::settings::Setting::into_inner(webring) + } + .to_string(), + } + .to_string(), + ) + } + Err(err) => { + if err.code == StatusCode::NOT_FOUND { + debug!("Transferring to onboarding..."); + // Transfer to onboarding + ( + StatusCode::FOUND, + [(axum::http::header::LOCATION, "/.kittybox/onboarding")], + String::default(), + ) + } else { + error!("Error while fetching h-card and/or h-feed: {}", err); + // Return the error + let (blogname, channels) = tokio::join!( + db.get_setting::(&host) + .map(Result::unwrap_or_default), + + db.get_channels(&host).map(|i| i.unwrap_or_default()) + ); + 
+ ( + err.code(), + [( + axum::http::header::CONTENT_TYPE, + r#"text/html; charset="utf-8""#, + )], + Template { + title: blogname.as_ref(), + blog_name: blogname.as_ref(), + feeds: channels, + user, + content: ErrorPage { + code: err.code(), + msg: Some(err.msg().to_string()), + } + .to_string(), + } + .to_string(), + ) + } + } + } +} + +#[tracing::instrument(skip(db))] +pub async fn catchall( + Extension(db): Extension, + Host(host): Host, + Query(query): Query, + uri: Uri, +) -> impl IntoResponse { + let user = None; // TODO authentication + let path = url::Url::parse(&format!("https://{}/", host)) + .unwrap() + .join(uri.path()) + .unwrap(); + + match get_post_from_database(&db, path.as_str(), query.after, &user).await { + Ok((post, cursor)) => { + let (blogname, channels) = tokio::join!( + db.get_setting::(&host) + .map(Result::unwrap_or_default), + + db.get_channels(&host).map(|i| i.unwrap_or_default()) + ); + // Render the homepage + ( + StatusCode::OK, + [( + axum::http::header::CONTENT_TYPE, + r#"text/html; charset="utf-8""#, + )], + Template { + title: blogname.as_ref(), + blog_name: blogname.as_ref(), + feeds: channels, + user, + content: match post.pointer("/type/0").and_then(|i| i.as_str()) { + Some("h-entry") => Entry { post: &post }.to_string(), + Some("h-feed") => Feed { feed: &post, cursor: cursor.as_deref() }.to_string(), + Some("h-card") => VCard { card: &post }.to_string(), + unknown => { + unimplemented!("Template for MF2-JSON type {:?}", unknown) + } + }, + } + .to_string(), + ) + } + Err(err) => { + let (blogname, channels) = tokio::join!( + db.get_setting::(&host) + .map(Result::unwrap_or_default), + + db.get_channels(&host).map(|i| i.unwrap_or_default()) + ); + ( + err.code(), + [( + axum::http::header::CONTENT_TYPE, + r#"text/html; charset="utf-8""#, + )], + Template { + title: blogname.as_ref(), + blog_name: blogname.as_ref(), + feeds: channels, + user, + content: ErrorPage { + code: err.code(), + msg: Some(err.msg().to_owned()), + } + 
.to_string(), + } + .to_string(), + ) + } + } +} diff --git a/src/frontend/onboarding.rs b/src/frontend/onboarding.rs new file mode 100644 index 0000000..e44e866 --- /dev/null +++ b/src/frontend/onboarding.rs @@ -0,0 +1,181 @@ +use std::sync::Arc; + +use crate::database::{settings, Storage}; +use axum::{ + extract::{Extension, Host}, + http::StatusCode, + response::{Html, IntoResponse}, + Json, +}; +use kittybox_frontend_renderer::{ErrorPage, OnboardingPage, Template}; +use serde::Deserialize; +use tokio::{task::JoinSet, sync::Mutex}; +use tracing::{debug, error}; + +use super::FrontendError; + +pub async fn get() -> Html { + Html( + Template { + title: "Kittybox - Onboarding", + blog_name: "Kittybox", + feeds: vec![], + user: None, + content: OnboardingPage {}.to_string(), + } + .to_string(), + ) +} + +#[derive(Deserialize, Debug)] +struct OnboardingFeed { + slug: String, + name: String, +} + +#[derive(Deserialize, Debug)] +pub struct OnboardingData { + user: serde_json::Value, + first_post: serde_json::Value, + #[serde(default = "OnboardingData::default_blog_name")] + blog_name: String, + feeds: Vec, +} + +impl OnboardingData { + fn default_blog_name() -> String { + "Kitty Box!".to_owned() + } +} + +#[tracing::instrument(skip(db, http))] +async fn onboard( + db: D, + user_uid: url::Url, + data: OnboardingData, + http: reqwest::Client, + jobset: Arc>>, +) -> Result<(), FrontendError> { + // Create a user to pass to the backend + // At this point the site belongs to nobody, so it is safe to do + tracing::debug!("Creating user..."); + let user = kittybox_indieauth::TokenData { + me: user_uid.clone(), + client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), + scope: kittybox_indieauth::Scopes::new(vec![kittybox_indieauth::Scope::Create]), + iat: None, exp: None + }; + tracing::debug!("User data: {:?}", user); + + if data.user["type"][0] != "h-card" || data.first_post["type"][0] != "h-entry" { + return Err(FrontendError::with_code( + StatusCode::BAD_REQUEST, + 
"user and first_post should be an h-card and an h-entry", + )); + } + + tracing::debug!("Setting settings..."); + let user_domain = format!( + "{}{}", + user.me.host_str().unwrap(), + user.me.port() + .map(|port| format!(":{}", port)) + .unwrap_or_default() + ); + db.set_setting::(&user_domain, data.blog_name.to_owned()) + .await + .map_err(FrontendError::from)?; + + db.set_setting::(&user_domain, false) + .await + .map_err(FrontendError::from)?; + + let (_, hcard) = { + let mut hcard = data.user; + hcard["properties"]["uid"] = serde_json::json!([&user_uid]); + crate::micropub::normalize_mf2(hcard, &user) + }; + db.put_post(&hcard, user_domain.as_str()) + .await + .map_err(FrontendError::from)?; + + debug!("Creating feeds..."); + for feed in data.feeds { + if feed.name.is_empty() || feed.slug.is_empty() { + continue; + }; + debug!("Creating feed {} with slug {}", &feed.name, &feed.slug); + let (_, feed) = crate::micropub::normalize_mf2( + serde_json::json!({ + "type": ["h-feed"], + "properties": {"name": [feed.name], "mp-slug": [feed.slug]} + }), + &user, + ); + + db.put_post(&feed, user_uid.as_str()) + .await + .map_err(FrontendError::from)?; + } + let (uid, post) = crate::micropub::normalize_mf2(data.first_post, &user); + tracing::debug!("Posting first post {}...", uid); + crate::micropub::_post(&user, uid, post, db, http, jobset) + .await + .map_err(|e| FrontendError { + msg: "Error while posting the first post".to_string(), + source: Some(Box::new(e)), + code: StatusCode::INTERNAL_SERVER_ERROR, + })?; + + Ok(()) +} + +pub async fn post( + Extension(db): Extension, + Host(host): Host, + Extension(http): Extension, + Extension(jobset): Extension>>>, + Json(data): Json, +) -> axum::response::Response { + let user_uid = format!("https://{}/", host.as_str()); + + if db.post_exists(&user_uid).await.unwrap() { + IntoResponse::into_response((StatusCode::FOUND, [("Location", "/")])) + } else { + match onboard(db, user_uid.parse().unwrap(), data, http, jobset).await { + 
Ok(()) => IntoResponse::into_response((StatusCode::FOUND, [("Location", "/")])), + Err(err) => { + error!("Onboarding error: {}", err); + IntoResponse::into_response(( + err.code(), + Html( + Template { + title: "Kittybox - Onboarding", + blog_name: "Kittybox", + feeds: vec![], + user: None, + content: ErrorPage { + code: err.code(), + msg: Some(err.msg().to_string()), + } + .to_string(), + } + .to_string(), + ), + )) + } + } + } +} + +pub fn router( + database: S, + http: reqwest::Client, + jobset: Arc>>, +) -> axum::routing::MethodRouter { + axum::routing::get(get) + .post(post::) + .layer::<_, _, std::convert::Infallible>(axum::Extension(database)) + .layer::<_, _, std::convert::Infallible>(axum::Extension(http)) + .layer(axum::Extension(jobset)) +} diff --git a/src/indieauth/backend.rs b/src/indieauth/backend.rs new file mode 100644 index 0000000..534bcfb --- /dev/null +++ b/src/indieauth/backend.rs @@ -0,0 +1,105 @@ +use std::collections::HashMap; +use kittybox_indieauth::{ + AuthorizationRequest, TokenData +}; +pub use kittybox_util::auth::EnrolledCredential; + +type Result = std::io::Result; + +pub mod fs; +pub use fs::FileBackend; + +#[async_trait::async_trait] +pub trait AuthBackend: Clone + Send + Sync + 'static { + // Authorization code management. + /// Create a one-time OAuth2 authorization code for the passed + /// authorization request, and save it for later retrieval. + /// + /// Note for implementors: the [`AuthorizationRequest::me`] value + /// is guaranteed to be [`Some(url::Url)`][Option::Some] and can + /// be trusted to be correct and non-malicious. + async fn create_code(&self, data: AuthorizationRequest) -> Result; + /// Retreive an authorization request using the one-time + /// code. 
Implementations must sanitize the `code` field to + /// prevent exploits, and must check if the code should still be + /// valid at this point in time (validity interval is left up to + /// the implementation, but is recommended to be no more than 10 + /// minutes). + async fn get_code(&self, code: &str) -> Result>; + // Token management. + async fn create_token(&self, data: TokenData) -> Result; + async fn get_token(&self, website: &url::Url, token: &str) -> Result>; + async fn list_tokens(&self, website: &url::Url) -> Result>; + async fn revoke_token(&self, website: &url::Url, token: &str) -> Result<()>; + // Refresh token management. + async fn create_refresh_token(&self, data: TokenData) -> Result; + async fn get_refresh_token(&self, website: &url::Url, token: &str) -> Result>; + async fn list_refresh_tokens(&self, website: &url::Url) -> Result>; + async fn revoke_refresh_token(&self, website: &url::Url, token: &str) -> Result<()>; + // Password management. + /// Verify a password. + #[must_use] + async fn verify_password(&self, website: &url::Url, password: String) -> Result; + /// Enroll a password credential for a user. Only one password + /// credential must exist for a given user. + async fn enroll_password(&self, website: &url::Url, password: String) -> Result<()>; + /// List currently enrolled credential types for a given user. + async fn list_user_credential_types(&self, website: &url::Url) -> Result>; + // WebAuthn credential management. + #[cfg(feature = "webauthn")] + /// Enroll a WebAuthn authenticator public key for this user. + /// Multiple public keys may be saved for one user, corresponding + /// to different authenticators used by them. + /// + /// This function can also be used to overwrite a passkey with an + /// updated version after using + /// [webauthn::prelude::Passkey::update_credential()]. 
+ async fn enroll_webauthn(&self, website: &url::Url, credential: webauthn::prelude::Passkey) -> Result<()>; + #[cfg(feature = "webauthn")] + /// List currently enrolled WebAuthn authenticators for a given user. + async fn list_webauthn_pubkeys(&self, website: &url::Url) -> Result>; + #[cfg(feature = "webauthn")] + /// Persist registration challenge state for a little while so it + /// can be used later. + /// + /// Challenges saved in this manner MUST expire after a little + /// while. 10 minutes is recommended. + async fn persist_registration_challenge( + &self, + website: &url::Url, + state: webauthn::prelude::PasskeyRegistration + ) -> Result; + #[cfg(feature = "webauthn")] + /// Retrieve a persisted registration challenge. + /// + /// The challenge should be deleted after retrieval. + async fn retrieve_registration_challenge( + &self, + website: &url::Url, + challenge_id: &str + ) -> Result; + #[cfg(feature = "webauthn")] + /// Persist authentication challenge state for a little while so + /// it can be used later. + /// + /// Challenges saved in this manner MUST expire after a little + /// while. 10 minutes is recommended. + /// + /// To support multiple authentication options, this can return an + /// opaque token that should be set as a cookie. + async fn persist_authentication_challenge( + &self, + website: &url::Url, + state: webauthn::prelude::PasskeyAuthentication + ) -> Result; + #[cfg(feature = "webauthn")] + /// Retrieve a persisted authentication challenge. + /// + /// The challenge should be deleted after retrieval. 
+ async fn retrieve_authentication_challenge( + &self, + website: &url::Url, + challenge_id: &str + ) -> Result; + +} diff --git a/src/indieauth/backend/fs.rs b/src/indieauth/backend/fs.rs new file mode 100644 index 0000000..600e901 --- /dev/null +++ b/src/indieauth/backend/fs.rs @@ -0,0 +1,420 @@ +use std::{path::PathBuf, collections::HashMap, borrow::Cow, time::{SystemTime, Duration}}; + +use super::{AuthBackend, Result, EnrolledCredential}; +use async_trait::async_trait; +use kittybox_indieauth::{ + AuthorizationRequest, TokenData +}; +use serde::de::DeserializeOwned; +use tokio::{task::spawn_blocking, io::AsyncReadExt}; +#[cfg(feature = "webauthn")] +use webauthn::prelude::{Passkey, PasskeyRegistration, PasskeyAuthentication}; + +const CODE_LENGTH: usize = 16; +const TOKEN_LENGTH: usize = 128; +const CODE_DURATION: std::time::Duration = std::time::Duration::from_secs(600); + +#[derive(Clone, Debug)] +pub struct FileBackend { + path: PathBuf, +} + +impl FileBackend { + pub fn new>(path: T) -> Self { + Self { + path: path.into() + } + } + + /// Sanitize a filename, leaving only alphanumeric characters. + /// + /// Doesn't allocate a new string unless non-alphanumeric + /// characters are encountered. 
+ fn sanitize_for_path(filename: &'_ str) -> Cow<'_, str> { + if filename.chars().all(char::is_alphanumeric) { + Cow::Borrowed(filename) + } else { + let mut s = String::with_capacity(filename.len()); + + filename.chars() + .filter(|c| c.is_alphanumeric()) + .for_each(|c| s.push(c)); + + Cow::Owned(s) + } + } + + #[inline] + async fn serialize_to_file>>( + &self, + dir: &str, + basename: B, + length: usize, + data: T + ) -> Result { + let basename = basename.into(); + let has_ext = basename.is_some(); + let (filename, mut file) = kittybox_util::fs::mktemp( + self.path.join(dir), basename, length + ) + .await + .map(|(name, file)| (name, file.try_into_std().unwrap()))?; + + spawn_blocking(move || serde_json::to_writer(&mut file, &data)) + .await + .unwrap_or_else(|e| panic!( + "Panic while serializing {}: {}", + std::any::type_name::(), + e + )) + .map(move |_| { + (if has_ext { + filename + .extension() + + } else { + filename + .file_name() + }) + .unwrap() + .to_str() + .unwrap() + .to_owned() + }) + .map_err(|err| err.into()) + } + + #[inline] + async fn deserialize_from_file<'filename, 'this: 'filename, T, B>( + &'this self, + dir: &'filename str, + basename: B, + filename: &'filename str, + ) -> Result> + where + T: serde::de::DeserializeOwned + Send, + B: Into> + { + let basename = basename.into(); + let path = self.path + .join(dir) + .join(format!( + "{}{}{}", + basename.unwrap_or(""), + if basename.is_none() { "" } else { "." 
}, + FileBackend::sanitize_for_path(filename) + )); + + let data = match tokio::fs::File::open(&path).await { + Ok(mut file) => { + let mut buf = Vec::new(); + + file.read_to_end(&mut buf).await?; + + match serde_json::from_slice::<'_, T>(buf.as_slice()) { + Ok(data) => data, + Err(err) => return Err(err.into()) + } + }, + Err(err) => if err.kind() == std::io::ErrorKind::NotFound { + return Ok(None) + } else { + return Err(err) + } + }; + + let ctime = tokio::fs::metadata(&path).await?.created()?; + + Ok(Some((path, ctime, data))) + } + + #[inline] + fn url_to_dir(url: &url::Url) -> String { + let host = url.host_str().unwrap(); + let port = url.port() + .map(|port| Cow::Owned(format!(":{}", port))) + .unwrap_or(Cow::Borrowed("")); + + format!("{}{}", host, port) + } + + async fn list_files<'dir, 'this: 'dir, T: DeserializeOwned + Send>( + &'this self, + dir: &'dir str, + prefix: &'static str + ) -> Result> { + let dir = self.path.join(dir); + + let mut hashmap = HashMap::new(); + let mut readdir = match tokio::fs::read_dir(dir).await { + Ok(readdir) => readdir, + Err(err) => if err.kind() == std::io::ErrorKind::NotFound { + // empty hashmap + return Ok(hashmap); + } else { + return Err(err); + } + }; + while let Some(entry) = readdir.next_entry().await? 
{ + // safe to unwrap; filenames are alphanumeric + let filename = entry.file_name() + .into_string() + .expect("token filenames should be alphanumeric!"); + if let Some(token) = filename.strip_prefix(&format!("{}.", prefix)) { + match tokio::fs::File::open(entry.path()).await { + Ok(mut file) => { + let mut buf = Vec::new(); + + file.read_to_end(&mut buf).await?; + + match serde_json::from_slice::<'_, T>(buf.as_slice()) { + Ok(data) => hashmap.insert(token.to_string(), data), + Err(err) => { + tracing::error!( + "Error decoding token data from file {}: {}", + entry.path().display(), err + ); + continue; + } + }; + }, + Err(err) => if err.kind() == std::io::ErrorKind::NotFound { + continue + } else { + return Err(err) + } + } + } + } + + Ok(hashmap) + } +} + +#[async_trait] +impl AuthBackend for FileBackend { + // Authorization code management. + async fn create_code(&self, data: AuthorizationRequest) -> Result { + self.serialize_to_file("codes", None, CODE_LENGTH, data).await + } + + async fn get_code(&self, code: &str) -> Result> { + match self.deserialize_from_file("codes", None, FileBackend::sanitize_for_path(code).as_ref()).await? { + Some((path, ctime, data)) => { + if let Err(err) = tokio::fs::remove_file(path).await { + tracing::error!("Failed to clean up authorization code: {}", err); + } + // Err on the safe side in case of clock drift + if ctime.elapsed().unwrap_or(Duration::ZERO) > CODE_DURATION { + Ok(None) + } else { + Ok(Some(data)) + } + }, + None => Ok(None) + } + } + + // Token management. + async fn create_token(&self, data: TokenData) -> Result { + let dir = format!("{}/tokens", FileBackend::url_to_dir(&data.me)); + self.serialize_to_file(&dir, "access", TOKEN_LENGTH, data).await + } + + async fn get_token(&self, website: &url::Url, token: &str) -> Result> { + let dir = format!("{}/tokens", FileBackend::url_to_dir(website)); + match self.deserialize_from_file::( + &dir, "access", + FileBackend::sanitize_for_path(token).as_ref() + ).await? 
{ + Some((path, _, token)) => { + if token.expired() { + if let Err(err) = tokio::fs::remove_file(path).await { + tracing::error!("Failed to remove expired token: {}", err); + } + Ok(None) + } else { + Ok(Some(token)) + } + }, + None => Ok(None) + } + } + + async fn list_tokens(&self, website: &url::Url) -> Result> { + let dir = format!("{}/tokens", FileBackend::url_to_dir(website)); + self.list_files(&dir, "access").await + } + + async fn revoke_token(&self, website: &url::Url, token: &str) -> Result<()> { + match tokio::fs::remove_file( + self.path + .join(FileBackend::url_to_dir(website)) + .join("tokens") + .join(format!("access.{}", FileBackend::sanitize_for_path(token))) + ).await { + Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(()), + result => result + } + } + + // Refresh token management. + async fn create_refresh_token(&self, data: TokenData) -> Result { + let dir = format!("{}/tokens", FileBackend::url_to_dir(&data.me)); + self.serialize_to_file(&dir, "refresh", TOKEN_LENGTH, data).await + } + + async fn get_refresh_token(&self, website: &url::Url, token: &str) -> Result> { + let dir = format!("{}/tokens", FileBackend::url_to_dir(website)); + match self.deserialize_from_file::( + &dir, "refresh", + FileBackend::sanitize_for_path(token).as_ref() + ).await? 
{ + Some((path, _, token)) => { + if token.expired() { + if let Err(err) = tokio::fs::remove_file(path).await { + tracing::error!("Failed to remove expired token: {}", err); + } + Ok(None) + } else { + Ok(Some(token)) + } + }, + None => Ok(None) + } + } + + async fn list_refresh_tokens(&self, website: &url::Url) -> Result> { + let dir = format!("{}/tokens", FileBackend::url_to_dir(website)); + self.list_files(&dir, "refresh").await + } + + async fn revoke_refresh_token(&self, website: &url::Url, token: &str) -> Result<()> { + match tokio::fs::remove_file( + self.path + .join(FileBackend::url_to_dir(website)) + .join("tokens") + .join(format!("refresh.{}", FileBackend::sanitize_for_path(token))) + ).await { + Err(err) if err.kind() == std::io::ErrorKind::NotFound => Ok(()), + result => result + } + } + + // Password management. + #[tracing::instrument(skip(password))] + async fn verify_password(&self, website: &url::Url, password: String) -> Result { + use argon2::{Argon2, password_hash::{PasswordHash, PasswordVerifier}}; + + let password_filename = self.path + .join(FileBackend::url_to_dir(website)) + .join("password"); + + tracing::debug!("Reading password for {} from {}", website, password_filename.display()); + + match tokio::fs::read_to_string(password_filename).await { + Ok(password_hash) => { + let parsed_hash = { + let hash = password_hash.trim(); + #[cfg(debug_assertions)] tracing::debug!("Password hash: {}", hash); + PasswordHash::new(hash) + .expect("Password hash should be valid!") + }; + Ok(Argon2::default().verify_password(password.as_bytes(), &parsed_hash).is_ok()) + }, + Err(err) => if err.kind() == std::io::ErrorKind::NotFound { + Ok(false) + } else { + Err(err) + } + } + } + + #[tracing::instrument(skip(password))] + async fn enroll_password(&self, website: &url::Url, password: String) -> Result<()> { + use argon2::{Argon2, password_hash::{rand_core::OsRng, PasswordHasher, SaltString}}; + + let password_filename = self.path + 
.join(FileBackend::url_to_dir(website)) + .join("password"); + + let salt = SaltString::generate(&mut OsRng); + let argon2 = Argon2::default(); + let password_hash = argon2.hash_password(password.as_bytes(), &salt) + .expect("Hashing a password should not error out") + .to_string(); + + tracing::debug!("Enrolling password for {} at {}", website, password_filename.display()); + tokio::fs::write(password_filename, password_hash.as_bytes()).await + } + + // WebAuthn credential management. + #[cfg(feature = "webauthn")] + async fn enroll_webauthn(&self, website: &url::Url, credential: Passkey) -> Result<()> { + todo!() + } + + #[cfg(feature = "webauthn")] + async fn list_webauthn_pubkeys(&self, website: &url::Url) -> Result> { + // TODO stub! + Ok(vec![]) + } + + #[cfg(feature = "webauthn")] + async fn persist_registration_challenge( + &self, + website: &url::Url, + state: PasskeyRegistration + ) -> Result { + todo!() + } + + #[cfg(feature = "webauthn")] + async fn retrieve_registration_challenge( + &self, + website: &url::Url, + challenge_id: &str + ) -> Result { + todo!() + } + + #[cfg(feature = "webauthn")] + async fn persist_authentication_challenge( + &self, + website: &url::Url, + state: PasskeyAuthentication + ) -> Result { + todo!() + } + + #[cfg(feature = "webauthn")] + async fn retrieve_authentication_challenge( + &self, + website: &url::Url, + challenge_id: &str + ) -> Result { + todo!() + } + + async fn list_user_credential_types(&self, website: &url::Url) -> Result> { + let mut creds = vec![]; + + match tokio::fs::metadata(self.path + .join(FileBackend::url_to_dir(website)) + .join("password")) + .await + { + Ok(_) => creds.push(EnrolledCredential::Password), + Err(err) => if err.kind() != std::io::ErrorKind::NotFound { + return Err(err) + } + } + + #[cfg(feature = "webauthn")] + if !self.list_webauthn_pubkeys(website).await?.is_empty() { + creds.push(EnrolledCredential::WebAuthn); + } + + Ok(creds) + } +} diff --git a/src/indieauth/mod.rs 
b/src/indieauth/mod.rs new file mode 100644 index 0000000..0ad2702 --- /dev/null +++ b/src/indieauth/mod.rs @@ -0,0 +1,883 @@ +use std::marker::PhantomData; + +use tracing::error; +use serde::Deserialize; +use axum::{ + extract::{Query, Json, Host, Form}, + response::{Html, IntoResponse, Response}, + http::StatusCode, TypedHeader, headers::{Authorization, authorization::Bearer}, + Extension +}; +#[cfg_attr(not(feature = "webauthn"), allow(unused_imports))] +use axum_extra::extract::cookie::{CookieJar, Cookie}; +use crate::database::Storage; +use kittybox_indieauth::{ + Metadata, IntrospectionEndpointAuthMethod, RevocationEndpointAuthMethod, + Scope, Scopes, PKCEMethod, Error, ErrorKind, ResponseType, + AuthorizationRequest, AuthorizationResponse, + GrantType, GrantRequest, GrantResponse, Profile, + TokenIntrospectionRequest, TokenIntrospectionResponse, TokenRevocationRequest, TokenData +}; +use std::str::FromStr; +use std::ops::Deref; + +pub mod backend; +#[cfg(feature = "webauthn")] +mod webauthn; +use backend::AuthBackend; + +const ACCESS_TOKEN_VALIDITY: u64 = 7 * 24 * 60 * 60; // 7 days +const REFRESH_TOKEN_VALIDITY: u64 = ACCESS_TOKEN_VALIDITY / 7 * 60; // 60 days +/// Internal scope for accessing the token introspection endpoint. 
+const KITTYBOX_TOKEN_STATUS: &str = "kittybox:token_status"; + +pub(crate) struct User(pub(crate) TokenData, pub(crate) PhantomData); +impl std::fmt::Debug for User { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_tuple("User").field(&self.0).finish() + } +} +impl std::ops::Deref for User { + type Target = TokenData; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +pub enum IndieAuthResourceError { + InvalidRequest, + Unauthorized, + InvalidToken +} +impl axum::response::IntoResponse for IndieAuthResourceError { + fn into_response(self) -> axum::response::Response { + use IndieAuthResourceError::*; + + match self { + Unauthorized => ( + StatusCode::UNAUTHORIZED, + [("WWW-Authenticate", "Bearer")] + ).into_response(), + InvalidRequest => ( + StatusCode::BAD_REQUEST, + Json(&serde_json::json!({"error": "invalid_request"})) + ).into_response(), + InvalidToken => ( + StatusCode::UNAUTHORIZED, + [("WWW-Authenticate", "Bearer, error=\"invalid_token\"")], + Json(&serde_json::json!({"error": "unauthorized"})) + ).into_response() + } + } +} + +#[async_trait::async_trait] +impl axum::extract::FromRequestParts for User { + type Rejection = IndieAuthResourceError; + + async fn from_request_parts(req: &mut axum::http::request::Parts, state: &S) -> Result { + let TypedHeader(Authorization(token)) = + TypedHeader::>::from_request_parts(req, state) + .await + .map_err(|_| IndieAuthResourceError::Unauthorized)?; + + let axum::Extension(auth) = axum::Extension::::from_request_parts(req, state) + .await + .unwrap(); + + let Host(host) = Host::from_request_parts(req, state) + .await + .map_err(|_| IndieAuthResourceError::InvalidRequest)?; + + auth.get_token( + &format!("https://{host}/").parse().unwrap(), + token.token() + ) + .await + .unwrap() + .ok_or(IndieAuthResourceError::InvalidToken) + .map(|t| User(t, PhantomData)) + } +} + +pub async fn metadata( + Host(host): Host +) -> Metadata { + let issuer: url::Url = format!( + "{}://{}/", + 
if cfg!(debug_assertions) { + "http" + } else { + "https" + }, + host + ).parse().unwrap(); + + let indieauth: url::Url = issuer.join("/.kittybox/indieauth/").unwrap(); + Metadata { + issuer, + authorization_endpoint: indieauth.join("auth").unwrap(), + token_endpoint: indieauth.join("token").unwrap(), + introspection_endpoint: indieauth.join("token_status").unwrap(), + introspection_endpoint_auth_methods_supported: Some(vec![ + IntrospectionEndpointAuthMethod::Bearer + ]), + revocation_endpoint: Some(indieauth.join("revoke_token").unwrap()), + revocation_endpoint_auth_methods_supported: Some(vec![ + RevocationEndpointAuthMethod::None + ]), + scopes_supported: Some(vec![ + Scope::Create, + Scope::Update, + Scope::Delete, + Scope::Media, + Scope::Profile + ]), + response_types_supported: Some(vec![ResponseType::Code]), + grant_types_supported: Some(vec![GrantType::AuthorizationCode, GrantType::RefreshToken]), + service_documentation: None, + code_challenge_methods_supported: vec![PKCEMethod::S256], + authorization_response_iss_parameter_supported: Some(true), + userinfo_endpoint: Some(indieauth.join("userinfo").unwrap()), + } +} + +async fn authorization_endpoint_get( + Host(host): Host, + Query(request): Query, + Extension(db): Extension, + Extension(http): Extension, + Extension(auth): Extension +) -> Response { + let me = format!("https://{host}/").parse().unwrap(); + let h_app = { + tracing::debug!("Sending request to {} to fetch metadata", request.client_id); + match http.get(request.client_id.clone()).send().await { + Ok(response) => { + let url = response.url().clone(); + let text = response.text().await.unwrap(); + tracing::debug!("Received {} bytes in response", text.len()); + match microformats::from_html(&text, url) { + Ok(mf2) => { + if let Some(relation) = mf2.rels.items.get(&request.redirect_uri) { + if !relation.rels.iter().any(|i| i == "redirect_uri") { + return (StatusCode::BAD_REQUEST, + [("Content-Type", "text/plain")], + "The redirect_uri provided 
was declared as \ + something other than redirect_uri.") + .into_response() + } + } else if request.redirect_uri.origin() != request.client_id.origin() { + return (StatusCode::BAD_REQUEST, + [("Content-Type", "text/plain")], + "The redirect_uri didn't match the origin \ + and wasn't explicitly allowed. You were being tricked.") + .into_response() + } + + mf2.items.iter() + .cloned() + .find(|i| (**i).borrow().r#type.iter() + .any(|i| *i == microformats::types::Class::from_str("h-app").unwrap() + || *i == microformats::types::Class::from_str("h-x-app").unwrap())) + .map(|i| serde_json::to_value(i.borrow().deref()).unwrap()) + }, + Err(err) => { + tracing::error!("Error parsing application metadata: {}", err); + return (StatusCode::BAD_REQUEST, + [("Content-Type", "text/plain")], + "Parsing application metadata failed.").into_response() + } + } + }, + Err(err) => { + tracing::error!("Error fetching application metadata: {}", err); + return (StatusCode::INTERNAL_SERVER_ERROR, + [("Content-Type", "text/plain")], + "Fetching application metadata failed.").into_response() + } + } + }; + + tracing::debug!("Application metadata: {:#?}", h_app); + + Html(kittybox_frontend_renderer::Template { + title: "Confirm sign-in via IndieAuth", + blog_name: "Kittybox", + feeds: vec![], + user: None, + content: kittybox_frontend_renderer::AuthorizationRequestPage { + request, + credentials: auth.list_user_credential_types(&me).await.unwrap(), + user: db.get_post(me.as_str()).await.unwrap().unwrap(), + app: h_app + }.to_string(), + }.to_string()) + .into_response() +} + +#[derive(Deserialize, Debug)] +#[serde(untagged)] +enum Credential { + Password(String), + #[cfg(feature = "webauthn")] + WebAuthn(::webauthn::prelude::PublicKeyCredential) +} + +#[derive(Deserialize, Debug)] +struct AuthorizationConfirmation { + authorization_method: Credential, + request: AuthorizationRequest +} + +async fn verify_credential( + auth: &A, + website: &url::Url, + credential: Credential, + 
#[cfg_attr(not(feature = "webauthn"), allow(unused_variables))] + challenge_id: Option<&str> +) -> std::io::Result { + match credential { + Credential::Password(password) => auth.verify_password(website, password).await, + #[cfg(feature = "webauthn")] + Credential::WebAuthn(credential) => webauthn::verify( + auth, + website, + credential, + challenge_id.unwrap() + ).await + } +} + +#[tracing::instrument(skip(backend, confirmation))] +async fn authorization_endpoint_confirm( + Host(host): Host, + Extension(backend): Extension, + cookies: CookieJar, + Json(confirmation): Json, +) -> Response { + tracing::debug!("Received authorization confirmation from user"); + #[cfg(feature = "webauthn")] + let challenge_id = cookies.get(webauthn::CHALLENGE_ID_COOKIE) + .map(|cookie| cookie.value()); + #[cfg(not(feature = "webauthn"))] + let challenge_id = None; + + let website = format!("https://{}/", host).parse().unwrap(); + let AuthorizationConfirmation { + authorization_method: credential, + request: mut auth + } = confirmation; + match verify_credential(&backend, &website, credential, challenge_id).await { + Ok(verified) => if !verified { + error!("User failed verification, bailing out."); + return StatusCode::UNAUTHORIZED.into_response(); + }, + Err(err) => { + error!("Error while verifying credential: {}", err); + return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + } + } + // Insert the correct `me` value into the request + // + // From this point, the `me` value that hits the backend is + // guaranteed to be authoritative and correct, and can be safely + // unwrapped. 
+ // DO NOT SET `StatusCode::FOUND` here! `fetch()` cannot read from
+ // redirects; it can only follow them, or choose to receive an
+ // opaque response instead — which is completely useless here.
return Error { + kind: ErrorKind::InvalidGrant, + msg: Some("This redirect_uri doesn't match the one the code has been sent to.".to_string()), + error_uri: None + }.into_response() + } + if !request.code_challenge.verify(code_verifier) { + return Error { + kind: ErrorKind::InvalidGrant, + msg: Some("The PKCE challenge failed.".to_string()), + // are RFCs considered human-readable? 😝 + error_uri: "https://datatracker.ietf.org/doc/html/rfc7636#section-4.6".parse().ok() + }.into_response() + } + let me: url::Url = format!("https://{}/", host).parse().unwrap(); + if request.me.unwrap() != me { + return Error { + kind: ErrorKind::InvalidGrant, + msg: Some("This authorization endpoint does not serve this user.".to_string()), + error_uri: None + }.into_response() + } + let profile = if request.scope.as_ref() + .map(|s| s.has(&Scope::Profile)) + .unwrap_or_default() + { + match get_profile( + db, + me.as_str(), + request.scope.as_ref() + .map(|s| s.has(&Scope::Email)) + .unwrap_or_default() + ).await { + Ok(profile) => { + tracing::debug!("Retrieved profile: {:?}", profile); + profile + }, + Err(err) => { + tracing::error!("Error retrieving profile from database: {}", err); + + return StatusCode::INTERNAL_SERVER_ERROR.into_response() + } + } + } else { + None + }; + + GrantResponse::ProfileUrl { me, profile }.into_response() + }, + _ => Error { + kind: ErrorKind::InvalidGrant, + msg: Some("The provided grant_type is unusable on this endpoint.".to_string()), + error_uri: "https://indieauth.spec.indieweb.org/#redeeming-the-authorization-code".parse().ok() + }.into_response() + } +} + +#[tracing::instrument(skip(backend, db))] +async fn token_endpoint_post( + Host(host): Host, + Extension(backend): Extension, + Extension(db): Extension, + Form(grant): Form, +) -> Response { + #[inline] + fn prepare_access_token(me: url::Url, client_id: url::Url, scope: Scopes) -> TokenData { + TokenData { + me, client_id, scope, + exp: (std::time::SystemTime::now() + 
.duration_since(std::time::UNIX_EPOCH) + .unwrap() + + std::time::Duration::from_secs(ACCESS_TOKEN_VALIDITY)) + .as_secs() + .into(), + iat: std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_secs() + .into() + } + } + + #[inline] + fn prepare_refresh_token(me: url::Url, client_id: url::Url, scope: Scopes) -> TokenData { + TokenData { + me, client_id, scope, + exp: (std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + + std::time::Duration::from_secs(REFRESH_TOKEN_VALIDITY)) + .as_secs() + .into(), + iat: std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_secs() + .into() + } + } + + let me: url::Url = format!("https://{}/", host).parse().unwrap(); + + match grant { + GrantRequest::AuthorizationCode { + code, + client_id, + redirect_uri, + code_verifier + } => { + let request: AuthorizationRequest = match backend.get_code(&code).await { + Ok(Some(request)) => request, + Ok(None) => return Error { + kind: ErrorKind::InvalidGrant, + msg: Some("The provided authorization code is invalid.".to_string()), + error_uri: None + }.into_response(), + Err(err) => { + tracing::error!("Error retrieving auth request: {}", err); + return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + } + }; + + tracing::debug!("Retrieved authorization request: {:?}", request); + + let scope = if let Some(scope) = request.scope { scope } else { + return Error { + kind: ErrorKind::InvalidScope, + msg: Some("Tokens cannot be issued if no scopes are requested.".to_string()), + error_uri: "https://indieauth.spec.indieweb.org/#access-token-response".parse().ok() + }.into_response(); + }; + if client_id != request.client_id { + return Error { + kind: ErrorKind::InvalidGrant, + msg: Some("This authorization code isn't yours.".to_string()), + error_uri: None + }.into_response() + } + if redirect_uri != request.redirect_uri { + return Error { + kind: ErrorKind::InvalidGrant, + msg: Some("This 
redirect_uri doesn't match the one the code has been sent to.".to_string()), + error_uri: None + }.into_response() + } + if !request.code_challenge.verify(code_verifier) { + return Error { + kind: ErrorKind::InvalidGrant, + msg: Some("The PKCE challenge failed.".to_string()), + error_uri: "https://datatracker.ietf.org/doc/html/rfc7636#section-4.6".parse().ok() + }.into_response(); + } + + // Note: we can trust the `request.me` value, since we set + // it earlier before generating the authorization code + if request.me.unwrap() != me { + return Error { + kind: ErrorKind::InvalidGrant, + msg: Some("This authorization endpoint does not serve this user.".to_string()), + error_uri: None + }.into_response() + } + + let profile = if dbg!(scope.has(&Scope::Profile)) { + match get_profile( + db, + me.as_str(), + scope.has(&Scope::Email) + ).await { + Ok(profile) => dbg!(profile), + Err(err) => { + tracing::error!("Error retrieving profile from database: {}", err); + + return StatusCode::INTERNAL_SERVER_ERROR.into_response() + } + } + } else { + None + }; + + let access_token = match backend.create_token( + prepare_access_token(me.clone(), client_id.clone(), scope.clone()) + ).await { + Ok(token) => token, + Err(err) => { + tracing::error!("Error creating access token: {}", err); + return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + } + }; + // TODO: only create refresh token if user allows it + let refresh_token = match backend.create_refresh_token( + prepare_refresh_token(me.clone(), client_id, scope.clone()) + ).await { + Ok(token) => token, + Err(err) => { + tracing::error!("Error creating refresh token: {}", err); + return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + } + }; + + GrantResponse::AccessToken { + me, + profile, + access_token, + token_type: kittybox_indieauth::TokenType::Bearer, + scope: Some(scope), + expires_in: Some(ACCESS_TOKEN_VALIDITY), + refresh_token: Some(refresh_token) + }.into_response() + }, + GrantRequest::RefreshToken { + 
refresh_token, + client_id, + scope + } => { + let data = match backend.get_refresh_token(&me, &refresh_token).await { + Ok(Some(token)) => token, + Ok(None) => return Error { + kind: ErrorKind::InvalidGrant, + msg: Some("This refresh token is not valid.".to_string()), + error_uri: None + }.into_response(), + Err(err) => { + tracing::error!("Error retrieving refresh token: {}", err); + return StatusCode::INTERNAL_SERVER_ERROR.into_response() + } + }; + + if data.client_id != client_id { + return Error { + kind: ErrorKind::InvalidGrant, + msg: Some("This refresh token is not yours.".to_string()), + error_uri: None + }.into_response(); + } + + let scope = if let Some(scope) = scope { + if !data.scope.has_all(scope.as_ref()) { + return Error { + kind: ErrorKind::InvalidScope, + msg: Some("You can't request additional scopes through the refresh token grant.".to_string()), + error_uri: None + }.into_response(); + } + + scope + } else { + // Note: check skipped because of redundancy (comparing a scope list with itself) + data.scope + }; + + + let profile = if scope.has(&Scope::Profile) { + match get_profile( + db, + data.me.as_str(), + scope.has(&Scope::Email) + ).await { + Ok(profile) => profile, + Err(err) => { + tracing::error!("Error retrieving profile from database: {}", err); + + return StatusCode::INTERNAL_SERVER_ERROR.into_response() + } + } + } else { + None + }; + + let access_token = match backend.create_token( + prepare_access_token(data.me.clone(), client_id.clone(), scope.clone()) + ).await { + Ok(token) => token, + Err(err) => { + tracing::error!("Error creating access token: {}", err); + return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + } + }; + + let old_refresh_token = refresh_token; + let refresh_token = match backend.create_refresh_token( + prepare_refresh_token(data.me.clone(), client_id, scope.clone()) + ).await { + Ok(token) => token, + Err(err) => { + tracing::error!("Error creating refresh token: {}", err); + return 
StatusCode::INTERNAL_SERVER_ERROR.into_response(); + } + }; + if let Err(err) = backend.revoke_refresh_token(&me, &old_refresh_token).await { + tracing::error!("Error revoking refresh token: {}", err); + return StatusCode::INTERNAL_SERVER_ERROR.into_response(); + } + + GrantResponse::AccessToken { + me: data.me, + profile, + access_token, + token_type: kittybox_indieauth::TokenType::Bearer, + scope: Some(scope), + expires_in: Some(ACCESS_TOKEN_VALIDITY), + refresh_token: Some(refresh_token) + }.into_response() + } + } +} + +#[tracing::instrument(skip(backend, token_request))] +async fn introspection_endpoint_post( + Host(host): Host, + TypedHeader(Authorization(auth_token)): TypedHeader>, + Extension(backend): Extension, + Form(token_request): Form, +) -> Response { + use serde_json::json; + + let me: url::Url = format!("https://{}/", host).parse().unwrap(); + + // Check authentication first + match backend.get_token(&me, auth_token.token()).await { + Ok(Some(token)) => if !token.scope.has(&Scope::custom(KITTYBOX_TOKEN_STATUS)) { + return (StatusCode::UNAUTHORIZED, Json(json!({ + "error": kittybox_indieauth::ResourceErrorKind::InsufficientScope + }))).into_response(); + }, + Ok(None) => return (StatusCode::UNAUTHORIZED, Json(json!({ + "error": kittybox_indieauth::ResourceErrorKind::InvalidToken + }))).into_response(), + Err(err) => { + tracing::error!("Error retrieving token data for introspection: {}", err); + return StatusCode::INTERNAL_SERVER_ERROR.into_response() + } + } + let response: TokenIntrospectionResponse = match backend.get_token(&me, &token_request.token).await { + Ok(maybe_data) => maybe_data.into(), + Err(err) => { + tracing::error!("Error retrieving token data: {}", err); + return StatusCode::INTERNAL_SERVER_ERROR.into_response() + } + }; + + response.into_response() +} + +async fn revocation_endpoint_post( + Host(host): Host, + Extension(backend): Extension, + Form(revocation): Form, +) -> impl IntoResponse { + let me: url::Url = 
+ // We do this because ResourceErrorKind only covers IndieAuth-defined errors
+#[cfg(test)] +mod tests { + #[test] + fn test_deserialize_authorization_confirmation() { + use super::{Credential, AuthorizationConfirmation}; + + let confirmation = serde_json::from_str::(r#"{ + "request":{ + "response_type": "code", + "client_id": "https://quill.p3k.io/", + "redirect_uri": "https://quill.p3k.io/", + "state": "10101010", + "code_challenge": "awooooooooooo", + "code_challenge_method": "S256", + "scope": "create+media" + }, + "authorization_method": "swordfish" + }"#).unwrap(); + + match confirmation.authorization_method { + Credential::Password(password) => assert_eq!(password.as_str(), "swordfish"), + #[allow(unreachable_patterns)] + other => panic!("Incorrect credential: {:?}", other) + } + assert_eq!(confirmation.request.state.as_ref(), "10101010"); + } +} diff --git a/src/indieauth/webauthn.rs b/src/indieauth/webauthn.rs new file mode 100644 index 0000000..ea3ad3d --- /dev/null +++ b/src/indieauth/webauthn.rs @@ -0,0 +1,140 @@ +use axum::{ + extract::{Json, Host}, + response::{IntoResponse, Response}, + http::StatusCode, Extension, TypedHeader, headers::{authorization::Bearer, Authorization} +}; +use axum_extra::extract::cookie::{CookieJar, Cookie}; + +use super::backend::AuthBackend; +use crate::database::Storage; + +pub(crate) const CHALLENGE_ID_COOKIE: &str = "kittybox_webauthn_challenge_id"; + +macro_rules! bail { + ($msg:literal, $err:expr) => { + { + ::tracing::error!($msg, $err); + return ::axum::http::StatusCode::INTERNAL_SERVER_ERROR.into_response() + } + } +} + +pub async fn webauthn_pre_register( + Host(host): Host, + Extension(db): Extension, + Extension(auth): Extension, + cookies: CookieJar +) -> Response { + let uid = format!("https://{}/", host.clone()); + let uid_url: url::Url = uid.parse().unwrap(); + // This will not find an h-card in onboarding! 
+ // Note: using a nil UUID here is fine,
+ // because the user corresponds to a website anyway;
+ // we do not track multiple users.
+ user_credential: Option>>, + Extension(auth): Extension +) -> Response { + let uid = format!("https://{}/", host.clone()); + let uid_url: url::Url = uid.parse().unwrap(); + + let pubkeys = match auth.list_webauthn_pubkeys(&uid_url).await { + Ok(pubkeys) => pubkeys, + Err(err) => bail!("Error enumerating existing WebAuthn credentials: {}", err) + }; + + if !pubkeys.is_empty() { + if let Some(TypedHeader(Authorization(token))) = user_credential { + // TODO check validity of the credential + } else { + return StatusCode::UNAUTHORIZED.into_response() + } + } + + return StatusCode::OK.into_response() +} + +pub(crate) async fn verify( + auth: &A, + website: &url::Url, + credential: webauthn::prelude::PublicKeyCredential, + challenge_id: &str +) -> std::io::Result { + let host = website.host_str().unwrap(); + + let webauthn = webauthn::WebauthnBuilder::new( + host, + website + ) + .unwrap() + .rp_name("Kittybox") + .build() + .unwrap(); + + match webauthn.finish_passkey_authentication( + &credential, + &auth.retrieve_authentication_challenge(&website, challenge_id).await? + ) { + Err(err) => { + tracing::error!("WebAuthn error: {}", err); + Ok(false) + }, + Ok(authentication_result) => { + let counter = authentication_result.counter(); + let cred_id = authentication_result.cred_id(); + + if authentication_result.needs_update() { + todo!() + } + Ok(true) + } + } +} diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000..c1bd965 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,93 @@ +#![forbid(unsafe_code)] +#![warn(clippy::todo)] + +/// Database abstraction layer for Kittybox, allowing the CMS to work with any kind of database. 
+pub mod database; +pub mod frontend; +pub mod media; +pub mod micropub; +pub mod indieauth; +pub mod webmentions; + +pub mod companion { + use std::{collections::HashMap, sync::Arc}; + use axum::{ + extract::{Extension, Path}, + response::{IntoResponse, Response} + }; + + #[derive(Debug, Clone, Copy)] + struct Resource { + data: &'static [u8], + mime: &'static str + } + + impl IntoResponse for &Resource { + fn into_response(self) -> Response { + (axum::http::StatusCode::OK, + [("Content-Type", self.mime)], + self.data).into_response() + } + } + + // TODO replace with the "phf" crate someday + type ResourceTable = Arc>; + + #[tracing::instrument] + async fn map_to_static( + Path(name): Path, + Extension(resources): Extension + ) -> Response { + tracing::debug!("Searching for {} in the resource table...", name); + match resources.get(name.as_str()) { + Some(res) => res.into_response(), + None => { + #[cfg(debug_assertions)] tracing::error!("Not found"); + + (axum::http::StatusCode::NOT_FOUND, + [("Content-Type", "text/plain")], + "Not found. Sorry.".as_bytes()).into_response() + } + } + } + + #[must_use] + pub fn router() -> axum::Router { + let resources: ResourceTable = { + let mut map = HashMap::new(); + + macro_rules! register_resource { + ($map:ident, $prefix:expr, ($filename:literal, $mime:literal)) => {{ + $map.insert($filename, Resource { + data: include_bytes!(concat!($prefix, $filename)), + mime: $mime + }) + }}; + ($map:ident, $prefix:expr, ($filename:literal, $mime:literal), $( ($f:literal, $m:literal) ),+) => {{ + register_resource!($map, $prefix, ($filename, $mime)); + register_resource!($map, $prefix, $(($f, $m)),+); + }}; + } + + register_resource! 
{ + map, + concat!(env!("OUT_DIR"), "/", "companion", "/"), + ("index.html", "text/html; charset=\"utf-8\""), + ("main.js", "text/javascript"), + ("micropub_api.js", "text/javascript"), + ("indieauth.js", "text/javascript"), + ("base64.js", "text/javascript"), + ("style.css", "text/css") + }; + + Arc::new(map) + }; + + axum::Router::new() + .route( + "/:filename", + axum::routing::get(map_to_static) + .layer(Extension(resources)) + ) + } +} diff --git a/src/main.rs b/src/main.rs new file mode 100644 index 0000000..6389489 --- /dev/null +++ b/src/main.rs @@ -0,0 +1,489 @@ +use kittybox::database::FileStorage; +use std::{env, time::Duration, sync::Arc}; +use tracing::error; + +fn init_media(auth_backend: A, blobstore_uri: &str) -> axum::Router { + match blobstore_uri.split_once(':').unwrap().0 { + "file" => { + let folder = std::path::PathBuf::from( + blobstore_uri.strip_prefix("file://").unwrap() + ); + let blobstore = kittybox::media::storage::file::FileStore::new(folder); + + kittybox::media::router::<_, _>(blobstore, auth_backend) + }, + other => unimplemented!("Unsupported backend: {other}") + } +} + +async fn compose_kittybox_with_auth( + http: reqwest::Client, + auth_backend: A, + backend_uri: &str, + blobstore_uri: &str, + job_queue_uri: &str, + jobset: &Arc>>, + cancellation_token: &tokio_util::sync::CancellationToken +) -> (axum::Router, kittybox::webmentions::SupervisedTask) +where A: kittybox::indieauth::backend::AuthBackend +{ + match backend_uri.split_once(':').unwrap().0 { + "file" => { + let database = { + let folder = backend_uri.strip_prefix("file://").unwrap(); + let path = std::path::PathBuf::from(folder); + + match kittybox::database::FileStorage::new(path).await { + Ok(db) => db, + Err(err) => { + error!("Error creating database: {:?}", err); + std::process::exit(1); + } + } + }; + + // Technically, if we don't construct the micropub router, + // we could use some wrapper that makes the database + // read-only. 
+ // + // This would allow to exclude all code to write to the + // database and separate reader and writer processes of + // Kittybox to improve security. + let homepage: axum::routing::MethodRouter<_> = axum::routing::get( + kittybox::frontend::homepage:: + ) + .layer(axum::Extension(database.clone())); + let fallback = axum::routing::get( + kittybox::frontend::catchall:: + ) + .layer(axum::Extension(database.clone())); + + let micropub = kittybox::micropub::router( + database.clone(), + http.clone(), + auth_backend.clone(), + Arc::clone(jobset) + ); + let onboarding = kittybox::frontend::onboarding::router( + database.clone(), http.clone(), Arc::clone(jobset) + ); + + + let (webmention, task) = kittybox::webmentions::router( + kittybox::webmentions::queue::PostgresJobQueue::new(job_queue_uri).await.unwrap(), + database.clone(), + http.clone(), + cancellation_token.clone() + ); + + let router = axum::Router::new() + .route("/", homepage) + .fallback(fallback) + .route("/.kittybox/micropub", micropub) + .route("/.kittybox/onboarding", onboarding) + .nest("/.kittybox/media", init_media(auth_backend.clone(), blobstore_uri)) + .merge(kittybox::indieauth::router(auth_backend.clone(), database.clone(), http.clone())) + .merge(webmention) + .route( + "/.kittybox/health", + axum::routing::get(health_check::) + .layer(axum::Extension(database)) + ); + + (router, task) + }, + "redis" => unimplemented!("Redis backend is not supported."), + #[cfg(feature = "postgres")] + "postgres" => { + use kittybox::database::PostgresStorage; + + let database = { + match PostgresStorage::new(backend_uri).await { + Ok(db) => db, + Err(err) => { + error!("Error creating database: {:?}", err); + std::process::exit(1); + } + } + }; + + // Technically, if we don't construct the micropub router, + // we could use some wrapper that makes the database + // read-only. 
+ // Add extra root certificates from the colon-separated paths in KITTYBOX_CUSTOM_PKI_ROOTS.
axum::Extension, + //axum::Extension(blob): axum::Extension, + axum::Extension(data): axum::Extension, +) -> impl axum::response::IntoResponse +where + //A: kittybox::indieauth::backend::AuthBackend, + //B: kittybox::media::storage::MediaStore, + D: kittybox::database::Storage +{ + (axum::http::StatusCode::OK, std::borrow::Cow::Borrowed("OK")) +} + +#[tokio::main] +async fn main() { + use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Registry}; + + let tracing_registry = Registry::default() + .with(EnvFilter::from_default_env()) + .with( + #[cfg(debug_assertions)] + tracing_tree::HierarchicalLayer::new(2) + .with_bracketed_fields(true) + .with_indent_lines(true) + .with_verbose_exit(true), + #[cfg(not(debug_assertions))] + tracing_subscriber::fmt::layer().json() + .with_ansi(std::io::IsTerminal::is_terminal(&std::io::stdout().lock())) + ); + // In debug builds, also log to JSON, but to file. + #[cfg(debug_assertions)] + let tracing_registry = tracing_registry.with( + tracing_subscriber::fmt::layer() + .json() + .with_writer({ + let instant = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap(); + move || std::fs::OpenOptions::new() + .append(true) + .create(true) + .open( + format!( + "{}.log.json", + instant + .as_secs_f64() + .to_string() + .replace('.', "_") + ) + ).unwrap() + }) + ); + tracing_registry.init(); + + tracing::info!("Starting the kittybox server..."); + + let backend_uri: String = env::var("BACKEND_URI") + .unwrap_or_else(|_| { + error!("BACKEND_URI is not set, cannot find a database"); + std::process::exit(1); + }); + let blobstore_uri: String = env::var("BLOBSTORE_URI") + .unwrap_or_else(|_| { + error!("BLOBSTORE_URI is not set, can't find media store"); + std::process::exit(1); + }); + + let authstore_uri: String = env::var("AUTH_STORE_URI") + .unwrap_or_else(|_| { + error!("AUTH_STORE_URI is not set, can't find authentication store"); + std::process::exit(1); + }); + + let 
/// Entry point: configure tracing, read store URIs from the
/// environment, compose the Kittybox router, serve it on every
/// inherited or configured socket, and shut down gracefully on signal,
/// watchdog cancellation, or fatal server error.
#[tokio::main]
async fn main() {
    use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Registry};

    // Debug builds get a human-readable hierarchical layer; release
    // builds log JSON (ANSI only when stdout is a terminal). Exactly
    // one of the two cfg'd expressions survives compilation.
    let tracing_registry = Registry::default()
        .with(EnvFilter::from_default_env())
        .with(
            #[cfg(debug_assertions)]
            tracing_tree::HierarchicalLayer::new(2)
                .with_bracketed_fields(true)
                .with_indent_lines(true)
                .with_verbose_exit(true),
            #[cfg(not(debug_assertions))]
            tracing_subscriber::fmt::layer().json()
                .with_ansi(std::io::IsTerminal::is_terminal(&std::io::stdout().lock()))
        );
    // In debug builds, also log to JSON, but to file.
    #[cfg(debug_assertions)]
    let tracing_registry = tracing_registry.with(
        tracing_subscriber::fmt::layer()
            .json()
            .with_writer({
                // Startup timestamp captured once; each log write
                // re-opens `<secs>_<frac>.log.json` in append mode.
                let instant = std::time::SystemTime::now()
                    .duration_since(std::time::UNIX_EPOCH)
                    .unwrap();
                move || std::fs::OpenOptions::new()
                    .append(true)
                    .create(true)
                    .open(
                        format!(
                            "{}.log.json",
                            instant
                                .as_secs_f64()
                                .to_string()
                                .replace('.', "_")
                        )
                    ).unwrap()
            })
    );
    tracing_registry.init();

    tracing::info!("Starting the kittybox server...");

    // All four stores are addressed by URI; a missing variable is fatal
    // and exits with status 1 before anything is bound.
    let backend_uri: String = env::var("BACKEND_URI")
        .unwrap_or_else(|_| {
            error!("BACKEND_URI is not set, cannot find a database");
            std::process::exit(1);
        });
    let blobstore_uri: String = env::var("BLOBSTORE_URI")
        .unwrap_or_else(|_| {
            error!("BLOBSTORE_URI is not set, can't find media store");
            std::process::exit(1);
        });

    let authstore_uri: String = env::var("AUTH_STORE_URI")
        .unwrap_or_else(|_| {
            error!("AUTH_STORE_URI is not set, can't find authentication store");
            std::process::exit(1);
        });

    let job_queue_uri: String = env::var("JOB_QUEUE_URI")
        .unwrap_or_else(|_| {
            error!("JOB_QUEUE_URI is not set, can't find job queue");
            std::process::exit(1);
        });

    // Cooperative shutdown token and a shared set of background jobs;
    // both are threaded through the router and drained before exit.
    let cancellation_token = tokio_util::sync::CancellationToken::new();
    let jobset = Arc::new(tokio::sync::Mutex::new(tokio::task::JoinSet::new()));

    let (router, webmentions_task) = compose_kittybox(
        backend_uri.as_str(),
        blobstore_uri.as_str(),
        authstore_uri.as_str(),
        job_queue_uri.as_str(),
        &jobset,
        &cancellation_token
    ).await;

    // NOTE(review): the element type's generic parameters were stripped
    // in transit; reconstructed as the hyper server produced by
    // `build_hyper` below — TODO confirm against the original source.
    let mut servers: Vec<hyper::server::Server<
        hyper::server::conn::AddrIncoming,
        axum::routing::IntoMakeService<axum::Router>,
    >> = vec![];

    let build_hyper = |tcp: std::net::TcpListener| {
        tracing::info!("Listening on {}", tcp.local_addr().unwrap());
        // Set the socket to non-blocking so tokio can poll it
        // properly -- this is the async magic!
        tcp.set_nonblocking(true).unwrap();

        hyper::server::Server::from_tcp(tcp).unwrap()
            // Otherwise Chrome keeps connections open for too long
            .tcp_keepalive(Some(Duration::from_secs(30 * 60)))
            .serve(router.clone().into_make_service())
    };

    // Socket activation: accept any TCP listeners handed down by a
    // supervisor (systemd-style LISTEN_FDS).
    let mut listenfd = listenfd::ListenFd::from_env();
    for i in 0..(listenfd.len()) {
        match listenfd.take_tcp_listener(i) {
            Ok(Some(tcp)) => servers.push(build_hyper(tcp)),
            Ok(None) => {},
            Err(err) => tracing::error!("Error binding to socket in fd {}: {}", i, err)
        }
    }
    // TODO this requires the `hyperlocal` crate
    //#[rustfmt::skip]
    /*#[cfg(unix)] {
        let build_hyper_unix = |unix: std::os::unix::net::UnixListener| {
            {
                use std::os::linux::net::SocketAddrExt;

                let local_addr = unix.local_addr().unwrap();
                if let Some(pathname) = local_addr.as_pathname() {
                    tracing::info!("Listening on unix:{}", pathname.display());
                } else if let Some(name) = {
                    #[cfg(linux)]
                    local_addr.as_abstract_name();
                    #[cfg(not(linux))]
                    None::<&[u8]>
                } {
                    tracing::info!("Listening on unix:@{}", String::from_utf8_lossy(name));
                } else {
                    tracing::info!("Listening on unnamed unix socket");
                }
            }
            unix.set_nonblocking(true).unwrap();

            hyper::server::Server::builder(unix)
                .serve(router.clone().into_make_service())
        };
        for i in 0..(listenfd.len()) {
            match listenfd.take_unix_listener(i) {
                Ok(Some(unix)) => servers.push(build_hyper_unix(unix)),
                Ok(None) => {},
                Err(err) => tracing::error!("Error binding to socket in fd {}: {}", i, err)
            }
        }
    }*/
    // Fallback when no sockets were inherited: bind SERVE_AT, or
    // `[::]:8080` by default.
    if servers.is_empty() {
        servers.push(build_hyper({
            let listen_addr = env::var("SERVE_AT")
                .ok()
                .unwrap_or_else(|| "[::]:8080".to_string())
                // NOTE(review): turbofish target stripped in transit;
                // reconstructed as SocketAddr — TODO confirm.
                .parse::<std::net::SocketAddr>()
                .unwrap_or_else(|e| {
                    error!("Cannot parse SERVE_AT: {}", e);
                    std::process::exit(1);
                });

            std::net::TcpListener::bind(listen_addr).unwrap()
        }))
    }
    // Drop the remaining copy of the router
    // to get rid of an extra reference to `jobset`
    drop(router);
    // Polling streams mutates them
    // NOTE(review): collect target stripped in transit; reconstructed
    // as FuturesUnordered (it is later polled with `.next()` and
    // iterated with `.iter_mut()`) — TODO confirm.
    let mut servers_futures = Box::pin(servers.into_iter()
        .map(
            #[cfg(not(tokio_unstable))] |server| tokio::task::spawn(
                server.with_graceful_shutdown(cancellation_token.clone().cancelled_owned())
            ),
            #[cfg(tokio_unstable)] |server| {
                tokio::task::Builder::new()
                    .name(format!("Kittybox HTTP acceptor: {}", server.local_addr()).as_str())
                    .spawn(
                        server.with_graceful_shutdown(
                            cancellation_token.clone().cancelled_owned()
                        )
                    )
                    .unwrap()
            }
        )
        .collect::<futures_util::stream::FuturesUnordered<tokio::task::JoinHandle<hyper::Result<()>>>>()
    );

    // Signal handling: Ctrl-C everywhere; SIGINT/SIGTERM on unix.
    #[cfg(not(unix))]
    let shutdown_signal = tokio::signal::ctrl_c();
    #[cfg(unix)]
    let shutdown_signal = {
        use tokio::signal::unix::{signal, SignalKind};

        async move {
            let mut interrupt = signal(SignalKind::interrupt())
                .expect("Failed to set up SIGINT handler");
            let mut terminate = signal(SignalKind::terminate())
                .expect("Failed to setup SIGTERM handler");

            tokio::select! {
                _ = terminate.recv() => {},
                _ = interrupt.recv() => {},
            }
        }
    };
    use futures_util::stream::StreamExt;

    let exitcode: i32 = tokio::select! {
        // Poll the servers stream for errors.
        // If any error out, shut down the entire operation
        //
        // We do this because there might not be a good way
        // to recover from some errors without external help
        Some(Err(e)) = servers_futures.next() => {
            tracing::error!("Error in HTTP server: {}", e);
            tracing::error!("Shutting down because of error.");
            cancellation_token.cancel();

            1
        }
        _ = cancellation_token.cancelled() => {
            tracing::info!("Signal caught from watchdog.");

            0
        }
        _ = shutdown_signal => {
            tracing::info!("Shutdown requested by signal.");
            cancellation_token.cancel();

            0
        }
    };

    tracing::info!("Waiting for unfinished background tasks...");

    // Wait for the webmention worker and the draining HTTP acceptors.
    let _ = tokio::join!(
        webmentions_task,
        Box::pin(futures_util::future::join_all(
            servers_futures.iter_mut().collect::<Vec<_>>()
        )),
    );
    // By now every other clone of the Arc should be gone; unwrap it and
    // drain the remaining background jobs to completion.
    let mut jobset: tokio::task::JoinSet<()> = Arc::try_unwrap(jobset)
        .expect("Dangling jobset references present")
        .into_inner();
    while (jobset.join_next().await).is_some() {}
    tracing::info!("Shutdown complete, exiting.");
    std::process::exit(exitcode);
}
/// Media-store errors surface to Micropub clients as opaque internal
/// server errors carrying the formatted message.
impl From<MediaStoreError> for MicropubError {
    fn from(err: MediaStoreError) -> Self {
        Self {
            error: ErrorType::InternalServerError,
            error_description: format!("{}", err)
        }
    }
}

/// Micropub media endpoint: accept a `multipart/form-data` upload and
/// persist its first field in the media store, namespaced by the
/// authenticated user's host.
///
/// Responds `201 Created` with a `Location` header pointing at the
/// stored blob, or a Micropub-style JSON error.
// NOTE(review): the generic parameters were stripped in transit and
// are reconstructed from the `router` registration below — TODO
// confirm `<S: MediaStore, A: AuthBackend>` and `User<A>`.
#[tracing::instrument(skip(blobstore))]
pub(crate) async fn upload<S: MediaStore, A: AuthBackend>(
    Extension(blobstore): Extension<S>,
    user: User<A>,
    mut upload: Multipart
) -> Response {
    // The "media" scope gates every media-endpoint interaction.
    if !user.check_scope(&Scope::Media) {
        return MicropubError {
            error: ErrorType::NotAuthorized,
            error_description: "Interacting with the media storage requires the \"media\" scope.".to_owned()
        }.into_response();
    }
    // Storage key prefix: "host" or "host:port" of the `me` URL.
    // NOTE(review): `host().unwrap()` panics for URLs without a host —
    // presumably `me` is always http(s); confirm upstream validation.
    let host = user.me.host().unwrap().to_string()
        + &user.me.port().map(|i| format!(":{}", i)).unwrap_or_default();
    // Only the first multipart field is consumed; it must be the file.
    let field = match upload.next_field().await {
        Ok(Some(field)) => field,
        Ok(None) => {
            return MicropubError {
                error: ErrorType::InvalidRequest,
                error_description: "Send multipart/form-data with one field named file".to_owned()
            }.into_response();
        },
        Err(err) => {
            return MicropubError {
                error: ErrorType::InternalServerError,
                error_description: format!("Error while parsing multipart/form-data: {}", err)
            }.into_response();
        },
    };
    // Lift filename/content-type off the field before it is consumed
    // as a byte stream by the store.
    let metadata: Metadata = (&field).into();
    match blobstore.write_streaming(&host, metadata, field).await {
        Ok(filename) => IntoResponse::into_response((
            axum::http::StatusCode::CREATED,
            [
                // Location is resolved relative to the user's `me` URL.
                ("Location", user.me.join(
                    &format!(".kittybox/media/uploads/{}", filename)
                ).unwrap().as_str())
            ]
        )),
        Err(err) => MicropubError::from(err).into_response()
    }
}
/// Serve a stored media blob, with `ETag`/`If-None-Match` conditional
/// request support and `Content-Type`/`Content-Length` headers taken
/// from the stored metadata.
// NOTE(review): generic parameters stripped in transit; reconstructed
// as `<S: MediaStore>` with `Path<String>` and
// `Option<TypedHeader<IfNoneMatch>>` — TODO confirm.
#[tracing::instrument(skip(blobstore))]
pub(crate) async fn serve<S: MediaStore>(
    Host(host): Host,
    Path(path): Path<String>,
    if_none_match: Option<TypedHeader<IfNoneMatch>>,
    Extension(blobstore): Extension<S>
) -> Response {
    use axum::http::StatusCode;
    tracing::debug!("Searching for file...");
    match blobstore.read_streaming(&host, path.as_str()).await {
        Ok((metadata, stream)) => {
            tracing::debug!("Metadata: {:?}", metadata);

            // Stored etags are hashes (strong); quote them to form a
            // valid ETag header value.
            let etag = if let Some(etag) = metadata.etag {
                // NOTE(review): parse target stripped; reconstructed
                // as axum::headers::ETag — TODO confirm.
                let etag = format!("\"{}\"", etag).parse::<axum::headers::ETag>().unwrap();

                if let Some(TypedHeader(if_none_match)) = if_none_match {
                    tracing::debug!("If-None-Match: {:?}", if_none_match);
                    // If-None-Match is a negative precondition that
                    // returns 304 when it doesn't match because it
                    // only matches when file is different
                    if !if_none_match.precondition_passes(&etag) {
                        return StatusCode::NOT_MODIFIED.into_response()
                    }
                }

                Some(etag)
            } else { None };

            let mut r = Response::builder();
            {
                let headers = r.headers_mut().unwrap();
                headers.insert(
                    "Content-Type",
                    HeaderValue::from_str(
                        metadata.content_type
                            .as_deref()
                            .unwrap_or("application/octet-stream")
                    ).unwrap()
                );
                if let Some(length) = metadata.length {
                    headers.insert(
                        "Content-Length",
                        HeaderValue::from_str(&length.to_string()).unwrap()
                    );
                }
                if let Some(etag) = etag {
                    headers.typed_insert(etag);
                }
            }
            // Body is streamed straight from the store.
            r.body(axum::body::StreamBody::new(stream))
                .unwrap()
                .into_response()
        },
        Err(err) => match err.kind() {
            ErrorKind::NotFound => {
                IntoResponse::into_response(StatusCode::NOT_FOUND)
            },
            _ => {
                tracing::error!("{}", err);
                IntoResponse::into_response(StatusCode::INTERNAL_SERVER_ERROR)
            }
        }
    }
}

/// Build the media endpoint router: POST `/` uploads, GET
/// `/uploads/*file` serves, with the blob store and auth backend made
/// available as extensions.
#[must_use]
pub fn router<S: MediaStore, A: AuthBackend>(blobstore: S, auth: A) -> axum::Router {
    axum::Router::new()
        .route("/", axum::routing::post(upload::<S, A>))
        .route("/uploads/*file", axum::routing::get(serve::<S>))
        .layer(axum::Extension(blobstore))
        .layer(axum::Extension(auth))
}
/// I/O errors map onto the backend error kind, except NotFound which
/// is preserved so callers can answer 404.
impl From<tokio::io::Error> for MediaStoreError {
    fn from(source: tokio::io::Error) -> Self {
        Self {
            msg: format!("file I/O error: {}", source),
            kind: match source.kind() {
                std::io::ErrorKind::NotFound => ErrorKind::NotFound,
                _ => ErrorKind::Backend
            },
            source: Some(Box::new(source)),
        }
    }
}

impl FileStore {
    /// Create a store rooted at `base`; blobs live under
    /// `<base>/<domain>/<hash path>`.
    pub fn new<T: Into<PathBuf>>(base: T) -> Self {
        Self { base: base.into() }
    }

    /// Open a buffered temporary file inside the store root for a
    /// pending upload; returns its path and writer.
    async fn mktemp(&self) -> Result<(PathBuf, BufWriter<tokio::fs::File>)> {
        kittybox_util::fs::mktemp(&self.base, "temp", 16)
            .await
            .map(|(name, file)| (name, BufWriter::new(file)))
            .map_err(Into::into)
    }
}

#[async_trait]
impl MediaStore for FileStore {

    /// Stream an upload into a temp file while hashing it, then move it
    /// into place at a path derived from its SHA-256 (content-addressed
    /// storage), writing a sibling `<name>.json` metadata file.
    // NOTE(review): the stream item type was stripped in transit;
    // reconstructed as multipart byte chunks — TODO confirm.
    #[tracing::instrument(skip(self, content))]
    async fn write_streaming<T>(
        &self,
        domain: &str,
        mut metadata: Metadata,
        mut content: T,
    ) -> Result<String>
    where
        T: tokio_stream::Stream<Item = std::result::Result<bytes::Bytes, axum::extract::multipart::MultipartError>> + Unpin + Send + Debug
    {
        let (tempfilepath, mut tempfile) = self.mktemp().await?;
        debug!("Temporary file opened for storing pending upload: {}", tempfilepath.display());
        let mut hasher = sha2::Sha256::new();
        let mut length: usize = 0;

        while let Some(chunk) = content.next().await {
            let chunk = chunk.map_err(|err| MediaStoreError {
                kind: ErrorKind::Backend,
                source: Some(Box::new(err)),
                msg: "Failed to read a data chunk".to_owned()
            })?;
            debug!("Read {} bytes from the stream", chunk.len());
            length += chunk.len();
            // Write and hash the chunk concurrently: hashing runs on a
            // blocking thread, writing on the async side; the hasher is
            // moved out and handed back each iteration.
            let (write_result, _hasher) = tokio::join!(
                {
                    let chunk = chunk.clone();
                    let tempfile = &mut tempfile;
                    async move {
                        tempfile.write_all(&*chunk).await
                    }
                },
                {
                    let chunk = chunk.clone();
                    tokio::task::spawn_blocking(move || {
                        hasher.update(&*chunk);

                        hasher
                    }).map(|r| r.unwrap())
                }
            );
            if let Err(err) = write_result {
                error!("Error while writing pending upload: {}", err);
                drop(tempfile);
                // this is just cleanup, nothing fails if it fails
                // though temporary files might take up space on the hard drive
                // We'll clean them when maintenance time comes
                #[allow(unused_must_use)]
                { tokio::fs::remove_file(tempfilepath).await; }
                return Err(err.into());
            }
            hasher = _hasher;
        }
        // Manually flush the buffer and drop the handle to close the file
        tempfile.flush().await?;
        tempfile.into_inner().sync_all().await?;

        let hash = hasher.finalize();
        debug!("Pending upload hash: {}", hex::encode(&hash));
        // Fan the file out over four one-byte directory levels to keep
        // directory sizes bounded.
        let filename = format!(
            "{}/{}/{}/{}/{}",
            hex::encode([hash[0]]),
            hex::encode([hash[1]]),
            hex::encode([hash[2]]),
            hex::encode([hash[3]]),
            hex::encode(&hash[4..32])
        );
        let domain_str = domain.to_string();
        let filepath = self.base.join(domain_str.as_str()).join(&filename);
        let metafilename = filename.clone() + ".json";
        let metapath = self.base.join(domain_str.as_str()).join(&metafilename);
        let metatemppath = self.base.join(domain_str.as_str()).join(metafilename + ".tmp");
        // Length and etag are authoritative only after hashing.
        metadata.length = std::num::NonZeroUsize::new(length);
        metadata.etag = Some(hex::encode(&hash));
        debug!("File path: {}, metadata: {}", filepath.display(), metapath.display());
        {
            let parent = filepath.parent().unwrap();
            tokio::fs::create_dir_all(parent).await?;
        }
        let mut meta = OpenOptions::new()
            .create_new(true)
            .write(true)
            .open(&metatemppath)
            .await?;
        meta.write_all(&serde_json::to_vec(&metadata).unwrap()).await?;
        // Rename into place last so readers never see partial files.
        tokio::fs::rename(tempfilepath, filepath).await?;
        tokio::fs::rename(metatemppath, metapath).await?;
        Ok(filename)
    }

    /// Open a blob for reading and return its metadata plus a buffered
    /// byte stream over its contents.
    #[tracing::instrument(skip(self))]
    async fn read_streaming(
        &self,
        domain: &str,
        filename: &str,
    ) -> Result<(Metadata, Pin<Box<dyn tokio_stream::Stream<Item = std::io::Result<bytes::Bytes>> + Send>>)> {
        debug!("Domain: {}, filename: {}", domain, filename);
        let path = self.base.join(domain).join(filename);
        debug!("Path: {}", path.display());

        let file = OpenOptions::new()
            .read(true)
            .open(path)
            .await?;
        let meta = self.metadata(domain, filename).await?;

        Ok((meta, Box::pin(
            tokio_util::io::ReaderStream::new(
                // TODO: determine if BufReader provides benefit here
                // From the logs it looks like we're reading 4KiB at a time
                // Buffering file contents seems to double download speed
                // How to benchmark this?
                BufReader::with_capacity(BUF_CAPACITY, file)
            )
            // Sprinkle some salt in form of protective log wrapping
            .inspect_ok(|chunk| debug!("Read {} bytes from file", chunk.len()))
        )))
    }

    /// Read the sidecar `<filename>.json` metadata file.
    #[tracing::instrument(skip(self))]
    async fn metadata(&self, domain: &str, filename: &str) -> Result<Metadata> {
        let metapath = self.base.join(domain).join(format!("{}.json", filename));
        debug!("Metadata path: {}", metapath.display());

        let meta = serde_json::from_slice(&tokio::fs::read(metapath).await?)
            .map_err(|err| MediaStoreError {
                kind: ErrorKind::Json,
                msg: format!("{}", err),
                source: Some(Box::new(err))
            })?;

        Ok(meta)
    }

    /// Stream a byte range of a blob by seeking, then truncating the
    /// tail. `(Unbounded, Included(n))` means "the last n bytes" — the
    /// `headers` crate's non-standard suffix-range convention.
    #[tracing::instrument(skip(self))]
    async fn stream_range(
        &self,
        domain: &str,
        filename: &str,
        range: (Bound<u64>, Bound<u64>)
    ) -> Result<Pin<Box<dyn tokio_stream::Stream<Item = std::io::Result<bytes::Bytes>> + Send>>> {
        let path = self.base.join(format!("{}/{}", domain, filename));
        let metapath = self.base.join(format!("{}/{}.json", domain, filename));
        debug!("Path: {}, metadata: {}", path.display(), metapath.display());

        let mut file = OpenOptions::new()
            .read(true)
            .open(path)
            .await?;

        // Position the file cursor at the range start; `start` is the
        // resulting absolute offset.
        let start = match range {
            (Bound::Included(bound), _) => {
                debug!("Seeking {} bytes forward...", bound);
                file.seek(std::io::SeekFrom::Start(bound)).await?
            }
            (Bound::Excluded(_), _) => unreachable!(),
            (Bound::Unbounded, Bound::Included(bound)) => {
                // Seek to the end minus the bounded bytes
                debug!("Seeking {} bytes back from the end...", bound);
                file.seek(std::io::SeekFrom::End(i64::try_from(bound).unwrap().neg())).await?
            },
            (Bound::Unbounded, Bound::Unbounded) => 0,
            (_, Bound::Excluded(_)) => unreachable!()
        };

        let stream = Box::pin(tokio_util::io::ReaderStream::new(BufReader::with_capacity(BUF_CAPACITY, file)))
            .map_ok({
                let mut bytes_read = 0usize;
                // Total bytes to yield, None for "until EOF".
                let len = match range {
                    (_, Bound::Unbounded) => None,
                    (Bound::Unbounded, Bound::Included(bound)) => Some(bound),
                    (_, Bound::Included(bound)) => Some(bound + 1 - start),
                    (_, Bound::Excluded(_)) => unreachable!()
                };
                move |chunk| {
                    debug!("Read {} bytes from file, {} in this chunk", bytes_read, chunk.len());
                    bytes_read += chunk.len();
                    if let Some(len) = len.map(|len| len.try_into().unwrap()) {
                        if bytes_read > len {
                            // Overshot the range end: drop the chunk
                            // entirely, or trim its tail.
                            if bytes_read - len > chunk.len() {
                                return None
                            }
                            debug!("Truncating last {} bytes", bytes_read - len);
                            return Some(chunk.slice(..chunk.len() - (bytes_read - len)))
                        }
                    }

                    Some(chunk)
                }
            })
            .try_take_while(|x| std::future::ready(Ok(x.is_some())))
            // Will never panic, because the moment the stream yields
            // a None, it is considered exhausted.
            .map_ok(|x| x.unwrap());

        return Ok(Box::pin(stream))
    }

    /// Remove a blob. The `<filename>.json` metadata sidecar is NOT
    /// removed here — NOTE(review): possibly intentional, confirm.
    async fn delete(&self, domain: &str, filename: &str) -> Result<()> {
        let path = self.base.join(format!("{}/{}", domain, filename));

        Ok(tokio::fs::remove_file(path).await?)
    }
}
#[cfg(test)]
mod tests {
    use super::{Metadata, FileStore, MediaStore};
    use std::ops::Bound;
    use tokio::io::AsyncReadExt;

    /// End-to-end check of `stream_range`: write a file through the
    /// store, then read prefix, middle, suffix and full ranges back and
    /// compare against the original bytes.
    #[tokio::test]
    #[tracing_test::traced_test]
    async fn test_ranges() {
        let tempdir = tempfile::tempdir().expect("Failed to create tempdir");
        let store = FileStore::new(tempdir.path());

        // This source file doubles as the test fixture.
        let file: &[u8] = include_bytes!("./file.rs");
        let stream = tokio_stream::iter(file.chunks(100).map(|i| Ok(bytes::Bytes::copy_from_slice(i))));
        let metadata = Metadata {
            filename: Some("file.rs".to_string()),
            content_type: Some("text/plain".to_string()),
            length: None,
            etag: None,
        };

        // write through the interface
        let filename = store.write_streaming(
            "fireburn.ru",
            metadata, stream
        ).await.unwrap();

        tracing::debug!("Writing complete.");

        // Ensure the file is there
        let content = tokio::fs::read(
            tempdir.path()
                .join("fireburn.ru")
                .join(&filename)
        ).await.unwrap();
        assert_eq!(content, file);

        tracing::debug!("Reading range from the start...");
        // try to read range
        let range = {
            let stream = store.stream_range(
                "fireburn.ru", &filename,
                (Bound::Included(0), Bound::Included(299))
            ).await.unwrap();

            let mut reader = tokio_util::io::StreamReader::new(stream);

            let mut buf = Vec::default();
            reader.read_to_end(&mut buf).await.unwrap();

            buf
        };

        assert_eq!(range.len(), 300);
        assert_eq!(range.as_slice(), &file[..=299]);

        tracing::debug!("Reading range from the middle...");

        let range = {
            let stream = store.stream_range(
                "fireburn.ru", &filename,
                (Bound::Included(150), Bound::Included(449))
            ).await.unwrap();

            let mut reader = tokio_util::io::StreamReader::new(stream);

            let mut buf = Vec::default();
            reader.read_to_end(&mut buf).await.unwrap();

            buf
        };

        assert_eq!(range.len(), 300);
        assert_eq!(range.as_slice(), &file[150..=449]);

        tracing::debug!("Reading range from the end...");
        let range = {
            let stream = store.stream_range(
                "fireburn.ru", &filename,
                // Note: the `headers` crate parses bounds in a
                // non-standard way, where unbounded start actually
                // means getting things from the end...
                (Bound::Unbounded, Bound::Included(300))
            ).await.unwrap();

            let mut reader = tokio_util::io::StreamReader::new(stream);

            let mut buf = Vec::default();
            reader.read_to_end(&mut buf).await.unwrap();

            buf
        };

        assert_eq!(range.len(), 300);
        assert_eq!(range.as_slice(), &file[file.len()-300..file.len()]);

        tracing::debug!("Reading the whole file...");
        // Fully unbounded range; the leading slash checks that the
        // store tolerates a path-rooted filename.
        let range = {
            let stream = store.stream_range(
                "fireburn.ru", &("/".to_string() + &filename),
                (Bound::Unbounded, Bound::Unbounded)
            ).await.unwrap();

            let mut reader = tokio_util::io::StreamReader::new(stream);

            let mut buf = Vec::default();
            reader.read_to_end(&mut buf).await.unwrap();

            buf
        };

        assert_eq!(range.len(), file.len());
        assert_eq!(range.as_slice(), file);
    }

    /// Round-trip test: write a stream through the interface, verify
    /// the on-disk blob and metadata sidecar, then read the blob back
    /// through `read_streaming` and verify the *returned* metadata.
    #[tokio::test]
    #[tracing_test::traced_test]
    async fn test_streaming_read_write() {
        let tempdir = tempfile::tempdir().expect("Failed to create tempdir");
        let store = FileStore::new(tempdir.path());

        let file: &[u8] = include_bytes!("./file.rs");
        let stream = tokio_stream::iter(file.chunks(100).map(|i| Ok(bytes::Bytes::copy_from_slice(i))));
        let metadata = Metadata {
            filename: Some("style.css".to_string()),
            content_type: Some("text/css".to_string()),
            length: None,
            etag: None,
        };

        // write through the interface
        let filename = store.write_streaming(
            "fireburn.ru",
            metadata, stream
        ).await.unwrap();
        println!("{}, {}", filename, tempdir.path()
            .join("fireburn.ru")
            .join(&filename)
            .display());
        let content = tokio::fs::read(
            tempdir.path()
                .join("fireburn.ru")
                .join(&filename)
        ).await.unwrap();
        assert_eq!(content, file);

        // check internal metadata format
        let meta: Metadata = serde_json::from_slice(&tokio::fs::read(
            tempdir.path()
                .join("fireburn.ru")
                .join(filename.clone() + ".json")
        ).await.unwrap()).unwrap();
        assert_eq!(meta.content_type.as_deref(), Some("text/css"));
        assert_eq!(meta.filename.as_deref(), Some("style.css"));
        assert_eq!(meta.length.map(|i| i.get()), Some(file.len()));
        assert!(meta.etag.is_some());

        // read back the data using the interface
        let (metadata, read_back) = {
            let (metadata, stream) = store.read_streaming(
                "fireburn.ru",
                &filename
            ).await.unwrap();
            let mut reader = tokio_util::io::StreamReader::new(stream);

            let mut buf = Vec::default();
            reader.read_to_end(&mut buf).await.unwrap();

            (metadata, buf)
        };

        assert_eq!(read_back, file);
        // FIX: these assertions previously re-checked `meta` (the
        // on-disk sidecar parsed above), leaving the metadata returned
        // by `read_streaming` itself unverified. Check `metadata`.
        assert_eq!(metadata.content_type.as_deref(), Some("text/css"));
        assert_eq!(metadata.filename.as_deref(), Some("style.css"));
        assert_eq!(metadata.length.map(|i| i.get()), Some(file.len()));
        assert!(metadata.etag.is_some());
    }
}
#[derive(Debug, Deserialize, Serialize)]
pub struct Metadata {
    /// Content type of the file. If None, the content-type is considered undefined.
    pub content_type: Option<String>,
    /// The original filename that was passed.
    pub filename: Option<String>,
    /// The recorded length of the file.
    pub length: Option<NonZeroUsize>,
    /// The e-tag of a file. Note: it must be a strong e-tag, for example, a hash.
    pub etag: Option<String>,
}
/// Lift filename and content type off a multipart field; length and
/// etag are only known after the store has consumed the stream.
impl From<&Field<'_>> for Metadata {
    fn from(field: &Field<'_>) -> Self {
        Self {
            content_type: field.content_type()
                .map(|i| i.to_owned()),
            filename: field.file_name()
                .map(|i| i.to_owned()),
            length: None,
            etag: None,
        }
    }
}


/// Coarse classification of media-store failures; `NotFound` is the
/// only kind handlers treat specially (it maps to HTTP 404).
#[derive(Debug, Clone, Copy)]
pub enum ErrorKind {
    Backend,
    Permission,
    Json,
    NotFound,
    Other,
}

/// Error type shared by all media-store backends: a kind, a message,
/// and an optional boxed source error.
#[derive(Debug)]
pub struct MediaStoreError {
    kind: ErrorKind,
    source: Option<Box<dyn std::error::Error + Send + Sync>>,
    msg: String,
}

impl MediaStoreError {
    /// The coarse error classification (kinds are `Copy`).
    pub fn kind(&self) -> ErrorKind {
        self.kind
    }
}

impl std::error::Error for MediaStoreError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        self.source
            .as_ref()
            .map(|i| i.as_ref() as &dyn std::error::Error)
    }
}

impl std::fmt::Display for MediaStoreError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "{}: {}",
            match self.kind {
                ErrorKind::Backend => "media storage backend error",
                ErrorKind::Permission => "permission denied",
                ErrorKind::Json => "failed to parse json",
                ErrorKind::NotFound => "blob not found",
                ErrorKind::Other => "unknown media storage error",
            },
            self.msg
        )
    }
}

// NOTE(review): generic parameters stripped in transit; reconstructed
// as the conventional defaulted-error alias — TODO confirm.
pub type Result<T, E = MediaStoreError> = std::result::Result<T, E>;
return Ok(stream), + (Included(start), Unbounded) => (start.try_into().unwrap(), length - 1), + (Unbounded, Included(end)) => (length - usize::try_from(end).unwrap(), length - 1), + (Included(start), Included(end)) => (start.try_into().unwrap(), end.try_into().unwrap()), + (_, _) => unreachable!() + }; + + stream = Box::pin( + stream.map_ok({ + let mut bytes_skipped = 0usize; + let mut bytes_read = 0usize; + + move |chunk| { + debug!("Skipped {}/{} bytes, chunk len {}", bytes_skipped, start, chunk.len()); + let chunk = if bytes_skipped < start { + let need_to_skip = start - bytes_skipped; + if chunk.len() < need_to_skip { + return None + } + debug!("Skipping {} bytes", need_to_skip); + bytes_skipped += need_to_skip; + + chunk.slice(need_to_skip..) + } else { + chunk + }; + + debug!("Read {} bytes from file, {} in this chunk", bytes_read, chunk.len()); + bytes_read += chunk.len(); + + if bytes_read > length { + if bytes_read - length > chunk.len() { + return None + } + debug!("Truncating last {} bytes", bytes_read - length); + return Some(chunk.slice(..chunk.len() - (bytes_read - length))) + } + + Some(chunk) + } + }) + .try_skip_while(|x| std::future::ready(Ok(x.is_none()))) + .try_take_while(|x| std::future::ready(Ok(x.is_some()))) + .map_ok(|x| x.unwrap()) + ); + + return Ok(stream); + } + + /// Read metadata for a file. + /// + /// The default implementation uses the `read_streaming` method + /// and drops the stream containing file content. 
// TODO: Vendor in the Metrics struct from warp_prometheus and rework the path matching algorithm

/// Build a warp logging filter that records per-request Prometheus
/// metrics for the given path prefixes, using the default registry.
// NOTE(review): the return type's generic parameter was stripped in
// transit; `warp::log::Log` is generic over the closure type — TODO
// confirm the exact signature against the original source.
pub fn metrics(path_includes: Vec<String>) -> warp::log::Log<impl Fn(warp::log::Info<'_>) + Clone> {
    let metrics = warp_prometheus::Metrics::new(prometheus::default_registry(), &path_includes);
    warp::log::custom(move |info| metrics.http_metrics(info))
}

/// Encode every metric family in the default registry into the
/// Prometheus text exposition format and return the raw bytes.
pub fn gather() -> Vec<u8> {
    let mut buffer: Vec<u8> = vec![];
    let encoder = prometheus::TextEncoder::new();
    let metric_families = prometheus::gather();
    encoder.encode(&metric_families, &mut buffer).unwrap();

    buffer
}
/// Query-string parameters accepted by the Micropub `GET` endpoint;
/// an unparsable query falls back to `q=""` (reported as an error).
#[derive(Deserialize)]
struct QueryOptions {
    q: String,
    url: Option<String>,
}

/// Handle Micropub `GET` queries: `?q=config`, `?q=channel`, and
/// `?q=source&url=...` (the latter requiring a write-ish scope).
// NOTE(review): generic parameters stripped in transit; reconstructed
// as `Request<ApplicationState<Backend>>` and `req.ext::<User>()` —
// TODO confirm against the original source.
pub async fn get_handler<Backend>(req: Request<ApplicationState<Backend>>) -> Result
where
    Backend: Storage + Send + Sync,
{
    // The auth middleware is expected to have stashed the user here.
    let user = req.ext::<User>().unwrap();
    let backend = &req.state().storage;
    let media_endpoint = &req.state().media_endpoint;
    let query = req.query::<QueryOptions>().unwrap_or(QueryOptions {
        q: "".to_string(),
        url: None,
    });
    match &*query.q {
        "config" => {
            let channels: Vec<MicropubChannel>;
            match backend.get_channels(user.me.as_str()).await {
                Ok(chans) => channels = chans,
                Err(err) => return Ok(err.into())
            }
            Ok(Response::builder(200).body(json!({
                "q": ["source", "config", "channel"],
                "channels": channels,
                "media-endpoint": media_endpoint
            })).build())
        },
        "channel" => {
            let channels: Vec<MicropubChannel>;
            match backend.get_channels(user.me.as_str()).await {
                Ok(chans) => channels = chans,
                Err(err) => return Ok(err.into())
            }
            Ok(Response::builder(200).body(json!(channels)).build())
        }
        "source" => {
            // Reading a post's source is allowed for any scope that
            // could modify posts.
            if user.check_scope("create") || user.check_scope("update") || user.check_scope("delete") || user.check_scope("undelete") {
                if let Some(url) = query.url {
                    match backend.get_post(&url).await {
                        Ok(post) => if let Some(post) = post {
                            Ok(Response::builder(200).body(post).build())
                        } else {
                            Ok(Response::builder(404).build())
                        },
                        Err(err) => Ok(err.into())
                    }
                } else {
                    Ok(Response::builder(400).body(json!({
                        "error": "invalid_request",
                        "error_description": "Please provide `url`."
                    })).build())
                }
            } else {
                Ok(Response::builder(401).body(json!({
                    "error": "insufficient_scope",
                    "error_description": "You don't have the required scopes to proceed.",
                    "scope": "update"
                })).build())
            }
        },
        // TODO: ?q=food, ?q=geo, ?q=contacts
        // Depends on indexing posts
        // Errors
        "" => Ok(Response::builder(400).body(json!({
            "error": "invalid_request",
            "error_description": "No ?q= parameter specified. Try ?q=config maybe?"
        })).build()),
        _ => Ok(Response::builder(400).body(json!({
            "error": "invalid_request",
            "error_description": "Unsupported ?q= query. Try ?q=config and see the q array for supported values."
        })).build())
    }
}
+ // Switching `ctxs` to a hashmap might speed it up to O(n) + // The key would be the URL/UID + .map(|i| ctxs + .iter() + .find(|ctx| Some(ctx.url.as_str()) == i.as_str()) + .and_then(|ctx| ctx.mf2["items"].get(0)) + .unwrap_or(i)) + .cloned() + .collect::>() + }) +} + +#[tracing::instrument(skip(db))] +async fn background_processing( + db: D, + mf2: serde_json::Value, + http: reqwest::Client, +) -> () { + // TODO: Post-processing the post (aka second write pass) + // - [x] Download rich reply contexts + // - [ ] Syndicate the post if requested, add links to the syndicated copies + // - [ ] Send WebSub notifications to the hub (if we happen to have one) + // - [x] Send webmentions + + use futures_util::StreamExt; + + let uid: &str = mf2["properties"]["uid"][0].as_str().unwrap(); + + let context_props = ["in-reply-to", "like-of", "repost-of", "bookmark-of"]; + let mut context_urls: Vec = vec![]; + for prop in &context_props { + if let Some(array) = mf2["properties"][prop].as_array() { + context_urls.extend( + array + .iter() + .filter_map(|v| v.as_str()) + .filter_map(|v| v.parse::().ok()), + ); + } + } + // TODO parse HTML in e-content and add links found here + context_urls.sort_unstable_by_key(|u| u.to_string()); + context_urls.dedup(); + + // TODO: Make a stream to fetch all these posts and convert them to MF2 + let post_contexts = { + let http = &http; + tokio_stream::iter(context_urls.into_iter()) + .then(move |url: url::Url| http.get(url).send()) + .filter_map(|response| futures::future::ready(response.ok())) + .filter(|response| futures::future::ready(response.status() == 200)) + .filter_map(|response: reqwest::Response| async move { + // 1. We need to preserve the URL + // 2. We need to get the HTML for MF2 processing + // 3. We need to get the webmention endpoint address + // All of that can be done in one go. 
+ let url = response.url().clone(); + // TODO parse link headers + let links = response + .headers() + .get_all(hyper::http::header::LINK) + .iter() + .cloned() + .collect::>(); + let html = response.text().await; + if html.is_err() { + return None; + } + let html = html.unwrap(); + let mf2 = microformats::from_html(&html, url.clone()).unwrap(); + // TODO use first Link: header if available + let webmention: Option = mf2 + .rels + .by_rels() + .get("webmention") + .and_then(|i| i.first().cloned()); + + dbg!(Some(FetchedPostContext { + url, + mf2: serde_json::to_value(mf2).unwrap(), + webmention + })) + }) + .collect::>() + .await + }; + + let mut update = MicropubUpdate { + replace: Some(Default::default()), + ..Default::default() + }; + for prop in context_props { + if let Some(json) = populate_reply_context(&mf2, prop, &post_contexts) { + update.replace.as_mut().unwrap().insert(prop.to_owned(), json); + } + } + if !update.replace.as_ref().unwrap().is_empty() { + if let Err(err) = db.update_post(uid, update).await { + error!("Failed to update post with rich reply contexts: {}", err); + } + } + + // At this point we can start syndicating the post. + // Currently we don't really support any syndication endpoints, but still! 
+ /*if let Some(syndicate_to) = mf2["properties"]["mp-syndicate-to"].as_array() { + let http = &http; + tokio_stream::iter(syndicate_to) + .filter_map(|i| futures::future::ready(i.as_str())) + .for_each_concurrent(3, |s: &str| async move { + #[allow(clippy::match_single_binding)] + match s { + _ => { + todo!("Syndicate to generic webmention-aware service {}", s); + } + // TODO special handling for non-webmention-aware services like the birdsite + } + }) + .await; + }*/ + + { + let http = &http; + tokio_stream::iter( + post_contexts + .into_iter() + .filter(|ctx| ctx.webmention.is_some()), + ) + .for_each_concurrent(2, |ctx| async move { + let mut map = std::collections::HashMap::new(); + map.insert("source", uid); + map.insert("target", ctx.url.as_str()); + + match http + .post(ctx.webmention.unwrap().clone()) + .form(&map) + .send() + .await + { + Ok(res) => { + if !res.status().is_success() { + warn!( + "Failed to send a webmention for {}: got HTTP {}", + ctx.url, + res.status() + ); + } else { + info!( + "Sent a webmention to {}, got HTTP {}", + ctx.url, + res.status() + ) + } + } + Err(err) => warn!("Failed to send a webmention for {}: {}", ctx.url, err), + } + }) + .await; + } +} + +// TODO actually save the post to the database and schedule post-processing +pub(crate) async fn _post( + user: &TokenData, + uid: String, + mf2: serde_json::Value, + db: D, + http: reqwest::Client, + jobset: Arc>>, +) -> Result { + // Here, we have the following guarantees: + // - The MF2-JSON document is normalized (guaranteed by normalize_mf2) + // - The MF2-JSON document contains a UID + // - The MF2-JSON document's URL list contains its UID + // - The MF2-JSON document's "content" field contains an HTML blob, if present + // - The MF2-JSON document's publishing datetime is present + // - The MF2-JSON document's target channels are set + // - The MF2-JSON document's author is set + + // Security check! Do we have an OAuth2 scope to proceed? 
+ if !user.check_scope(&Scope::Create) { + return Err(MicropubError { + error: ErrorType::InvalidScope, + error_description: "Not enough privileges - try acquiring the \"create\" scope." + .to_owned(), + }); + } + + // Security check #2! Are we posting to our own website? + if !uid.starts_with(user.me.as_str()) + || mf2["properties"]["channel"] + .as_array() + .unwrap_or(&vec![]) + .iter() + .any(|url| !url.as_str().unwrap().starts_with(user.me.as_str())) + { + return Err(MicropubError { + error: ErrorType::Forbidden, + error_description: "You're posting to a website that's not yours.".to_owned(), + }); + } + + // Security check #3! Are we overwriting an existing document? + if db.post_exists(&uid).await? { + return Err(MicropubError { + error: ErrorType::AlreadyExists, + error_description: "UID clash was detected, operation aborted.".to_owned(), + }); + } + let user_domain = format!( + "{}{}", + user.me.host_str().unwrap(), + user.me.port() + .map(|port| format!(":{}", port)) + .unwrap_or_default() + ); + // Save the post + tracing::debug!("Saving post to database..."); + db.put_post(&mf2, &user_domain).await?; + + let mut channels = mf2["properties"]["channel"] + .as_array() + .unwrap() + .iter() + .map(|i| i.as_str().unwrap_or("")) + .filter(|i| !i.is_empty()); + + let default_channel = user + .me + .join(util::DEFAULT_CHANNEL_PATH) + .unwrap() + .to_string(); + let vcards_channel = user + .me + .join(util::CONTACTS_CHANNEL_PATH) + .unwrap() + .to_string(); + let food_channel = user.me.join(util::FOOD_CHANNEL_PATH).unwrap().to_string(); + let default_channels = vec![default_channel, vcards_channel, food_channel]; + + for chan in &mut channels { + debug!("Adding post {} to channel {}", uid, chan); + if db.post_exists(chan).await? 
{ + db.add_to_feed(chan, &uid).await?; + } else if default_channels.iter().any(|i| chan == i) { + util::create_feed(&db, &uid, chan, user).await?; + } else { + warn!("Ignoring non-existent channel: {}", chan); + } + } + + let reply = + IntoResponse::into_response((StatusCode::ACCEPTED, [("Location", uid.as_str())])); + + #[cfg(not(tokio_unstable))] + jobset.lock().await.spawn(background_processing(db, mf2, http)); + #[cfg(tokio_unstable)] + jobset.lock().await.build_task() + .name(format!("Kittybox background processing for post {}", uid.as_str()).as_str()) + .spawn(background_processing(db, mf2, http)); + + Ok(reply) +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(rename_all = "snake_case")] +enum ActionType { + Delete, + Update, +} + +#[derive(Serialize, Deserialize, Debug)] +#[serde(untagged)] +pub enum MicropubPropertyDeletion { + Properties(Vec), + Values(HashMap>) +} +#[derive(Serialize, Deserialize)] +struct MicropubFormAction { + action: ActionType, + url: String, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct MicropubAction { + action: ActionType, + url: String, + #[serde(flatten)] + #[serde(skip_serializing_if = "Option::is_none")] + update: Option +} + +#[derive(Serialize, Deserialize, Debug, Default)] +pub struct MicropubUpdate { + #[serde(skip_serializing_if = "Option::is_none")] + pub replace: Option>>, + #[serde(skip_serializing_if = "Option::is_none")] + pub add: Option>>, + #[serde(skip_serializing_if = "Option::is_none")] + pub delete: Option, + +} + +impl From for MicropubAction { + fn from(a: MicropubFormAction) -> Self { + debug_assert!(matches!(a.action, ActionType::Delete)); + Self { + action: a.action, + url: a.url, + update: None + } + } +} + +#[tracing::instrument(skip(db))] +async fn post_action( + action: MicropubAction, + db: D, + user: User, +) -> Result<(), MicropubError> { + let uri = if let Ok(uri) = action.url.parse::() { + uri + } else { + return Err(MicropubError { + error: ErrorType::InvalidRequest, + 
error_description: "Your URL doesn't parse properly.".to_owned(), + }); + }; + + if uri.authority().unwrap() + != user + .me + .as_str() + .parse::() + .unwrap() + .authority() + .unwrap() + { + return Err(MicropubError { + error: ErrorType::Forbidden, + error_description: "Don't tamper with others' posts!".to_owned(), + }); + } + + match action.action { + ActionType::Delete => { + if !user.check_scope(&Scope::Delete) { + return Err(MicropubError { + error: ErrorType::InvalidScope, + error_description: "You need a \"delete\" scope for this.".to_owned(), + }); + } + + db.delete_post(&action.url).await? + } + ActionType::Update => { + if !user.check_scope(&Scope::Update) { + return Err(MicropubError { + error: ErrorType::InvalidScope, + error_description: "You need an \"update\" scope for this.".to_owned(), + }); + } + + db.update_post( + &action.url, + action.update.ok_or(MicropubError { + error: ErrorType::InvalidRequest, + error_description: "Update request is not set.".to_owned(), + })? + ) + .await? 
+ } + } + + Ok(()) +} + +enum PostBody { + Action(MicropubAction), + MF2(serde_json::Value), +} + +#[tracing::instrument] +async fn dispatch_body( + mut body: BodyStream, + content_type: ContentType, +) -> Result { + let body: Vec = { + debug!("Buffering body..."); + use tokio_stream::StreamExt; + let mut buf = Vec::default(); + + while let Some(chunk) = body.next().await { + buf.extend_from_slice(&chunk.unwrap()) + } + + buf + }; + + debug!("Content-Type: {:?}", content_type); + if content_type == ContentType::json() { + if let Ok(action) = serde_json::from_slice::(&body) { + Ok(PostBody::Action(action)) + } else if let Ok(body) = serde_json::from_slice::(&body) { + // quick sanity check + if !body.is_object() || !body["type"].is_array() { + return Err(MicropubError { + error: ErrorType::InvalidRequest, + error_description: "Invalid MF2-JSON detected: `.` should be an object, `.type` should be an array of MF2 types".to_owned() + }); + } + + Ok(PostBody::MF2(body)) + } else { + Err(MicropubError { + error: ErrorType::InvalidRequest, + error_description: "Invalid JSON object passed.".to_owned(), + }) + } + } else if content_type == ContentType::form_url_encoded() { + if let Ok(body) = serde_urlencoded::from_bytes::(&body) { + Ok(PostBody::Action(body.into())) + } else if let Ok(body) = serde_urlencoded::from_bytes::>(&body) { + Ok(PostBody::MF2(form_to_mf2_json(body))) + } else { + Err(MicropubError { + error: ErrorType::InvalidRequest, + error_description: "Invalid form-encoded data. Try h=entry&content=Hello!" + .to_owned(), + }) + } + } else { + Err(MicropubError::new( + ErrorType::UnsupportedMediaType, + "This Content-Type is not recognized. 
Try application/json instead?", + )) + } +} + +#[tracing::instrument(skip(db, http))] +pub(crate) async fn post( + Extension(db): Extension, + Extension(http): Extension, + Extension(jobset): Extension>>>, + TypedHeader(content_type): TypedHeader, + user: User, + body: BodyStream, +) -> axum::response::Response { + match dispatch_body(body, content_type).await { + Ok(PostBody::Action(action)) => match post_action(action, db, user).await { + Ok(()) => Response::default(), + Err(err) => err.into_response(), + }, + Ok(PostBody::MF2(mf2)) => { + let (uid, mf2) = normalize_mf2(mf2, &user); + match _post(&user, uid, mf2, db, http, jobset).await { + Ok(response) => response, + Err(err) => err.into_response(), + } + } + Err(err) => err.into_response(), + } +} + +#[tracing::instrument(skip(db))] +pub(crate) async fn query( + Extension(db): Extension, + query: Option>, + Host(host): Host, + user: User, +) -> axum::response::Response { + // We handle the invalid query case manually to return a + // MicropubError instead of HTTP 422 + let query = if let Some(Query(query)) = query { + query + } else { + return MicropubError::new( + ErrorType::InvalidRequest, + "Invalid query provided. Try ?q=config to see what you can do." 
+ ).into_response(); + }; + + if axum::http::Uri::try_from(user.me.as_str()) + .unwrap() + .authority() + .unwrap() + != &host + { + return MicropubError::new( + ErrorType::NotAuthorized, + "This website doesn't belong to you.", + ) + .into_response(); + } + + let user_domain = format!( + "{}{}", + user.me.host_str().unwrap(), + user.me.port() + .map(|port| format!(":{}", port)) + .unwrap_or_default() + ); + match query.q { + QueryType::Config => { + let channels: Vec = match db.get_channels(user.me.as_str()).await { + Ok(chans) => chans, + Err(err) => { + return MicropubError::new( + ErrorType::InternalServerError, + &format!("Error fetching channels: {}", err), + ) + .into_response() + } + }; + + axum::response::Json(json!({ + "q": [ + QueryType::Source, + QueryType::Config, + QueryType::Channel, + QueryType::SyndicateTo + ], + "channels": channels, + "_kittybox_authority": user.me.as_str(), + "syndicate-to": [], + "media-endpoint": user.me.join("/.kittybox/media").unwrap().as_str() + })) + .into_response() + } + QueryType::Source => { + match query.url { + Some(url) => { + match db.get_post(&url).await { + Ok(some) => match some { + Some(post) => axum::response::Json(&post).into_response(), + None => MicropubError::new( + ErrorType::NotFound, + "The specified MF2 object was not found in database.", + ) + .into_response(), + }, + Err(err) => MicropubError::new( + ErrorType::InternalServerError, + &format!("Backend error: {}", err), + ) + .into_response(), + } + } + None => { + // Here, one should probably attempt to query at least the main feed and collect posts + // Using a pre-made query function can't be done because it does unneeded filtering + // Don't implement for now, this is optional + MicropubError::new( + ErrorType::InvalidRequest, + "Querying for post list is not implemented yet.", + ) + .into_response() + } + } + } + QueryType::Channel => match db.get_channels(&user_domain).await { + Ok(chans) => axum::response::Json(json!({ "channels": chans 
})).into_response(), + Err(err) => MicropubError::new( + ErrorType::InternalServerError, + &format!("Error fetching channels: {}", err), + ) + .into_response(), + }, + QueryType::SyndicateTo => { + axum::response::Json(json!({ "syndicate-to": [] })).into_response() + } + } +} + +#[must_use] +pub fn router( + storage: S, + http: reqwest::Client, + auth: A, + jobset: Arc>> +) -> axum::routing::MethodRouter +where + S: Storage + 'static, + A: AuthBackend +{ + axum::routing::get(query::) + .post(post::) + .layer::<_, _, std::convert::Infallible>(tower_http::cors::CorsLayer::new() + .allow_methods([ + axum::http::Method::GET, + axum::http::Method::POST, + ]) + .allow_origin(tower_http::cors::Any)) + .layer::<_, _, std::convert::Infallible>(axum::Extension(storage)) + .layer::<_, _, std::convert::Infallible>(axum::Extension(http)) + .layer::<_, _, std::convert::Infallible>(axum::Extension(auth)) + .layer::<_, _, std::convert::Infallible>(axum::Extension(jobset)) +} + +#[cfg(test)] +#[allow(dead_code)] +impl MicropubQuery { + fn config() -> Self { + Self { + q: QueryType::Config, + url: None, + } + } + + fn source(url: &str) -> Self { + Self { + q: QueryType::Source, + url: Some(url.to_owned()), + } + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use crate::{database::Storage, micropub::MicropubError}; + use hyper::body::HttpBody; + use serde_json::json; + use tokio::sync::Mutex; + + use super::FetchedPostContext; + use kittybox_indieauth::{Scopes, Scope, TokenData}; + use axum::extract::Host; + + #[test] + fn test_populate_reply_context() { + let already_expanded_reply_ctx = json!({ + "type": ["h-entry"], + "properties": { + "content": ["Hello world!"] + } + }); + let mf2 = json!({ + "type": ["h-entry"], + "properties": { + "like-of": [ + "https://fireburn.ru/posts/example", + already_expanded_reply_ctx, + "https://fireburn.ru/posts/non-existent" + ] + } + }); + let test_ctx = json!({ + "type": ["h-entry"], + "properties": { + "content": ["This is a post 
which was reacted to."] + } + }); + let reply_contexts = vec![FetchedPostContext { + url: "https://fireburn.ru/posts/example".parse().unwrap(), + mf2: json!({ "items": [test_ctx] }), + webmention: None, + }]; + + let like_of = super::populate_reply_context(&mf2, "like-of", &reply_contexts).unwrap(); + + assert_eq!(like_of[0], test_ctx); + assert_eq!(like_of[1], already_expanded_reply_ctx); + assert_eq!(like_of[2], "https://fireburn.ru/posts/non-existent"); + } + + #[tokio::test] + async fn test_post_reject_scope() { + let db = crate::database::MemoryStorage::new(); + + let post = json!({ + "type": ["h-entry"], + "properties": { + "content": ["Hello world!"] + } + }); + let user = TokenData { + me: "https://localhost:8080/".parse().unwrap(), + client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), + scope: Scopes::new(vec![Scope::Profile]), + iat: None, exp: None + }; + let (uid, mf2) = super::normalize_mf2(post, &user); + + let err = super::_post(&user, uid, mf2, db.clone(), reqwest::Client::new(), Arc::new(Mutex::new(tokio::task::JoinSet::new()))) + .await + .unwrap_err(); + + assert_eq!(err.error, super::ErrorType::InvalidScope); + + let hashmap = db.mapping.read().await; + assert!(hashmap.is_empty()); + } + + #[tokio::test] + async fn test_post_reject_different_user() { + let db = crate::database::MemoryStorage::new(); + + let post = json!({ + "type": ["h-entry"], + "properties": { + "content": ["Hello world!"], + "uid": ["https://fireburn.ru/posts/hello"], + "url": ["https://fireburn.ru/posts/hello"] + } + }); + let user = TokenData { + me: "https://aaronparecki.com/".parse().unwrap(), + client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), + scope: Scopes::new(vec![Scope::Profile, Scope::Create, Scope::Update, Scope::Media]), + iat: None, exp: None + }; + let (uid, mf2) = super::normalize_mf2(post, &user); + + let err = super::_post(&user, uid, mf2, db.clone(), reqwest::Client::new(), Arc::new(Mutex::new(tokio::task::JoinSet::new()))) + .await + 
.unwrap_err(); + + assert_eq!(err.error, super::ErrorType::Forbidden); + + let hashmap = db.mapping.read().await; + assert!(hashmap.is_empty()); + } + + #[tokio::test] + async fn test_post_mf2() { + let db = crate::database::MemoryStorage::new(); + + let post = json!({ + "type": ["h-entry"], + "properties": { + "content": ["Hello world!"] + } + }); + let user = TokenData { + me: "https://localhost:8080/".parse().unwrap(), + client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), + scope: Scopes::new(vec![Scope::Profile, Scope::Create]), + iat: None, exp: None + }; + let (uid, mf2) = super::normalize_mf2(post, &user); + + let res = super::_post(&user, uid, mf2, db.clone(), reqwest::Client::new(), Arc::new(Mutex::new(tokio::task::JoinSet::new()))) + .await + .unwrap(); + + assert!(res.headers().contains_key("Location")); + let location = res.headers().get("Location").unwrap(); + assert!(db.post_exists(location.to_str().unwrap()).await.unwrap()); + assert!(db + .post_exists("https://localhost:8080/feeds/main") + .await + .unwrap()); + } + + #[tokio::test] + async fn test_query_foreign_url() { + let mut res = super::query( + axum::Extension(crate::database::MemoryStorage::new()), + Some(axum::extract::Query(super::MicropubQuery::source( + "https://aaronparecki.com/feeds/main", + ))), + Host("aaronparecki.com".to_owned()), + crate::indieauth::User::( + TokenData { + me: "https://fireburn.ru/".parse().unwrap(), + client_id: "https://kittybox.fireburn.ru/".parse().unwrap(), + scope: Scopes::new(vec![Scope::Profile, Scope::Create, Scope::Update, Scope::Media]), + iat: None, exp: None + }, std::marker::PhantomData + ) + ) + .await; + + assert_eq!(res.status(), 401); + let body = res.body_mut().data().await.unwrap().unwrap(); + let json: MicropubError = serde_json::from_slice(&body as &[u8]).unwrap(); + assert_eq!(json.error, super::ErrorType::NotAuthorized); + } +} diff --git a/src/micropub/util.rs b/src/micropub/util.rs new file mode 100644 index 0000000..940d7c3 --- 
/dev/null +++ b/src/micropub/util.rs @@ -0,0 +1,444 @@ +use crate::database::Storage; +use kittybox_indieauth::TokenData; +use chrono::prelude::*; +use core::iter::Iterator; +use newbase60::num_to_sxg; +use serde_json::json; +use std::convert::TryInto; + +pub(crate) const DEFAULT_CHANNEL_PATH: &str = "/feeds/main"; +const DEFAULT_CHANNEL_NAME: &str = "Main feed"; +pub(crate) const CONTACTS_CHANNEL_PATH: &str = "/feeds/vcards"; +const CONTACTS_CHANNEL_NAME: &str = "My address book"; +pub(crate) const FOOD_CHANNEL_PATH: &str = "/feeds/food"; +const FOOD_CHANNEL_NAME: &str = "My recipe book"; + +fn get_folder_from_type(post_type: &str) -> String { + (match post_type { + "h-feed" => "feeds/", + "h-card" => "vcards/", + "h-event" => "events/", + "h-food" => "food/", + _ => "posts/", + }) + .to_string() +} + +/// Reset the datetime to a proper datetime. +/// Do not attempt to recover the information. +/// Do not pass GO. Do not collect $200. +fn reset_dt(post: &mut serde_json::Value) -> DateTime { + let curtime: DateTime = Local::now(); + post["properties"]["published"] = json!([curtime.to_rfc3339()]); + chrono::DateTime::from(curtime) +} + +pub fn normalize_mf2(mut body: serde_json::Value, user: &TokenData) -> (String, serde_json::Value) { + // Normalize the MF2 object here. + let me = &user.me; + let folder = get_folder_from_type(body["type"][0].as_str().unwrap()); + let published: DateTime = + if let Some(dt) = body["properties"]["published"][0].as_str() { + // Check if the datetime is parsable. + match DateTime::parse_from_rfc3339(dt) { + Ok(dt) => dt, + Err(_) => reset_dt(&mut body), + } + } else { + // Set the datetime. + // Note: this code block duplicates functionality with the above failsafe. + // Consider refactoring it to a helper function? 
+ reset_dt(&mut body) + }; + match body["properties"]["uid"][0].as_str() { + None => { + let uid = serde_json::Value::String( + me.join( + &(folder.clone() + + &num_to_sxg(published.timestamp_millis().try_into().unwrap())), + ) + .unwrap() + .to_string(), + ); + body["properties"]["uid"] = serde_json::Value::Array(vec![uid.clone()]); + match body["properties"]["url"].as_array_mut() { + Some(array) => array.push(uid), + None => body["properties"]["url"] = body["properties"]["uid"].clone(), + } + } + Some(uid_str) => { + let uid = uid_str.to_string(); + match body["properties"]["url"].as_array_mut() { + Some(array) => { + if !array.iter().any(|i| i.as_str().unwrap_or("") == uid) { + array.push(serde_json::Value::String(uid)) + } + } + None => body["properties"]["url"] = body["properties"]["uid"].clone(), + } + } + } + if let Some(slugs) = body["properties"]["mp-slug"].as_array() { + let new_urls = slugs + .iter() + .map(|i| i.as_str().unwrap_or("")) + .filter(|i| i != &"") + .map(|i| me.join(&((&folder).clone() + i)).unwrap().to_string()) + .collect::>(); + let urls = body["properties"]["url"].as_array_mut().unwrap(); + new_urls.iter().for_each(|i| urls.push(json!(i))); + } + let props = body["properties"].as_object_mut().unwrap(); + props.remove("mp-slug"); + + if body["properties"]["content"][0].is_string() { + // Convert the content to HTML using the `markdown` crate + body["properties"]["content"] = json!([{ + "html": markdown::to_html(body["properties"]["content"][0].as_str().unwrap()), + "value": body["properties"]["content"][0] + }]) + } + // TODO: apply this normalization to editing too + if body["properties"]["mp-channel"].is_array() { + let mut additional_channels = body["properties"]["mp-channel"].as_array().unwrap().clone(); + if let Some(array) = body["properties"]["channel"].as_array_mut() { + array.append(&mut additional_channels); + } else { + body["properties"]["channel"] = json!(additional_channels) + } + body["properties"] + .as_object_mut() + 
.unwrap() + .remove("mp-channel"); + } else if body["properties"]["mp-channel"].is_string() { + let chan = body["properties"]["mp-channel"] + .as_str() + .unwrap() + .to_owned(); + if let Some(array) = body["properties"]["channel"].as_array_mut() { + array.push(json!(chan)) + } else { + body["properties"]["channel"] = json!([chan]); + } + body["properties"] + .as_object_mut() + .unwrap() + .remove("mp-channel"); + } + if body["properties"]["channel"][0].as_str().is_none() { + match body["type"][0].as_str() { + Some("h-entry") => { + // Set the channel to the main channel... + // TODO find like posts and move them to separate private channel + let default_channel = me.join(DEFAULT_CHANNEL_PATH).unwrap().to_string(); + + body["properties"]["channel"] = json!([default_channel]); + } + Some("h-card") => { + let default_channel = me.join(CONTACTS_CHANNEL_PATH).unwrap().to_string(); + + body["properties"]["channel"] = json!([default_channel]); + } + Some("h-food") => { + let default_channel = me.join(FOOD_CHANNEL_PATH).unwrap().to_string(); + + body["properties"]["channel"] = json!([default_channel]); + } + // TODO h-event + /*"h-event" => { + let default_channel + },*/ + _ => { + body["properties"]["channel"] = json!([]); + } + } + } + body["properties"]["posted-with"] = json!([user.client_id]); + if body["properties"]["author"][0].as_str().is_none() { + body["properties"]["author"] = json!([me.as_str()]) + } + // TODO: maybe highlight #hashtags? 
+ // Find other processing to do and insert it here + return ( + body["properties"]["uid"][0].as_str().unwrap().to_string(), + body, + ); +} + +pub(crate) fn form_to_mf2_json(form: Vec<(String, String)>) -> serde_json::Value { + let mut mf2 = json!({"type": [], "properties": {}}); + for (k, v) in form { + if k == "h" { + mf2["type"] + .as_array_mut() + .unwrap() + .push(json!("h-".to_string() + &v)); + } else if k != "access_token" { + let key = k.strip_suffix("[]").unwrap_or(&k); + match mf2["properties"][key].as_array_mut() { + Some(prop) => prop.push(json!(v)), + None => mf2["properties"][key] = json!([v]), + } + } + } + if mf2["type"].as_array().unwrap().is_empty() { + mf2["type"].as_array_mut().unwrap().push(json!("h-entry")); + } + mf2 +} + +pub(crate) async fn create_feed( + storage: &impl Storage, + uid: &str, + channel: &str, + user: &TokenData, +) -> crate::database::Result<()> { + let path = url::Url::parse(channel).unwrap().path().to_string(); + + let name = match path.as_str() { + DEFAULT_CHANNEL_PATH => DEFAULT_CHANNEL_NAME, + CONTACTS_CHANNEL_PATH => CONTACTS_CHANNEL_NAME, + FOOD_CHANNEL_PATH => FOOD_CHANNEL_NAME, + _ => panic!("Tried to create an unknown default feed!"), + }; + + let (_, feed) = normalize_mf2( + json!({ + "type": ["h-feed"], + "properties": { + "name": [name], + "uid": [channel] + }, + }), + user, + ); + storage.put_post(&feed, user.me.as_str()).await?; + storage.add_to_feed(channel, uid).await +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + fn token_data() -> TokenData { + TokenData { + me: "https://fireburn.ru/".parse().unwrap(), + client_id: "https://quill.p3k.io/".parse().unwrap(), + scope: kittybox_indieauth::Scopes::new(vec![kittybox_indieauth::Scope::Create]), + exp: Some(u64::MAX), + iat: Some(0) + } + } + + #[test] + fn test_form_to_mf2() { + assert_eq!( + super::form_to_mf2_json( + serde_urlencoded::from_str("h=entry&content=something%20interesting").unwrap() + ), + json!({ + "type": 
["h-entry"], + "properties": { + "content": ["something interesting"] + } + }) + ) + } + + #[test] + fn test_no_replace_uid() { + let mf2 = json!({ + "type": ["h-card"], + "properties": { + "uid": ["https://fireburn.ru/"], + "name": ["Vika Nezrimaya"], + "note": ["A crazy programmer girl who wants some hugs"] + } + }); + + let (uid, normalized) = normalize_mf2( + mf2.clone(), + &token_data(), + ); + assert_eq!( + normalized["properties"]["uid"][0], mf2["properties"]["uid"][0], + "UID was replaced" + ); + assert_eq!( + normalized["properties"]["uid"][0], uid, + "Returned post location doesn't match UID" + ); + } + + #[test] + fn test_mp_channel() { + let mf2 = json!({ + "type": ["h-entry"], + "properties": { + "uid": ["https://fireburn.ru/posts/test"], + "content": [{"html": "

Hello world!

"}], + "mp-channel": ["https://fireburn.ru/feeds/test"] + } + }); + + let (_, normalized) = normalize_mf2( + mf2.clone(), + &token_data(), + ); + + assert_eq!( + normalized["properties"]["channel"], + mf2["properties"]["mp-channel"] + ); + } + + #[test] + fn test_mp_channel_as_string() { + let mf2 = json!({ + "type": ["h-entry"], + "properties": { + "uid": ["https://fireburn.ru/posts/test"], + "content": [{"html": "

Hello world!

"}], + "mp-channel": "https://fireburn.ru/feeds/test" + } + }); + + let (_, normalized) = normalize_mf2( + mf2.clone(), + &token_data(), + ); + + assert_eq!( + normalized["properties"]["channel"][0], + mf2["properties"]["mp-channel"] + ); + } + + #[test] + fn test_normalize_mf2() { + let mf2 = json!({ + "type": ["h-entry"], + "properties": { + "content": ["This is content!"] + } + }); + + let (uid, post) = normalize_mf2( + mf2, + &token_data(), + ); + assert_eq!( + post["properties"]["published"] + .as_array() + .expect("post['published'] is undefined") + .len(), + 1, + "Post doesn't have a published time" + ); + DateTime::parse_from_rfc3339(post["properties"]["published"][0].as_str().unwrap()) + .expect("Couldn't parse date from rfc3339"); + assert!( + !post["properties"]["url"] + .as_array() + .expect("post['url'] is undefined") + .is_empty(), + "Post doesn't have any URLs" + ); + assert_eq!( + post["properties"]["uid"] + .as_array() + .expect("post['uid'] is undefined") + .len(), + 1, + "Post doesn't have a single UID" + ); + assert_eq!( + post["properties"]["uid"][0], uid, + "UID of a post and its supposed location don't match" + ); + assert!( + uid.starts_with("https://fireburn.ru/posts/"), + "The post namespace is incorrect" + ); + assert_eq!( + post["properties"]["content"][0]["html"] + .as_str() + .expect("Post doesn't have a rich content object") + .trim(), + "

This is content!

", + "Parsed Markdown content doesn't match expected HTML" + ); + assert_eq!( + post["properties"]["channel"][0], "https://fireburn.ru/feeds/main", + "Post isn't posted to the main channel" + ); + assert_eq!( + post["properties"]["author"][0], "https://fireburn.ru/", + "Post author is unknown" + ); + } + + #[test] + fn test_mp_slug() { + let mf2 = json!({ + "type": ["h-entry"], + "properties": { + "content": ["This is content!"], + "mp-slug": ["hello-post"] + }, + }); + + let (_, post) = normalize_mf2( + mf2, + &token_data(), + ); + assert!( + post["properties"]["url"] + .as_array() + .unwrap() + .iter() + .map(|i| i.as_str().unwrap()) + .any(|i| i == "https://fireburn.ru/posts/hello-post"), + "Didn't found an URL pointing to the location expected by the mp-slug semantics" + ); + assert!( + post["properties"]["mp-slug"].as_array().is_none(), + "mp-slug wasn't deleted from the array!" + ) + } + + #[test] + fn test_normalize_feed() { + let mf2 = json!({ + "type": ["h-feed"], + "properties": { + "name": "Main feed", + "mp-slug": ["main"] + } + }); + + let (uid, post) = normalize_mf2( + mf2, + &token_data(), + ); + assert_eq!( + post["properties"]["uid"][0], uid, + "UID of a post and its supposed location don't match" + ); + assert_eq!(post["properties"]["author"][0], "https://fireburn.ru/"); + assert!( + post["properties"]["url"] + .as_array() + .unwrap() + .iter() + .map(|i| i.as_str().unwrap()) + .any(|i| i == "https://fireburn.ru/feeds/main"), + "Didn't found an URL pointing to the location expected by the mp-slug semantics" + ); + assert!( + post["properties"]["mp-slug"].as_array().is_none(), + "mp-slug wasn't deleted from the array!" 
+ ) + } +} diff --git a/src/tokenauth.rs b/src/tokenauth.rs new file mode 100644 index 0000000..244a045 --- /dev/null +++ b/src/tokenauth.rs @@ -0,0 +1,358 @@ +use serde::{Deserialize, Serialize}; +use url::Url; + +#[derive(Deserialize, Serialize, Debug, PartialEq, Clone)] +pub struct User { + pub me: Url, + pub client_id: Url, + scope: String, +} + +#[derive(Debug, Clone, PartialEq, Copy)] +pub enum ErrorKind { + PermissionDenied, + NotAuthorized, + TokenEndpointError, + JsonParsing, + InvalidHeader, + Other, +} + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct TokenEndpointError { + error: String, + error_description: String, +} + +#[derive(Debug)] +pub struct IndieAuthError { + source: Option>, + kind: ErrorKind, + msg: String, +} + +impl std::error::Error for IndieAuthError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + self.source + .as_ref() + .map(|e| e.as_ref() as &dyn std::error::Error) + } +} + +impl std::fmt::Display for IndieAuthError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}: {}", + match self.kind { + ErrorKind::TokenEndpointError => "token endpoint returned an error: ", + ErrorKind::JsonParsing => "error while parsing token endpoint response: ", + ErrorKind::NotAuthorized => "token endpoint did not recognize the token: ", + ErrorKind::PermissionDenied => "token endpoint rejected the token: ", + ErrorKind::InvalidHeader => "authorization header parsing error: ", + ErrorKind::Other => "token endpoint communication error: ", + }, + self.msg + ) + } +} + +impl From for IndieAuthError { + fn from(err: serde_json::Error) -> Self { + Self { + msg: format!("{}", err), + source: Some(Box::new(err)), + kind: ErrorKind::JsonParsing, + } + } +} + +impl From for IndieAuthError { + fn from(err: reqwest::Error) -> Self { + Self { + msg: format!("{}", err), + source: Some(Box::new(err)), + kind: ErrorKind::Other, + } + } +} + +impl From for IndieAuthError { + fn from(err: 
axum::extract::rejection::TypedHeaderRejection) -> Self { + Self { + msg: format!("{:?}", err.reason()), + source: Some(Box::new(err)), + kind: ErrorKind::InvalidHeader, + } + } +} + +impl axum::response::IntoResponse for IndieAuthError { + fn into_response(self) -> axum::response::Response { + let status_code: StatusCode = match self.kind { + ErrorKind::PermissionDenied => StatusCode::FORBIDDEN, + ErrorKind::NotAuthorized => StatusCode::UNAUTHORIZED, + ErrorKind::TokenEndpointError => StatusCode::INTERNAL_SERVER_ERROR, + ErrorKind::JsonParsing => StatusCode::BAD_REQUEST, + ErrorKind::InvalidHeader => StatusCode::UNAUTHORIZED, + ErrorKind::Other => StatusCode::INTERNAL_SERVER_ERROR, + }; + + let body = serde_json::json!({ + "error": match self.kind { + ErrorKind::PermissionDenied => "forbidden", + ErrorKind::NotAuthorized => "unauthorized", + ErrorKind::TokenEndpointError => "token_endpoint_error", + ErrorKind::JsonParsing => "invalid_request", + ErrorKind::InvalidHeader => "unauthorized", + ErrorKind::Other => "unknown_error", + }, + "error_description": self.msg + }); + + (status_code, axum::response::Json(body)).into_response() + } +} + +impl User { + pub fn check_scope(&self, scope: &str) -> bool { + self.scopes().any(|i| i == scope) + } + pub fn scopes(&self) -> std::str::SplitAsciiWhitespace<'_> { + self.scope.split_ascii_whitespace() + } + pub fn new(me: &str, client_id: &str, scope: &str) -> Self { + Self { + me: Url::parse(me).unwrap(), + client_id: Url::parse(client_id).unwrap(), + scope: scope.to_string(), + } + } +} + +use axum::{ + extract::{Extension, FromRequest, RequestParts, TypedHeader}, + headers::{ + authorization::{Bearer, Credentials}, + Authorization, + }, + http::StatusCode, +}; + +// this newtype is required due to axum::Extension retrieving items by type +// it's based on compiler magic matching extensions by their type's hashes +#[derive(Debug, Clone)] +pub struct TokenEndpoint(pub url::Url); + +#[async_trait::async_trait] +impl 
FromRequest for User +where + B: Send, +{ + type Rejection = IndieAuthError; + + #[cfg_attr( + all(debug_assertions, not(test)), + allow(unreachable_code, unused_variables) + )] + async fn from_request(req: &mut RequestParts) -> Result { + // Return a fake user if we're running a debug build + // I don't wanna bother with authentication + #[cfg(all(debug_assertions, not(test)))] + return Ok(User::new( + "http://localhost:8080/", + "https://quill.p3k.io/", + "create update delete media", + )); + + let TypedHeader(Authorization(token)) = + TypedHeader::>::from_request(req) + .await + .map_err(IndieAuthError::from)?; + + let Extension(TokenEndpoint(token_endpoint)): Extension = + Extension::from_request(req).await.unwrap(); + + let Extension(http): Extension = + Extension::from_request(req).await.unwrap(); + + match http + .get(token_endpoint) + .header("Authorization", token.encode()) + .header("Accept", "application/json") + .send() + .await + { + Ok(res) => match res.status() { + StatusCode::OK => match res.json::().await { + Ok(json) => match serde_json::from_value::(json.clone()) { + Ok(user) => Ok(user), + Err(err) => { + if let Some(false) = json["active"].as_bool() { + Err(IndieAuthError { + source: None, + kind: ErrorKind::NotAuthorized, + msg: "The token is not active for this user.".to_owned(), + }) + } else { + Err(IndieAuthError::from(err)) + } + } + }, + Err(err) => Err(IndieAuthError::from(err)), + }, + StatusCode::BAD_REQUEST => match res.json::().await { + Ok(err) => { + if err.error == "unauthorized" { + Err(IndieAuthError { + source: None, + kind: ErrorKind::NotAuthorized, + msg: err.error_description, + }) + } else { + Err(IndieAuthError { + source: None, + kind: ErrorKind::TokenEndpointError, + msg: err.error_description, + }) + } + } + Err(err) => Err(IndieAuthError::from(err)), + }, + _ => Err(IndieAuthError { + source: None, + msg: format!("Token endpoint returned {}", res.status()), + kind: ErrorKind::TokenEndpointError, + }), + }, + Err(err) 
=> Err(IndieAuthError::from(err)), + } + } +} + +#[cfg(test)] +mod tests { + use super::User; + use axum::{ + extract::FromRequest, + http::{Method, Request}, + }; + use wiremock::{MockServer, Mock, ResponseTemplate}; + use wiremock::matchers::{method, path, header}; + + #[test] + fn user_scopes_are_checkable() { + let user = User::new( + "https://fireburn.ru/", + "https://quill.p3k.io/", + "create update media", + ); + + assert!(user.check_scope("create")); + assert!(!user.check_scope("delete")); + } + + #[inline] + fn get_http_client() -> reqwest::Client { + reqwest::Client::new() + } + + fn request>>( + auth: A, + endpoint: String, + ) -> Request<()> { + let request = Request::builder().method(Method::GET); + + match auth.into() { + Some(auth) => request.header("Authorization", auth), + None => request, + } + .extension(super::TokenEndpoint(endpoint.parse().unwrap())) + .extension(get_http_client()) + .body(()) + .unwrap() + } + + #[tokio::test] + async fn test_require_token_with_token() { + let server = MockServer::start().await; + + Mock::given(path("/token")) + .and(header("Authorization", "Bearer token")) + .respond_with(ResponseTemplate::new(200) + .set_body_json(User::new( + "https://fireburn.ru/", + "https://quill.p3k.io/", + "create update media", + )) + ) + .mount(&server) + .await; + + let request = request("Bearer token", format!("{}/token", &server.uri())); + let mut parts = axum::extract::RequestParts::new(request); + let user = User::from_request(&mut parts).await.unwrap(); + + assert_eq!(user.me.as_str(), "https://fireburn.ru/") + } + + #[tokio::test] + async fn test_require_token_fake_token() { + let server = MockServer::start().await; + + Mock::given(path("/refuse_token")) + .respond_with(ResponseTemplate::new(200) + .set_body_json(serde_json::json!({"active": false})) + ) + .mount(&server) + .await; + + let request = request("Bearer token", format!("{}/refuse_token", &server.uri())); + let mut parts = axum::extract::RequestParts::new(request); 
+ let err = User::from_request(&mut parts).await.unwrap_err(); + + assert_eq!(err.kind, super::ErrorKind::NotAuthorized) + } + + #[tokio::test] + async fn test_require_token_no_token() { + let server = MockServer::start().await; + + Mock::given(path("/should_never_be_called")) + .respond_with(ResponseTemplate::new(500)) + .expect(0) + .mount(&server) + .await; + + let request = request(None, format!("{}/should_never_be_called", &server.uri())); + let mut parts = axum::extract::RequestParts::new(request); + let err = User::from_request(&mut parts).await.unwrap_err(); + + assert_eq!(err.kind, super::ErrorKind::InvalidHeader); + } + + #[tokio::test] + async fn test_require_token_400_error_unauthorized() { + let server = MockServer::start().await; + + Mock::given(path("/refuse_token_with_400")) + .and(header("Authorization", "Bearer token")) + .respond_with(ResponseTemplate::new(400) + .set_body_json(serde_json::json!({ + "error": "unauthorized", + "error_description": "The token provided was malformed" + })) + ) + .mount(&server) + .await; + + let request = request( + "Bearer token", + format!("{}/refuse_token_with_400", &server.uri()), + ); + let mut parts = axum::extract::RequestParts::new(request); + let err = User::from_request(&mut parts).await.unwrap_err(); + + assert_eq!(err.kind, super::ErrorKind::NotAuthorized); + } +} diff --git a/src/webmentions/check.rs b/src/webmentions/check.rs new file mode 100644 index 0000000..f7322f7 --- /dev/null +++ b/src/webmentions/check.rs @@ -0,0 +1,113 @@ +use std::{cell::RefCell, rc::Rc}; +use microformats::{types::PropertyValue, html5ever::{self, tendril::TendrilSink}}; +use kittybox_util::MentionType; + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error("microformats error: {0}")] + Microformats(#[from] microformats::Error), + // #[error("json error: {0}")] + // Json(#[from] serde_json::Error), + #[error("url parse error: {0}")] + UrlParse(#[from] url::ParseError), +} + +#[tracing::instrument] +pub fn 
check_mention(document: impl AsRef + std::fmt::Debug, base_url: &url::Url, link: &url::Url) -> Result, Error> { + tracing::debug!("Parsing MF2 markup..."); + // First, check the document for MF2 markup + let document = microformats::from_html(document.as_ref(), base_url.clone())?; + + // Get an iterator of all items + let items_iter = document.items.iter() + .map(AsRef::as_ref) + .map(RefCell::borrow); + + for item in items_iter { + tracing::debug!("Processing item: {:?}", item); + + let props = item.properties.borrow(); + for (prop, interaction_type) in [ + ("in-reply-to", MentionType::Reply), ("like-of", MentionType::Like), + ("bookmark-of", MentionType::Bookmark), ("repost-of", MentionType::Repost) + ] { + if let Some(propvals) = props.get(prop) { + tracing::debug!("Has a u-{} property", prop); + for val in propvals { + if let PropertyValue::Url(url) = val { + if url == link { + tracing::debug!("URL matches! Webmention is valid"); + return Ok(Some((interaction_type, serde_json::to_value(&*item).unwrap()))) + } + } + } + } + } + // Process `content` + tracing::debug!("Processing e-content..."); + if let Some(PropertyValue::Fragment(content)) = props.get("content") + .map(Vec::as_slice) + .unwrap_or_default() + .first() + { + tracing::debug!("Parsing HTML data..."); + let root = html5ever::parse_document(html5ever::rcdom::RcDom::default(), Default::default()) + .from_utf8() + .one(content.html.to_owned().as_bytes()) + .document; + + // This is a trick to unwrap recursion into a loop + // + // A list of unprocessed node is made. Then, in each + // iteration, the list is "taken" and replaced with an + // empty list, which is populated with nodes for the next + // iteration of the loop. + // + // Empty list means all nodes were processed. 
+ let mut unprocessed_nodes: Vec> = root.children.borrow().iter().cloned().collect(); + while !unprocessed_nodes.is_empty() { + // "Take" the list out of its memory slot, replace it with an empty list + let nodes = std::mem::take(&mut unprocessed_nodes); + tracing::debug!("Processing list of {} nodes", nodes.len()); + 'nodes_loop: for node in nodes.into_iter() { + // Add children nodes to the list for the next iteration + unprocessed_nodes.extend(node.children.borrow().iter().cloned()); + + if let html5ever::rcdom::NodeData::Element { ref name, ref attrs, .. } = node.data { + // If it's not `
`, skip it + if name.local != *"a" { continue; } + let mut is_mention: bool = false; + for attr in attrs.borrow().iter() { + if attr.name.local == *"rel" { + // Don't count `rel="nofollow"` links β€” a web crawler should ignore them + // and so for purposes of driving visitors they are useless + if attr.value + .as_ref() + .split([',', ' ']) + .any(|v| v == "nofollow") + { + // Skip the entire node. + continue 'nodes_loop; + } + } + // if it's not ``, skip it + if attr.name.local != *"href" { continue; } + // Be forgiving in parsing URLs, and resolve them against the base URL + if let Ok(url) = base_url.join(attr.value.as_ref()) { + if &url == link { + is_mention = true; + } + } + } + if is_mention { + return Ok(Some((MentionType::Mention, serde_json::to_value(&*item).unwrap()))); + } + } + } + } + + } + } + + Ok(None) +} diff --git a/src/webmentions/mod.rs b/src/webmentions/mod.rs new file mode 100644 index 0000000..95ea870 --- /dev/null +++ b/src/webmentions/mod.rs @@ -0,0 +1,195 @@ +use axum::{Form, response::{IntoResponse, Response}, Extension}; +use axum::http::StatusCode; +use tracing::error; + +use crate::database::{Storage, StorageError}; +use self::queue::JobQueue; +pub mod queue; + +#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize, serde::Deserialize)] +#[cfg_attr(feature = "sqlx", derive(sqlx::FromRow))] +pub struct Webmention { + source: String, + target: String, +} + +impl queue::JobItem for Webmention {} +impl queue::PostgresJobItem for Webmention { + const DATABASE_NAME: &'static str = "kittybox_webmention.incoming_webmention_queue"; + const NOTIFICATION_CHANNEL: &'static str = "incoming_webmention"; +} + +async fn accept_webmention>( + Extension(queue): Extension, + Form(webmention): Form, +) -> Response { + if let Err(err) = webmention.source.parse::() { + return (StatusCode::BAD_REQUEST, err.to_string()).into_response() + } + if let Err(err) = webmention.target.parse::() { + return (StatusCode::BAD_REQUEST, err.to_string()).into_response() + 
} + + match queue.put(&webmention).await { + Ok(id) => (StatusCode::ACCEPTED, [ + ("Location", format!("/.kittybox/webmention/{id}")) + ]).into_response(), + Err(err) => (StatusCode::INTERNAL_SERVER_ERROR, [ + ("Content-Type", "text/plain") + ], err.to_string()).into_response() + } +} + +pub fn router, S: Storage + 'static>( + queue: Q, db: S, http: reqwest::Client, + cancellation_token: tokio_util::sync::CancellationToken +) -> (axum::Router, SupervisedTask) { + // Automatically spawn a background task to handle webmentions + let bgtask_handle = supervised_webmentions_task(queue.clone(), db, http, cancellation_token); + + let router = axum::Router::new() + .route("/.kittybox/webmention", + axum::routing::post(accept_webmention::) + ) + .layer(Extension(queue)); + + (router, bgtask_handle) +} + +#[derive(thiserror::Error, Debug)] +pub enum SupervisorError { + #[error("the task was explicitly cancelled")] + Cancelled +} + +pub type SupervisedTask = tokio::task::JoinHandle>; + +pub fn supervisor(mut f: F, cancellation_token: tokio_util::sync::CancellationToken) -> SupervisedTask +where + E: std::error::Error + std::fmt::Debug + Send + 'static, + A: std::future::Future> + Send + 'static, + F: FnMut() -> A + Send + 'static +{ + + let supervisor_future = async move { + loop { + // Don't spawn the task if we are already cancelled, but + // have somehow missed it (probably because the task + // crashed and we immediately received a cancellation + // request after noticing the crashed task) + if cancellation_token.is_cancelled() { + return Err(SupervisorError::Cancelled) + } + let task = tokio::task::spawn(f()); + tokio::select! 
{ + _ = cancellation_token.cancelled() => { + tracing::info!("Shutdown of background task {:?} requested.", std::any::type_name::()); + return Err(SupervisorError::Cancelled) + } + task_result = task => match task_result { + Err(e) => tracing::error!("background task {:?} exited unexpectedly: {}", std::any::type_name::(), e), + Ok(Err(e)) => tracing::error!("background task {:?} returned error: {}", std::any::type_name::(), e), + Ok(Ok(_)) => unreachable!("task's Ok is Infallible") + } + } + tracing::debug!("Sleeping for a little while to back-off..."); + tokio::time::sleep(std::time::Duration::from_secs(5)).await; + } + }; + #[cfg(not(tokio_unstable))] + return tokio::task::spawn(supervisor_future); + #[cfg(tokio_unstable)] + return tokio::task::Builder::new() + .name(format!("supervisor for background task {}", std::any::type_name::()).as_str()) + .spawn(supervisor_future) + .unwrap(); +} + +mod check; + +#[derive(thiserror::Error, Debug)] +enum Error { + #[error("queue error: {0}")] + Queue(#[from] Q), + #[error("storage error: {0}")] + Storage(StorageError) +} + +async fn process_webmentions_from_queue, S: Storage + 'static>(queue: Q, db: S, http: reqwest::Client) -> Result> { + use futures_util::StreamExt; + use self::queue::Job; + + let mut stream = queue.into_stream().await?; + while let Some(item) = stream.next().await.transpose()? 
{ + let job = item.job(); + let (source, target) = ( + job.source.parse::().unwrap(), + job.target.parse::().unwrap() + ); + + let (code, text) = match http.get(source.clone()).send().await { + Ok(response) => { + let code = response.status(); + if ![StatusCode::OK, StatusCode::GONE].iter().any(|i| i == &code) { + error!("error processing webmention: webpage fetch returned {}", code); + continue; + } + match response.text().await { + Ok(text) => (code, text), + Err(err) => { + error!("error processing webmention: error fetching webpage text: {}", err); + continue + } + } + } + Err(err) => { + error!("error processing webmention: error requesting webpage: {}", err); + continue + } + }; + + if code == StatusCode::GONE { + todo!("removing webmentions is not implemented yet"); + // db.remove_webmention(target.as_str(), source.as_str()).await.map_err(Error::::Storage)?; + } else { + // Verify webmention + let (mention_type, mut mention) = match tokio::task::block_in_place({ + || check::check_mention(text, &source, &target) + }) { + Ok(Some(mention_type)) => mention_type, + Ok(None) => { + error!("webmention {} -> {} invalid, rejecting", source, target); + item.done().await?; + continue; + } + Err(err) => { + error!("error processing webmention: error checking webmention: {}", err); + continue; + } + }; + + { + mention["type"] = serde_json::json!(["h-cite"]); + + if !mention["properties"].as_object().unwrap().contains_key("uid") { + let url = mention["properties"]["url"][0].as_str().unwrap_or_else(|| target.as_str()).to_owned(); + let props = mention["properties"].as_object_mut().unwrap(); + props.insert("uid".to_owned(), serde_json::Value::Array( + vec![serde_json::Value::String(url)]) + ); + } + } + + db.add_or_update_webmention(target.as_str(), mention_type, mention).await.map_err(Error::::Storage)?; + } + } + unreachable!() +} + +fn supervised_webmentions_task, S: Storage + 'static>( + queue: Q, db: S, + http: reqwest::Client, + cancellation_token: 
tokio_util::sync::CancellationToken +) -> SupervisedTask { + supervisor::, _, _>(move || process_webmentions_from_queue(queue.clone(), db.clone(), http.clone()), cancellation_token) +} diff --git a/src/webmentions/queue.rs b/src/webmentions/queue.rs new file mode 100644 index 0000000..b811e71 --- /dev/null +++ b/src/webmentions/queue.rs @@ -0,0 +1,303 @@ +use std::{pin::Pin, str::FromStr}; + +use futures_util::{Stream, StreamExt}; +use sqlx::{postgres::PgListener, Executor}; +use uuid::Uuid; + +use super::Webmention; + +static MIGRATOR: sqlx::migrate::Migrator = sqlx::migrate!("./migrations/webmention"); + +pub use kittybox_util::queue::{JobQueue, JobItem, Job}; + +pub trait PostgresJobItem: JobItem + sqlx::FromRow<'static, sqlx::postgres::PgRow> { + const DATABASE_NAME: &'static str; + const NOTIFICATION_CHANNEL: &'static str; +} + +#[derive(sqlx::FromRow)] +struct PostgresJobRow { + id: Uuid, + #[sqlx(flatten)] + job: T +} + +#[derive(Debug)] +pub struct PostgresJob { + id: Uuid, + job: T, + // This will normally always be Some, except on drop + txn: Option>, + runtime_handle: tokio::runtime::Handle, +} + + +impl Drop for PostgresJob { + // This is an emulation of "async drop" β€” the struct retains a + // runtime handle, which it uses to block on a future that does + // the actual cleanup. + // + // Of course, this is not portable between runtimes, but I don't + // care about that, since Kittybox is designed to work within the + // Tokio ecosystem. + fn drop(&mut self) { + tracing::error!("Job {:?} failed, incrementing attempts...", &self); + if let Some(mut txn) = self.txn.take() { + let id = self.id; + self.runtime_handle.spawn(async move { + tracing::debug!("Constructing query to increment attempts for job {}...", id); + // UPDATE "T::DATABASE_NAME" WHERE id = $1 SET attempts = attempts + 1 + sqlx::query_builder::QueryBuilder::new("UPDATE ") + // This is safe from a SQL injection standpoint, since it is a constant. 
+ .push(T::DATABASE_NAME) + .push(" SET attempts = attempts + 1") + .push(" WHERE id = ") + .push_bind(id) + .build() + .execute(&mut *txn) + .await + .unwrap(); + sqlx::query_builder::QueryBuilder::new("NOTIFY ") + .push(T::NOTIFICATION_CHANNEL) + .build() + .execute(&mut *txn) + .await + .unwrap(); + txn.commit().await.unwrap(); + }); + } + } +} + +#[cfg(test)] +impl PostgresJob { + async fn attempts(&mut self) -> Result { + sqlx::query_builder::QueryBuilder::new("SELECT attempts FROM ") + .push(T::DATABASE_NAME) + .push(" WHERE id = ") + .push_bind(self.id) + .build_query_as::<(i32,)>() + // It's safe to unwrap here, because we "take" the txn only on drop or commit, + // where it's passed by value, not by reference. + .fetch_one(self.txn.as_deref_mut().unwrap()) + .await + .map(|(i,)| i as usize) + } +} + +#[async_trait::async_trait] +impl Job> for PostgresJob { + fn job(&self) -> &Webmention { + &self.job + } + async fn done(mut self) -> Result<(), as JobQueue>::Error> { + tracing::debug!("Deleting {} from the job queue", self.id); + sqlx::query("DELETE FROM kittybox_webmention.incoming_webmention_queue WHERE id = $1") + .bind(self.id) + .execute(self.txn.as_deref_mut().unwrap()) + .await?; + + self.txn.take().unwrap().commit().await + } +} + +pub struct PostgresJobQueue { + db: sqlx::PgPool, + _phantom: std::marker::PhantomData +} +impl Clone for PostgresJobQueue { + fn clone(&self) -> Self { + Self { + db: self.db.clone(), + _phantom: std::marker::PhantomData + } + } +} + +impl PostgresJobQueue { + pub async fn new(uri: &str) -> Result { + let mut options = sqlx::postgres::PgConnectOptions::from_str(uri)? 
+ .options([("search_path", "kittybox_webmention")]); + if let Ok(password_file) = std::env::var("PGPASS_FILE") { + let password = tokio::fs::read_to_string(password_file).await.unwrap(); + options = options.password(&password); + } else if let Ok(password) = std::env::var("PGPASS") { + options = options.password(&password) + } + Self::from_pool( + sqlx::postgres::PgPoolOptions::new() + .max_connections(50) + .connect_with(options) + .await? + ).await + + } + + pub(crate) async fn from_pool(db: sqlx::PgPool) -> Result { + db.execute(sqlx::query("CREATE SCHEMA IF NOT EXISTS kittybox_webmention")).await?; + MIGRATOR.run(&db).await?; + Ok(Self { db, _phantom: std::marker::PhantomData }) + } +} + +#[async_trait::async_trait] +impl JobQueue for PostgresJobQueue { + type Job = PostgresJob; + type Error = sqlx::Error; + + async fn get_one(&self) -> Result, Self::Error> { + let mut txn = self.db.begin().await?; + + match sqlx::query_as::<_, PostgresJobRow>( + "SELECT id, source, target FROM kittybox_webmention.incoming_webmention_queue WHERE attempts < 5 FOR UPDATE SKIP LOCKED LIMIT 1" + ) + .fetch_optional(&mut *txn) + .await? 
+ { + Some(job_row) => { + return Ok(Some(Self::Job { + id: job_row.id, + job: job_row.job, + txn: Some(txn), + runtime_handle: tokio::runtime::Handle::current(), + })) + }, + None => Ok(None) + } + } + + async fn put(&self, item: &Webmention) -> Result { + sqlx::query_scalar::<_, Uuid>("INSERT INTO kittybox_webmention.incoming_webmention_queue (source, target) VALUES ($1, $2) RETURNING id") + .bind(item.source.as_str()) + .bind(item.target.as_str()) + .fetch_one(&self.db) + .await + } + + async fn into_stream(self) -> Result> + Send>>, Self::Error> { + let mut listener = PgListener::connect_with(&self.db).await?; + listener.listen("incoming_webmention").await?; + + let stream: Pin> + Send>> = futures_util::stream::try_unfold((), { + let listener = std::sync::Arc::new(tokio::sync::Mutex::new(listener)); + move |_| { + let queue = self.clone(); + let listener = listener.clone(); + async move { + loop { + match queue.get_one().await? { + Some(item) => return Ok(Some((item, ()))), + None => { + listener.lock().await.recv().await?; + continue + } + } + } + } + } + }).boxed(); + + Ok(stream) + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use super::{Webmention, PostgresJobQueue, Job, JobQueue, MIGRATOR}; + use futures_util::StreamExt; + + #[sqlx::test(migrator = "MIGRATOR")] + #[tracing_test::traced_test] + async fn test_webmention_queue(pool: sqlx::PgPool) -> Result<(), sqlx::Error> { + let test_webmention = Webmention { + source: "https://fireburn.ru/posts/lorem-ipsum".to_owned(), + target: "https://aaronparecki.com/posts/dolor-sit-amet".to_owned() + }; + + let queue = PostgresJobQueue::::from_pool(pool).await?; + tracing::debug!("Putting webmention into queue"); + queue.put(&test_webmention).await?; + { + let mut job_description = queue.get_one().await?.unwrap(); + assert_eq!(job_description.job(), &test_webmention); + assert_eq!(job_description.attempts().await?, 0); + } + tracing::debug!("Creating a stream"); + let mut stream = 
queue.clone().into_stream().await?; + + { + let mut guard = stream.next().await.transpose()?.unwrap(); + assert_eq!(guard.job(), &test_webmention); + assert_eq!(guard.attempts().await?, 1); + if let Some(item) = queue.get_one().await? { + panic!("Unexpected item {:?} returned from job queue!", item) + }; + } + + { + let mut guard = stream.next().await.transpose()?.unwrap(); + assert_eq!(guard.job(), &test_webmention); + assert_eq!(guard.attempts().await?, 2); + guard.done().await?; + } + + match queue.get_one().await? { + Some(item) => panic!("Unexpected item {:?} returned from job queue!", item), + None => Ok(()) + } + } + + #[sqlx::test(migrator = "MIGRATOR")] + #[tracing_test::traced_test] + async fn test_no_hangups_in_queue(pool: sqlx::PgPool) -> Result<(), sqlx::Error> { + let test_webmention = Webmention { + source: "https://fireburn.ru/posts/lorem-ipsum".to_owned(), + target: "https://aaronparecki.com/posts/dolor-sit-amet".to_owned() + }; + + let queue = PostgresJobQueue::::from_pool(pool.clone()).await?; + tracing::debug!("Putting webmention into queue"); + queue.put(&test_webmention).await?; + tracing::debug!("Creating a stream"); + let mut stream = queue.clone().into_stream().await?; + + // Synchronisation barrier that will be useful later + let barrier = Arc::new(tokio::sync::Barrier::new(2)); + { + // Get one job guard from a queue + let mut guard = stream.next().await.transpose()?.unwrap(); + assert_eq!(guard.job(), &test_webmention); + assert_eq!(guard.attempts().await?, 0); + + tokio::task::spawn({ + let barrier = barrier.clone(); + async move { + // Wait for the signal to drop the guard! 
+ barrier.wait().await; + + drop(guard) + } + }); + } + tokio::time::timeout(std::time::Duration::from_secs(1), stream.next()).await.unwrap_err(); + + let future = tokio::task::spawn( + tokio::time::timeout( + std::time::Duration::from_secs(10), async move { + stream.next().await.unwrap().unwrap() + } + ) + ); + // Let the other task drop the guard it is holding + barrier.wait().await; + let mut guard = future.await + .expect("Timeout on fetching item") + .expect("Job queue error"); + assert_eq!(guard.job(), &test_webmention); + assert_eq!(guard.attempts().await?, 1); + + Ok(()) + } +} diff --git a/templates/Cargo.toml b/templates/Cargo.toml new file mode 100644 index 0000000..38e73b3 --- /dev/null +++ b/templates/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "kittybox-frontend-renderer" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[build-dependencies] +libflate = "^2.0.0" +walkdir = "^2.3.2" + +[dev-dependencies] +faker_rand = "^0.1.1" +rand = "^0.8.5" +[dev-dependencies.microformats] +version="^0.3.0" + +[dependencies] +ellipse = "^0.2.0" +http = "^0.2.7" +markup = "^0.13.1" +serde_json = "^1.0.64" +include_dir = "^0.7.2" +axum = "^0.6.18" +[dependencies.chrono] +version = "^0.4.19" +features = ["serde"] +[dependencies.kittybox-util] +version = "0.1.0" +path = "../util" +[dependencies.kittybox-indieauth] +version = "0.1.0" +path = "../indieauth" \ No newline at end of file diff --git a/templates/assets/jslicense.html b/templates/assets/jslicense.html new file mode 100644 index 0000000..90c681c --- /dev/null +++ b/templates/assets/jslicense.html @@ -0,0 +1,31 @@ + + + + JavaScript licensing information for Kittybox + + +

All JavaScript included with Kittybox is licensed as free software, most of it under AGPL-3.0.

+ + + + + + + + + + + + + + + + + + + + + +
onboarding.jsAGPL-3.0onboarding.ts (Kittybox source code)
indieauth.jsAGPL-3.0indieauth.ts (Kittybox source code)
lib.jsAGPL-3.0lib.ts (Kittybox source code)
indieauth.jsAGPL-3.0indieauth.ts (Kittybox source code)
+ + diff --git a/templates/assets/onboarding.css b/templates/assets/onboarding.css new file mode 100644 index 0000000..6f191b9 --- /dev/null +++ b/templates/assets/onboarding.css @@ -0,0 +1,33 @@ +form.onboarding > ul#progressbar > li.active { + font-weight: bold; +} +form.onboarding > ul#progressbar { + display: flex; list-style: none; justify-content: space-around; +} + +form.onboarding > fieldset > div.switch_card_buttons { + display: flex; + justify-content: space-between; + width: 100%; +} +form.onboarding > fieldset > div.switch_card_buttons button:last-child { + margin-left: auto; +} +.form_group, .multi_input { + display: flex; + flex-direction: column; +} +.multi_input { + align-items: start; +} +.multi_input > input { + width: 100%; + align-self: stretch; +} +form.onboarding > fieldset > .form_group + * { + margin-top: .75rem; +} +form.onboarding textarea { + width: 100%; + resize: vertical; +} diff --git a/templates/assets/style.css b/templates/assets/style.css new file mode 100644 index 0000000..4cd7c09 --- /dev/null +++ b/templates/assets/style.css @@ -0,0 +1,236 @@ +@import url('https://fonts.googleapis.com/css2?family=Caveat:wght@500&family=Lato&display=swap'); + +:root { + font-family: var(--font-normal); + --font-normal: 'Lato', sans-serif; + --font-accent: 'Caveat', cursive; + --type-scale: 1.250; + + --primary-accent: purple; + --secondary-accent: gold; +} +* { + box-sizing: border-box; +} +body { + margin: 0; +} +h1, h2, h3, h4, h5, h6 { + font-family: var(--font-accent); +} +.titanic { + font-size: 3.815rem +} +h1, .xxxlarge { + margin-top: 0; + margin-bottom: 0; + font-size: 3.052rem; +} +h2, .xxlarge {font-size: 2.441rem;} +h3, .xlarge {font-size: 1.953rem;} +h4, .larger {font-size: 1.563rem;} +h5, .large {font-size: 1.25rem;} +h6, .normal {font-size: 1rem;} +small, .small { font-size: 0.8em; } + +nav#headerbar { + background: var(--primary-accent); + color: whitesmoke; + border-bottom: .75rem solid var(--secondary-accent); + padding: .3rem; 
+ vertical-align: center; + /*position: sticky; + top: 0;*/ +} +nav#headerbar a#homepage { + font-weight: bolder; + font-family: var(--font-accent); + font-size: 2rem; +} +nav#headerbar > ul { + display: flex; + padding: inherit; + margin: inherit; + gap: .75em; +} +nav#headerbar > ul > li { + display: inline-flex; + flex-direction: column; + marker: none; + padding: inherit; + margin: inherit; + justify-content: center; +} +nav#headerbar > ul > li.shiftright { + margin-left: auto; +} +nav#headerbar a { + color: white; +} +body > main { + max-width: 60rem; + margin: auto; + padding: .75rem; +} +body > footer { + text-align: center; +} +.sidebyside { + display: flex; + flex-wrap: wrap; + gap: .75rem; + margin-top: .75rem; + margin-bottom: .75rem; +} +.sidebyside > * { + width: 100%; + margin-top: 0; + margin-bottom: 0; + border: .125rem solid black; + border-radius: .75rem; + padding: .75rem; + margin-top: 0 !important; + margin-bottom: 0 !important; + flex-basis: 28rem; + flex-grow: 1; +} +article > * + * { + margin-top: .75rem; +} +article > header { + padding-bottom: .75rem; + border-bottom: 1px solid gray; +} +article > footer { + border-top: 1px solid gray; +} +article.h-entry, article.h-feed, article.h-card, article.h-event { + border: 2px solid black; + border-radius: .75rem; + padding: .75rem; + margin-top: .75rem; + margin-bottom: .75rem; +} +.webinteractions > ul.counters { + display: inline-flex; + padding: inherit; + margin: inherit; + gap: .75em; + flex-wrap: wrap; +} +.webinteractions > ul.counters > li > .icon { + font-size: 1.5em; +} +.webinteractions > ul.counters > li { + display: inline-flex; + align-items: center; + gap: .5em; +} +article.h-entry > header.metadata ul { + padding-inline-start: unset; + margin: unset; +} +article.h-entry > header.metadata ul.categories { + flex-wrap: wrap; + display: inline-flex; + list-style-type: none; +} +article.h-entry > header.metadata ul.categories li { + display: inline; + margin-inline-start: unset; +} 
+article.h-entry > header.metadata ul li { + margin-inline-start: 2.5em; +} +article.h-entry .e-content pre { + border: 1px solid gray; + border-radius: 0.5em; + overflow-y: auto; + padding: 0.5em; +} +article.h-entry img.u-photo { + max-width: 80%; + max-height: 90vh; + display: block; + margin: auto; +} +article.h-entry img.u-photo + * { + margin-top: .75rem; +} +article.h-entry > header.metadata span + span::before { + content: " | " +} +li.p-category::before { + content: " #"; +} + +article.h-entry ul.categories { + gap: .2em; +} +article.h-card img.u-photo { + border-radius: 100%; + float: left; + height: 8rem; + border: 1px solid gray; + margin-right: .75em; + object-fit: cover; + aspect-ratio: 1; +} + +.mini-h-card img, #indieauth_page img { + height: 2em; + display: inline-block; + border: 2px solid gray; + border-radius: 100%; + margin-right: 0.5rem; +} + +.mini-h-card * { + vertical-align: middle; +} + +.mini-h-card a { + text-decoration: none; +} + +#indieauth_page > #introduction { + border: .125rem solid gray; + border-radius: .75rem; + margin: 1.25rem; + padding: .75rem; +} + +/* Rainbow hearts */ +/* TODO make toggleable by splitting into separate CSS file */ +@counter-style rainbow-hearts { + system: cyclic; + symbols: "❀️" "🧑" "πŸ’›" "πŸ’š" "πŸ’™" "πŸ’œ"; +} +body { + counter-reset: like-icons; +} +span.like-icon::before { + counter-increment: like-icons; + content: "" counter(like-icons, rainbow-hearts); +} +span.like-icon-label { + display: none; +} +ul.h-feed { + list-style: none; +} + +body > a#skip-to-content { + position: absolute; + width: 1px; + height: 1px; + margin: 0; + overflow: hidden; + clip: rect(1px, 1px, 1px, 1px); + background: white; + padding: 16px; +} +body > a#skip-to-content:focus { + width: auto; height: auto; clip: auto; + z-index: 999; +} diff --git a/templates/build.rs b/templates/build.rs new file mode 100644 index 0000000..ccd5b19 --- /dev/null +++ b/templates/build.rs @@ -0,0 +1,90 @@ +use std::ffi::OsStr; + +use 
libflate::gzip::Encoder; +use walkdir::WalkDir; + +fn main() -> Result<(), std::io::Error> { + use std::env; + let out_dir = std::path::PathBuf::from(env::var("OUT_DIR").unwrap()); + + println!("cargo::rerun-if-changed=javascript/"); + if let Ok(exit) = std::process::Command::new("tsc") + .arg("--outDir") + .arg(&out_dir) + .current_dir("javascript") + .spawn()? + .wait() + { + if !exit.success() { + std::process::exit(exit.code().unwrap_or(1)) + } + } + + println!("cargo:rerun-if-changed=assets/"); + let assets_path = std::path::Path::new("assets"); + let mut assets = WalkDir::new(&assets_path) + .into_iter(); + while let Some(Ok(entry)) = assets.next() { + if entry.file_type().is_dir() { + if let Err(err) = std::fs::create_dir(&out_dir.join(entry.path())) { + if err.kind() != std::io::ErrorKind::AlreadyExists { + return Err(err) + } + } + } else { + std::fs::copy(entry.path(), &out_dir.join(entry.path().strip_prefix(assets_path).unwrap()))?; + } + } + + let walker = WalkDir::new(&out_dir) + .into_iter() + .map(Result::unwrap) + .filter(|e| { + e.file_type().is_file() && e.path().extension().unwrap() != "gz" + }); + for entry in walker { + let normal_path = entry.path(); + let gzip_path = normal_path.with_extension({ + let mut extension = normal_path + .extension() + .unwrap() + .to_owned(); + extension.push(OsStr::new(".gz")); + extension + }); + eprintln!( + "{} -> {}", + normal_path.strip_prefix(&out_dir).unwrap().display(), + gzip_path.strip_prefix(&out_dir).unwrap().display() + ); + { + let mut out_file = std::fs::OpenOptions::new() + .create(true) + .truncate(true) + .write(true) + .open(&gzip_path)?; + + let mut in_file = std::fs::File::open(&normal_path)?; + + let mut encoder = Encoder::new(&mut out_file)?; + std::io::copy(&mut in_file, &mut encoder)?; + encoder.finish().into_result()?; + } + + let normal_len: f64 = std::fs::metadata(&normal_path).unwrap().len() as f64; + let gzipped_len: f64 = std::fs::metadata(&gzip_path).unwrap().len() as f64; + let 
ratio = gzipped_len / normal_len; + eprintln!("Ratio: {}", ratio); + if ratio <= 0.9 { + std::fs::remove_file(&normal_path)? + } else { + println!( + "cargo:warning={} compression ratio is {} (> 0.9), leaving as is", + entry.path().display(), + ratio + ); + std::fs::remove_file(&gzip_path)? + } + } + Ok(()) +} diff --git a/templates/javascript/dist/indieauth.js b/templates/javascript/dist/indieauth.js new file mode 100644 index 0000000..297b4b5 --- /dev/null +++ b/templates/javascript/dist/indieauth.js @@ -0,0 +1,118 @@ +"use strict"; +const WEBAUTHN_TIMEOUT = 60 * 1000; +async function webauthn_create_credential() { + const response = await fetch("/.kittybox/webauthn/pre_register"); + const { challenge, rp, user } = await response.json(); + return await navigator.credentials.create({ + publicKey: { + challenge: Uint8Array.from(challenge, (c) => c.charCodeAt(0)), + rp: rp, + user: { + id: Uint8Array.from(user.cred_id, (c) => c.charCodeAt(0)), + name: user.name, + displayName: user.displayName + }, + pubKeyCredParams: [{ alg: -7, type: "public-key" }], + authenticatorSelection: {}, + timeout: WEBAUTHN_TIMEOUT, + attestation: "none" + } + }); +} +async function webauthn_authenticate() { + const response = await fetch("/.kittybox/webauthn/pre_auth"); + const { challenge, credentials } = await response.json(); + try { + return await navigator.credentials.get({ + publicKey: { + challenge: Uint8Array.from(challenge, (c) => c.charCodeAt(0)), + allowCredentials: credentials.map(cred => ({ + id: Uint8Array.from(cred.id, c => c.charCodeAt(0)), + type: cred.type + })), + timeout: WEBAUTHN_TIMEOUT + } + }); + } + catch (e) { + console.error("WebAuthn authentication failed:", e); + alert("Using your authenticator failed. 
(Check the DevTools for details)"); + throw e; + } +} +async function submit_handler(e) { + e.preventDefault(); + if (e.target != null && e.target instanceof HTMLFormElement) { + const form = e.target; + let scopes; + if (form.elements.namedItem("scope") === undefined) { + scopes = []; + } + else if (form.elements.namedItem("scope") instanceof Node) { + scopes = [form.elements.namedItem("scope")] + .filter((e) => e.checked) + .map((e) => e.value); + } + else { + scopes = Array.from(form.elements.namedItem("scope")) + .filter((e) => e.checked) + .map((e) => e.value); + } + const authorization_request = { + response_type: form.elements.namedItem("response_type").value, + client_id: form.elements.namedItem("client_id").value, + redirect_uri: form.elements.namedItem("redirect_uri").value, + state: form.elements.namedItem("state").value, + code_challenge: form.elements.namedItem("code_challenge").value, + code_challenge_method: form.elements.namedItem("code_challenge_method").value, + // I would love to leave that as a list, but such is the form of + // IndieAuth. application/x-www-form-urlencoded doesn't have + // lists, so scopes are space-separated instead. It is annoying. + scope: scopes.length > 0 ? 
scopes.join(" ") : undefined, + }; + let credential = null; + switch (form.elements.namedItem("auth_method").value) { + case "password": + credential = form.elements.namedItem("user_password").value; + if (credential.length == 0) { + alert("Please enter a password."); + return; + } + break; + case "webauthn": + // credential = await webauthn_authenticate(); + alert("WebAuthn isn't implemented yet!"); + return; + break; + default: + alert("Please choose an authentication method."); + return; + } + console.log("Authorization request:", authorization_request); + console.log("Authentication method:", credential); + const body = JSON.stringify({ + request: authorization_request, + authorization_method: credential + }); + console.log(body); + const response = await fetch(form.action, { + method: form.method, + body: body, + headers: { + "Content-Type": "application/json" + } + }); + if (response.ok) { + let location = response.headers.get("Location"); + if (location != null) { + window.location.href = location; + } + else { + throw "Error: didn't return a location"; + } + } + } + else { + return; + } +} diff --git a/templates/javascript/dist/webauthn/register.js b/templates/javascript/dist/webauthn/register.js new file mode 100644 index 0000000..3918c74 --- /dev/null +++ b/templates/javascript/dist/webauthn/register.js @@ -0,0 +1 @@ +"use strict"; diff --git a/templates/javascript/src/indieauth.ts b/templates/javascript/src/indieauth.ts new file mode 100644 index 0000000..01732b7 --- /dev/null +++ b/templates/javascript/src/indieauth.ts @@ -0,0 +1,150 @@ +import { unreachable } from "./lib.js"; + +const WEBAUTHN_TIMEOUT = 60 * 1000; + +interface KittyboxWebauthnPreRegistrationData { + challenge: string, + rp: PublicKeyCredentialRpEntity, + user: { + cred_id: string, + name: string, + displayName: string + } +} + +async function webauthn_create_credential() { + const response = await fetch("/.kittybox/webauthn/pre_register"); + const { challenge, rp, user }: 
KittyboxWebauthnPreRegistrationData = await response.json(); + + return await navigator.credentials.create({ + publicKey: { + challenge: Uint8Array.from(challenge, (c) => c.charCodeAt(0)), + rp: rp, + user: { + id: Uint8Array.from(user.cred_id, (c) => c.charCodeAt(0)), + name: user.name, + displayName: user.displayName + }, + pubKeyCredParams: [{alg: -7, type: "public-key"}], + authenticatorSelection: {}, + timeout: WEBAUTHN_TIMEOUT, + attestation: "none" + } + }); +} + +interface KittyboxWebauthnCredential { + id: string, + type: "public-key" +} + +interface KittyboxWebauthnPreAuthenticationData { + challenge: string, + credentials: KittyboxWebauthnCredential[] +} + +async function webauthn_authenticate() { + const response = await fetch("/.kittybox/webauthn/pre_auth"); + const { challenge, credentials } = await response.json() as unknown as KittyboxWebauthnPreAuthenticationData; + + try { + return await navigator.credentials.get({ + publicKey: { + challenge: Uint8Array.from(challenge, (c) => c.charCodeAt(0)), + allowCredentials: credentials.map(cred => ({ + id: Uint8Array.from(cred.id, c => c.charCodeAt(0)), + type: cred.type + })), + timeout: WEBAUTHN_TIMEOUT + } + }) + } catch (e) { + console.error("WebAuthn authentication failed:", e); + alert("Using your authenticator failed. 
(Check the DevTools for details)"); + throw e; + } +} + +export async function submit_handler(e: SubmitEvent) { + e.preventDefault(); + if (e.target != null && e.target instanceof HTMLFormElement) { + const form = e.target as HTMLFormElement; + + let scopes: Array; + let scope_elem = form.elements.namedItem("scope"); + if (scope_elem == null) { + scopes = [] + } else if (scope_elem instanceof Element) { + scopes = ([scope_elem] as Array) + .filter((e: HTMLInputElement) => e.checked) + .map((e: HTMLInputElement) => e.value); + } else if (scope_elem instanceof RadioNodeList) { + scopes = (Array.from(scope_elem) as Array) + .filter((e: HTMLInputElement) => e.checked) + .map((e: HTMLInputElement) => e.value); + } else { + unreachable("HTMLFormControlsCollection returned something that's not null, Element or RadioNodeList") + } + + const authorization_request = { + response_type: (form.elements.namedItem("response_type") as HTMLInputElement).value, + client_id: (form.elements.namedItem("client_id") as HTMLInputElement).value, + redirect_uri: (form.elements.namedItem("redirect_uri") as HTMLInputElement).value, + state: (form.elements.namedItem("state") as HTMLInputElement).value, + code_challenge: (form.elements.namedItem("code_challenge") as HTMLInputElement).value, + code_challenge_method: (form.elements.namedItem("code_challenge_method") as HTMLInputElement).value, + // I would love to leave that as a list, but such is the form of + // IndieAuth. application/x-www-form-urlencoded doesn't have + // lists, so scopes are space-separated instead. It is annoying. + scope: scopes.length > 0 ? 
scopes.join(" ") : undefined, + }; + + let credential = null; + switch ((form.elements.namedItem("auth_method") as HTMLInputElement).value) { + case "password": + credential = (form.elements.namedItem("user_password") as HTMLInputElement).value; + if (credential.length == 0) { + alert("Please enter a password.") + return + } + break; + case "webauthn": + // credential = await webauthn_authenticate(); + alert("WebAuthn isn't implemented yet!") + return + break + default: + alert("Please choose an authentication method.") + return + } + + console.log("Authorization request:", authorization_request); + console.log("Authentication method:", credential); + + const body = JSON.stringify({ + request: authorization_request, + authorization_method: credential + }); + console.log(body); + + const response = await fetch(form.action, { + method: form.method, + body: body, + headers: { + "Content-Type": "application/json" + } + }); + + if (response.ok) { + let location = response.headers.get("Location"); + if (location != null) { + window.location.href = location + } else { + throw "Error: didn't return a location" + } + } + } else { + return + } + +} diff --git a/templates/javascript/src/lib.ts b/templates/javascript/src/lib.ts new file mode 100644 index 0000000..38ba65b --- /dev/null +++ b/templates/javascript/src/lib.ts @@ -0,0 +1,3 @@ +export function unreachable(msg: string): never { + throw new Error(msg); +} diff --git a/templates/javascript/src/onboarding.ts b/templates/javascript/src/onboarding.ts new file mode 100644 index 0000000..0b455eb --- /dev/null +++ b/templates/javascript/src/onboarding.ts @@ -0,0 +1,120 @@ +const firstOnboardingCard = "intro"; + +function switchOnboardingCard(card: string) { + (Array.from(document.querySelectorAll("form.onboarding > fieldset")) as HTMLElement[]) + .map((node: HTMLElement) => { + if (node.id == card) { + node.style.display = "block"; + } else { + node.style.display = "none"; + } + }); + + 
(Array.from(document.querySelectorAll("form.onboarding > ul#progressbar > li")) as HTMLElement[]) + .map(node => { + if (node.id == card) { + node.classList.add("active") + } else { + node.classList.remove("active") + } + }); +}; + +interface Window { + kittybox_onboarding: { + switchOnboardingCard: (card: string) => void + } +} + +window.kittybox_onboarding = { + switchOnboardingCard +}; + +(document.querySelector("form.onboarding > ul#progressbar") as HTMLElement).style.display = ""; +switchOnboardingCard(firstOnboardingCard); + +function switchCardOnClick(event: MouseEvent) { + if (event.target instanceof HTMLElement) { + if (event.target.dataset.card !== undefined) { + switchOnboardingCard(event.target.dataset.card) + } + } +} + +function multiInputAddMore(event: (MouseEvent | { target: HTMLElement })) { + if (event.target instanceof HTMLElement) { + let parent = event.target.parentElement; + if (parent !== null) { + let template = (parent.querySelector("template") as HTMLTemplateElement).content.cloneNode(true); + parent.prepend(template); + } + } +} + +(Array.from( + document.querySelectorAll( + "form.onboarding > fieldset button.switch_card" + ) +) as HTMLButtonElement[]) + .map(button => { + button.addEventListener("click", switchCardOnClick) + }); + +(Array.from( + document.querySelectorAll( + "form.onboarding > fieldset div.multi_input > button.add_more" + ) +) as HTMLButtonElement[]) + .map(button => { + button.addEventListener("click", multiInputAddMore) + multiInputAddMore({ target: button }); + }); + +const form = document.querySelector("form.onboarding") as HTMLFormElement; +console.log(form); +form.onsubmit = async (event: SubmitEvent) => { + console.log(event); + event.preventDefault(); + const form = event.target as HTMLFormElement; + const json = { + user: { + type: ["h-card"], + properties: { + name: [(form.querySelector("#hcard_name") as HTMLInputElement).value], + pronoun: (Array.from( + form.querySelectorAll("#hcard_pronouns") + ) as 
HTMLInputElement[]) + .map(input => input.value).filter(i => i != ""), + url: (Array.from(form.querySelectorAll("#hcard_url")) as HTMLInputElement[]) + .map(input => input.value).filter(i => i != ""), + note: [(form.querySelector("#hcard_note") as HTMLInputElement).value] + } + }, + first_post: { + type: ["h-entry"], + properties: { + content: [(form.querySelector("#first_post_content") as HTMLTextAreaElement).value] + } + }, + blog_name: (form.querySelector("#blog_name") as HTMLInputElement).value, + feeds: (Array.from( + form.querySelectorAll(".multi_input#custom_feeds > fieldset.feed") + ) as HTMLElement[]) + .map(form => { + return { + name: (form.querySelector("#feed_name") as HTMLInputElement).value, + slug: (form.querySelector("#feed_slug") as HTMLInputElement).value + } + }).filter(feed => feed.name == "" || feed.slug == "") + }; + + await fetch("/.kittybox/onboarding", { + method: "POST", + body: JSON.stringify(json), + headers: { "Content-Type": "application/json" } + }).then(response => { + if (response.status == 201) { + window.location.href = window.location.href; + } + }) +} diff --git a/templates/javascript/src/webauthn/register.ts b/templates/javascript/src/webauthn/register.ts new file mode 100644 index 0000000..e69de29 diff --git a/templates/javascript/tsconfig.json b/templates/javascript/tsconfig.json new file mode 100644 index 0000000..18b94c7 --- /dev/null +++ b/templates/javascript/tsconfig.json @@ -0,0 +1,104 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. 
*/ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + + /* Language and Environment */ + "target": "es2022", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + + /* Modules */ + "module": "es2022", /* Specify what module code is generated. */ + // "rootDir": "./", /* Specify the root folder within your source files. 
*/ + // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. 
*/ + "outDir": "./dist", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. 
*/ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. 
*/ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ + "skipLibCheck": true /* Skip type checking all .d.ts files. */ + }, + "include": ["src/**/*"] +} diff --git a/templates/src/indieauth.rs b/templates/src/indieauth.rs new file mode 100644 index 0000000..6a46773 --- /dev/null +++ b/templates/src/indieauth.rs @@ -0,0 +1,201 @@ +use kittybox_indieauth::{AuthorizationRequest, Scope}; +use kittybox_util::auth::EnrolledCredential; + +markup::define! { + AuthorizationRequestPage( + request: AuthorizationRequest, + credentials: Vec, + app: Option, + user: serde_json::Value + ) { + script[type="module"] { + @markup::raw(r#"import { submit_handler } from "/.kittybox/static/indieauth.js"; + +document.getElementById("indieauth_page").addEventListener("submit", submit_handler); +"#) + } + main { + form #indieauth_page[action="/.kittybox/indieauth/auth/confirm", method="POST"] { + noscript { + p {"I know how annoyed you can be about scripts." } + p { "But WebAuthn doesn't work without JavaScript. And passwords are horribly insecure, and everyone knows it deep inside their heart." 
} + p { b { "Please enable JavaScript for this page to work properly 😭" } } + } + div #introduction { + h1."mini-h-card" { + "Hi, " + @if let Some(photo) = user["properties"]["photo"][0].as_str() { + img.user_avatar[src=photo]; + } else if let Some(photo) = user["properties"]["photo"][0].as_object() { + img[ + src=photo["value"].as_str().unwrap(), + alt=photo["alt"].as_str().unwrap(), + loading="lazy" + ]; + } + @user["properties"]["name"][0].as_str().unwrap_or("administrator") + } + + p."mini-h-card" { + @if let Some(icon) = app + .as_ref() + .and_then(|app| app["properties"]["logo"][0].as_str()) + { + img.app_icon[src=icon]; + } else if let Some(icon) = app + .as_ref() + .and_then(|app| app["properties"]["logo"][0].as_object()) + { + img.app_icon[src=icon["src"].as_str().unwrap(), alt=icon["alt"].as_str().unwrap()]; + } + span { + a[href=app + .as_ref() + .and_then(|app| app["properties"]["url"][0].as_str()) + .unwrap_or_else(|| request.client_id.as_str()) + ] { + @app + .as_ref() + .and_then(|app| app["properties"]["name"][0].as_str()) + .unwrap_or_else(|| request.client_id.as_str()) + } + " wants to confirm your identity." + } + } + } + + @if request.scope.is_some() { + p { + "An application just requested access to your website. This can give access to your data, including private content." + } + + p { + "You can review the permissions the application requested below. You are free to not grant any permissions that the application requested if you don't trust it, at the cost of potentially reducing its functionality." 
+ } + } + + fieldset #scopes { + legend { "Permissions to grant the app:" } + div { + input[type="checkbox", disabled="true", checked="true"]; + label[for="identify"] { + "Identify you as the owner of " + @user["properties"]["uid"][0].as_str().unwrap() + } + } + @if let Some(scopes) = &request.scope { + @for scope in scopes.iter() { + div { + input[type="checkbox", name="scope", id=scope.as_ref(), value=scope.as_ref()]; + label[for=scope.as_ref()] { + @match scope { + Scope::Profile => { + "Access your publicly visible profile information" + } + Scope::Email => { + "Access your email address" + } + Scope::Create => { + "Create new content on your website" + } + Scope::Update => { + "Modify content on your website" + } + Scope::Delete => { + "Delete content on your website" + } + Scope::Media => { + "Interact with your media storage" + } + other => { + @markup::raw(format!( + "(custom or unknown scope) {}", + other.as_ref() + )) + } + } + } + } + } + } + } + + fieldset { + legend { "Choose your preferred authentication method:" } + div { + input[type="radio", + name="auth_method", + id="auth_with_webauthn", + disabled=!credentials.iter().any(|e| *e == EnrolledCredential::WebAuthn), + checked=credentials.iter().any(|e| *e == EnrolledCredential::WebAuthn) + ]; + label[for="auth_with_webauthn"] { "Use an authenticator device to log in" } + } + div { + input[type="radio", + name="auth_method", value="password", + id="auth_with_password", + disabled=!credentials.iter().any(|e| *e == EnrolledCredential::Password), + checked=credentials.iter().all(|e| *e == EnrolledCredential::Password) + ]; + label[for="auth_with_password"] { "Password" } + br; + input[type="password", name="user_password", id="user_password"]; + } + } + + input[type="submit", value="Authenticate"]; + br; + + details { + summary { "View detailed data about this request" } + + p { + "More info about meanings of these fields can be found in " + 
a[href="https://indieauth.spec.indieweb.org/20220212/#authorization-request"] { + "the IndieAuth specification" + } ", which this webpage uses." + } + fieldset { + div { + label[for="response_type"] { "Response type (will most likely be \"code\")" } + br; + input[name="response_type", id="response_type", readonly, + value=request.response_type.as_str()]; + } + div { + label[for="state"] { "Request state" } + br; + input[name="state", id="state", readonly, + value=request.state.as_ref()]; + } + div { + label[for="client_id"] { "Client ID" } + br; + input[name="client_id", id="client_id", readonly, + value=request.client_id.as_str()]; + } + div { + label[for="redirect_uri"] { "Redirect URI" } + br; + input[name="redirect_uri", id="redirect_uri", readonly, + value=request.redirect_uri.as_str()]; + } + div { + label[for="code_challenge"] { "PKCE code challenge" } + br; + input[name="code_challenge", id="code_challenge", readonly, + value=request.code_challenge.as_str()]; + } + div { + label[for="code_challenge_method"] { "PKCE method (should be S256)" } + br; + input[name="code_challenge_method", id="code_challenge_method", readonly, + value=request.code_challenge.method().as_str()]; + } + } + } + } + } + } +} diff --git a/templates/src/lib.rs b/templates/src/lib.rs new file mode 100644 index 0000000..8d5d5fa --- /dev/null +++ b/templates/src/lib.rs @@ -0,0 +1,367 @@ +mod templates; +pub use templates::{ErrorPage, MainPage, Template}; +mod onboarding; +pub use onboarding::OnboardingPage; +mod indieauth; +pub use indieauth::AuthorizationRequestPage; +mod login; +pub use login::LoginPage; +mod mf2; +pub use mf2::{Entry, VCard, Feed, Food, POSTS_PER_PAGE}; + +pub mod assets { + use axum::response::{IntoResponse, Response}; + use axum::extract::Path; + use axum::http::StatusCode; + use axum::http::header::{CONTENT_TYPE, CONTENT_ENCODING, CACHE_CONTROL}; + + const ASSETS: include_dir::Dir<'static> = include_dir::include_dir!("$OUT_DIR/"); + const CACHE_FOR_A_DAY: &str = 
"max-age=86400"; + const GZIP: &str = "gzip"; + + pub async fn statics( + Path(path): Path + ) -> Response { + let content_type: &'static str = if path.ends_with(".js") { + "application/javascript" + } else if path.ends_with(".css") { + "text/css" + } else if path.ends_with(".html") { + "text/html; charset=\"utf-8\"" + } else { + "application/octet-stream" + }; + + match ASSETS.get_file(path.clone() + ".gz") { + Some(file) => (StatusCode::OK, + [(CONTENT_TYPE, content_type), + (CONTENT_ENCODING, GZIP), + (CACHE_CONTROL, CACHE_FOR_A_DAY)], + file.contents()).into_response(), + None => match ASSETS.get_file(path) { + Some(file) => (StatusCode::OK, + [(CONTENT_TYPE, content_type), + (CACHE_CONTROL, CACHE_FOR_A_DAY)], + file.contents()).into_response(), + None => StatusCode::NOT_FOUND.into_response() + } + } + } +} + +#[cfg(test)] +mod tests { + use faker_rand::en_us::internet::Domain; + use faker_rand::lorem::Word; + use microformats::types::{Document, Item, PropertyValue, Url}; + use serde_json::json; + use std::cell::RefCell; + use std::rc::Rc; + + enum PostType { + Note, + Article, + ReplyTo(serde_json::Value), + ReplyToLink(String), + LikeOf(serde_json::Value), + LikeOfLink(String), + } + + fn gen_hcard(domain: &str) -> serde_json::Value { + use faker_rand::en_us::names::FirstName; + + json!({ + "type": ["h-card"], + "properties": { + "name": [rand::random::().to_string()], + "photo": [format!("https://{domain}/media/me.png")], + "uid": [format!("https://{domain}/")], + "url": [format!("https://{domain}/")] + } + }) + } + + fn gen_random_post(domain: &str, kind: PostType) -> serde_json::Value { + use faker_rand::lorem::{Paragraph, Sentence}; + + fn html(content: Paragraph) -> serde_json::Value { + json!({ + "html": format!("

{}

", content), + "value": content.to_string() + }) + } + + let uid = format!( + "https://{domain}/posts/{}-{}-{}", + rand::random::(), + rand::random::(), + rand::random::() + ); + let dt = chrono::offset::Local::now().to_rfc3339_opts(chrono::SecondsFormat::Secs, true); + + match kind { + PostType::Note => { + let content = rand::random::(); + + json!({ + "type": ["h-entry"], + "properties": { + "content": [html(content)], + "published": [dt], + "uid": [&uid], "url": [&uid], + "author": [gen_hcard(domain)] + } + }) + } + PostType::Article => { + let content = rand::random::(); + let name = rand::random::(); + + json!({ + "type": ["h-entry"], + "properties": { + "content": [html(content)], + "published": [dt], + "uid": [&uid], "url": [&uid], + "author": [gen_hcard(domain)], + "name": [name.to_string()] + } + }) + } + PostType::ReplyTo(ctx) => { + let content = rand::random::(); + + json!({ + "type": ["h-entry"], + "properties": { + "content": [html(content)], + "published": [dt], + "uid": [&uid], "url": [&uid], + "author": [gen_hcard(domain)], + "in-reply-to": [{ + "type": ["h-cite"], + "properties": ctx["properties"] + }] + } + }) + } + PostType::ReplyToLink(link) => { + let content = rand::random::(); + + json!({ + "type": ["h-entry"], + "properties": { + "content": [html(content)], + "published": [dt], + "uid": [&uid], "url": [&uid], + "author": [gen_hcard(domain)], + "in-reply-to": [link] + } + }) + } + PostType::LikeOf(ctx) => { + json!({ + "type": ["h-entry"], + "properties": { + "published": [dt], + "author": [gen_hcard(domain)], + "uid": [&uid], "url": [&uid], + "like-of": [{ + "type": ["h-cite"], + "properties": ctx["properties"] + }] + } + }) + } + PostType::LikeOfLink(link) => { + json!({ + "type": ["h-entry"], + "properties": { + "published": [dt], + "author": [gen_hcard(domain)], + "uid": [&uid], "url": [&uid], + "like-of": [link] + } + }) + } + } + } + + fn check_dt_published(mf2: &serde_json::Value, item: &Rc>) { + use 
microformats::types::temporal::Value as TemporalValue; + + let _item = item.borrow(); + let props = _item.properties.borrow(); + assert!(props.contains_key("published")); + + if let Some(PropertyValue::Temporal(TemporalValue::Timestamp(item))) = + props.get("published").and_then(|v| v.first()) + { + use chrono::{DateTime, FixedOffset, NaiveDateTime}; + + // Faithfully reconstruct the original datetime + // I wonder why not just have an Enum that would + // get you either date, time or a datetime, + // potentially with an offset? + let offset = item.as_offset().unwrap().data; + let ndt: NaiveDateTime = item.as_date().unwrap().data + .and_time(item.as_time().unwrap().data) + // subtract the offset here, since we will add it back + - offset; + let dt = DateTime::::from_utc(ndt, offset); + + let expected: DateTime = chrono::DateTime::parse_from_rfc3339( + mf2["properties"]["published"][0].as_str().unwrap(), + ) + .unwrap(); + + assert_eq!(dt, expected); + } else { + unreachable!() + } + } + + fn check_e_content(mf2: &serde_json::Value, item: &Rc>) { + let _item = item.borrow(); + let props = _item.properties.borrow(); + assert!(props.contains_key("content")); + + if let Some(PropertyValue::Fragment(content)) = props.get("content").and_then(|v| v.first()) + { + assert_eq!( + content.html, + mf2["properties"]["content"][0]["html"].as_str().unwrap() + ); + } else { + unreachable!() + } + } + + #[test] + #[ignore = "see https://gitlab.com/maxburon/microformats-parser/-/issues/7"] + fn test_note() { + let mf2 = gen_random_post(&rand::random::().to_string(), PostType::Note); + + let html = crate::mf2::Entry { post: &mf2 }.to_string(); + + let url: Url = mf2 + .pointer("/properties/uid/0") + .and_then(|i| i.as_str()) + .and_then(|u| u.parse().ok()) + .unwrap(); + let parsed: Document = microformats::from_html(&html, url.clone()).unwrap(); + + if let Some(PropertyValue::Item(item)) = parsed.get_item_by_url(&url) { + let _item = item.borrow(); + let props = 
_item.properties.borrow(); + + check_e_content(&mf2, &item); + check_dt_published(&mf2, &item); + assert!(props.contains_key("uid")); + assert!(props.contains_key("url")); + assert!(props + .get("url") + .unwrap() + .iter() + .any(|i| i == props.get("uid").and_then(|v| v.first()).unwrap())); + // XXX: fails because of https://gitlab.com/maxburon/microformats-parser/-/issues/7 + assert!(!props.contains_key("name")); + } else { + unreachable!() + } + } + + #[test] + fn test_article() { + let mf2 = gen_random_post(&rand::random::().to_string(), PostType::Article); + let html = crate::mf2::Entry { post: &mf2 }.to_string(); + let url: Url = mf2 + .pointer("/properties/uid/0") + .and_then(|i| i.as_str()) + .and_then(|u| u.parse().ok()) + .unwrap(); + let parsed: Document = microformats::from_html(&html, url.clone()).unwrap(); + + if let Some(PropertyValue::Item(item)) = parsed.get_item_by_url(&url) { + let _item = item.borrow(); + let props = _item.properties.borrow(); + + check_e_content(&mf2, &item); + check_dt_published(&mf2, &item); + assert!(props.contains_key("uid")); + assert!(props.contains_key("url")); + assert!(props + .get("url") + .unwrap() + .iter() + .any(|i| i == props.get("uid").and_then(|v| v.first()).unwrap())); + assert!(props.contains_key("name")); + if let Some(PropertyValue::Plain(name)) = props.get("name").and_then(|v| v.first()) { + assert_eq!( + name, + mf2.pointer("/properties/name/0") + .and_then(|v| v.as_str()) + .unwrap() + ); + } else { + panic!("Name wasn't a plain property!"); + } + } else { + unreachable!() + } + } + + #[test] + fn test_like_of() { + for likeof in [ + PostType::LikeOf(gen_random_post( + &rand::random::().to_string(), + PostType::Note, + )), + PostType::LikeOfLink(format!( + "https://{}/posts/{}-{}-{}", + &rand::random::(), + &rand::random::(), + &rand::random::(), + &rand::random::(), + )), + ] { + let mf2 = gen_random_post(&rand::random::().to_string(), likeof); + let url: Url = mf2 + .pointer("/properties/uid/0") + 
.and_then(|i| i.as_str()) + .and_then(|u| u.parse().ok()) + .unwrap(); + let html = crate::mf2::Entry { post: &mf2 }.to_string(); + let parsed: Document = microformats::from_html(&html, url.clone()).unwrap(); + + if let Some(item) = parsed.items.get(0) { + let _item = item.borrow(); + let props = _item.properties.borrow(); + + check_dt_published(&mf2, item); + assert!(props.contains_key("like-of")); + match props.get("like-of").and_then(|v| v.first()) { + Some(PropertyValue::Url(url)) => { + assert_eq!( + url, + &mf2.pointer("/properties/like-of/0") + .and_then(|i| i.as_str()) + .or_else(|| mf2 + .pointer("/properties/like-of/0/properties/uid/0") + .and_then(|i| i.as_str())) + .and_then(|u| u.parse::().ok()) + .unwrap() + ); + } + Some(PropertyValue::Item(_cite)) => { + todo!() + } + other => panic!("Unexpected value in like-of: {:?}", other), + } + } else { + unreachable!() + } + } + } +} diff --git a/templates/src/login.rs b/templates/src/login.rs new file mode 100644 index 0000000..042c308 --- /dev/null +++ b/templates/src/login.rs @@ -0,0 +1,17 @@ +markup::define! { + LoginPage { + form[method="POST"] { + h1 { "Sign in with your website" } + p { + "Signing in to Kittybox might allow you to view private content " + "intended for your eyes only." + } + + section { + label[for="url"] { "Your website URL" } + input[id="url", name="url", placeholder="https://example.com/"]; + input[type="submit"]; + } + } + } +} diff --git a/templates/src/mf2.rs b/templates/src/mf2.rs new file mode 100644 index 0000000..33cd84a --- /dev/null +++ b/templates/src/mf2.rs @@ -0,0 +1,478 @@ +use ellipse::Ellipse; + +pub static POSTS_PER_PAGE: usize = 20; + +/// Return a pretty location specifier from a geo: URI. 
+fn decode_geo_uri(uri: &str) -> String { + if let Some(part) = uri.split(':').collect::>().get(1) { + if let Some(part) = part.split(';').next() { + let mut parts = part.split(','); + let lat = parts.next().unwrap(); + let lon = parts.next().unwrap(); + + // TODO - format them as proper latitude and longitude + format!("{}, {}", lat, lon) + } else { + uri.to_string() + } + } else { + uri.to_string() + } +} + +markup::define! { + Entry<'a>(post: &'a serde_json::Value) { + @if post.pointer("/properties/like-of").is_none() && post.pointer("/properties/bookmark-of").is_none() { + @FullEntry { post } + } else { + // Show a mini-post. + @MiniEntry { post } + } + } + MiniEntry<'a>(post: &'a serde_json::Value) { + article."h-entry mini-entry" { + @if let Some(author) = post["properties"]["author"][0].as_object() { + span."mini-h-card"."u-author" { + a."u-author"[href=author["properties"]["uid"][0].as_str().unwrap()] { + @if let Some(photo) = author["properties"]["photo"][0].as_str() { + img[src=photo, loading="lazy"]; + } else if author["properties"]["photo"][0].is_object() { + img[ + src=author["properties"]["photo"][0]["value"].as_str().unwrap(), + alt=author["properties"]["photo"][0]["alt"].as_str().unwrap(), + loading="lazy" + ]; + } + @author["properties"]["name"][0].as_str().unwrap() + } + } + @if let Some(likeof) = post["properties"]["like-of"][0].as_str() { + " " + span."like-icon"["aria-label"="liked"] { + span."like-icon-label"["aria-hidden"="true"] { + "❀️" + } + } + " " + a."u-like-of"[href=likeof] { @likeof } + } else if let Some(likeof) = post["properties"]["like-of"][0].as_object() { + a."u-like-of"[href=likeof["properties"]["url"][0].as_str().unwrap()] { + @likeof["properties"]["name"][0] + .as_str() + .unwrap_or_else(|| likeof["properties"]["url"][0].as_str().unwrap()) + } + } + @if let Some(bookmarkof) = post["properties"]["bookmark-of"][0].as_str() { + " πŸ”– " + a."u-bookmark-of"[href=bookmarkof] { @bookmarkof } + } else if let Some(bookmarkof) = 
post["properties"]["bookmark-of"][0].as_object() { + a."u-bookmark-of"[href=bookmarkof["properties"]["url"][0].as_str().unwrap()] { + @bookmarkof["properties"]["name"][0] + .as_str() + .unwrap_or_else(|| bookmarkof["properties"]["url"][0].as_str().unwrap()) + } + } + " " + a."u-url"."u-uid"[href=post["properties"]["uid"][0].as_str().unwrap()] { + @if let Some(published) = post["properties"]["published"][0].as_str() { + time."dt-published"[datetime=published] { + @chrono::DateTime::parse_from_rfc3339(published) + .map(|dt| dt.format("on %a %b %e %T %Y").to_string()) + .unwrap_or("sometime in the past".to_string()) + } + } else { + "sometime in the past" + } + } + } + } + } + FullEntry<'a>(post: &'a serde_json::Value) { + article."h-entry" { + header.metadata { + @if let Some(name) = post["properties"]["name"][0].as_str() { + h1."p-name" { @name } + } + @if let Some(author) = post["properties"]["author"][0].as_object() { + section."mini-h-card" { + a.larger."u-author"[href=author["properties"]["uid"][0].as_str().unwrap()] { + @if let Some(photo) = author["properties"]["photo"][0].as_str() { + img[src=photo, loading="lazy"]; + } else if let Some(photo) = author["properties"]["photo"][0].as_object() { + img[ + src=photo["value"].as_str().unwrap(), + alt=photo["alt"].as_str().unwrap(), + loading="lazy" + ]; + } + + @author["properties"]["name"][0].as_str().unwrap() + } + } + } + div { + span { + a."u-url"."u-uid"[href=post["properties"]["uid"][0].as_str().unwrap()] { + @if let Some(published) = post["properties"]["published"][0].as_str() { + time."dt-published"[datetime=published] { + @chrono::DateTime::parse_from_rfc3339(published) + .map(|dt| dt.format("%a %b %e %T %Y").to_string()) + .unwrap_or("sometime in the past".to_string()) + } + } + } + } + @if post["properties"]["visibility"][0].as_str().unwrap_or("public") != "public" { + span."p-visibility"[value=post["properties"]["visibility"][0].as_str().unwrap()] { + 
@post["properties"]["visibility"][0].as_str().unwrap() + } + } + @if post["properties"]["category"].is_array() { + span { + ul.categories { + "Tagged: " + @for cat in post["properties"]["category"].as_array().unwrap() { + li."p-category" { @cat.as_str().unwrap() } + } + } + } + } + @if post["properties"]["in-reply-to"].is_array() { + span { + "In reply to: " + ul.replyctx { + @for ctx in post["properties"]["in-reply-to"].as_array().unwrap() { + @if let Some(ctx) = ctx.as_str() { + li { + a."u-in-reply-to"[href=ctx] { + @ctx.truncate_ellipse(48).as_ref() + } + } + } else if let Some(ctx) = ctx.as_object() { + li { + a."u-in-reply-to"[href=ctx["properties"]["uid"][0] + .as_str() + .unwrap_or_else(|| ctx["properties"]["url"][0].as_str().unwrap())] + { + @ctx["properties"]["uid"][0] + .as_str() + .unwrap_or_else(|| ctx["properties"]["url"][0].as_str().unwrap()) + .truncate_ellipse(48) + .as_ref() + } + } + } + } + } + } + } + } + @if post["properties"]["url"].as_array().unwrap().len() > 1 { + hr; + ul { + "Pretty permalinks for this post:" + @for url in post["properties"]["url"].as_array().unwrap().iter().filter(|i| **i != post["properties"]["uid"][0]).map(|i| i.as_str().unwrap()) { + li { + a."u-url"[href=url] { @url } + } + } + } + } + @if let Some(links) = post["properties"]["syndication"].as_array() { + @if !links.is_empty() { + hr; + ul { + "Also published on:" + @for url in links.iter().filter_map(|i| i.as_str()) { + li { a."u-syndication"[href=url] { @url } } + } + } + } + } + @if post["properties"]["location"].is_array() || post["properties"]["checkin"].is_array() { + div { + @if post["properties"]["checkin"].is_array() { + span { + "Check-in to: " + @if post["properties"]["checkin"][0].is_string() { + // It's a URL + a."u-checkin"[href=post["properties"]["checkin"][0].as_str().unwrap()] { + @post["properties"]["checkin"][0].as_str().unwrap().truncate_ellipse(24).as_ref() + } + } else { + 
a."u-checkin"[href=post["properties"]["checkin"][0]["properties"]["uid"][0].as_str().unwrap()] { + @post["properties"]["checkin"][0]["properties"]["name"][0].as_str().unwrap() + } + } + } + } + @if post["properties"]["location"].is_array() { + span { + "Location: " + @if post["properties"]["location"][0].is_string() { + // It's a geo: URL + // We need to decode it + a."u-location"[href=post["properties"]["location"][0].as_str().unwrap()] { + @decode_geo_uri(post["properties"]["location"][0].as_str().unwrap()) + } + } else { + // It's an inner h-geo object + a."u-location"[href=post["properties"]["location"][0]["value"].as_str().map(|x| x.to_string()).unwrap_or(format!("geo:{},{}", post["properties"]["location"][0]["properties"]["latitude"][0].as_str().unwrap(), post["properties"]["location"][0]["properties"]["longitude"][0].as_str().unwrap()))] { + // I'm a lazy bitch + @decode_geo_uri(&post["properties"]["location"][0]["value"].as_str().map(|x| x.to_string()).unwrap_or(format!("geo:{},{}", post["properties"]["location"][0]["properties"]["latitude"][0].as_str().unwrap(), post["properties"]["location"][0]["properties"]["longitude"][0].as_str().unwrap()))) + } + } + } + } + } + } + @if post["properties"]["ate"].is_array() || post["properties"]["drank"].is_array() { + div { + @if post["properties"]["ate"].is_array() { + span { ul { + "Ate:" + @for food in post["properties"]["ate"].as_array().unwrap() { + li { + @if food.is_string() { + // If this is a string, it's a URL. + a."u-ate"[href=food.as_str().unwrap()] { + @food.as_str().unwrap().truncate_ellipse(24).as_ref() + } + } else { + // This is a rich food object (mm, sounds tasty! 
I wanna eat something tasty) + a."u-ate"[href=food["properties"]["uid"][0].as_str().unwrap_or("#")] { + @food["properties"]["name"][0].as_str() + .unwrap_or(food["properties"]["uid"][0].as_str().unwrap_or("#").truncate_ellipse(24).as_ref()) + } + } + } + } + } } + } + @if post["properties"]["drank"].is_array() { + span { ul { + "Drank:" + @for food in post["properties"]["drank"].as_array().unwrap() { + li { + @if food.is_string() { + // If this is a string, it's a URL. + a."u-drank"[href=food.as_str().unwrap()] { + @food.as_str().unwrap().truncate_ellipse(24).as_ref() + } + } else { + // This is a rich food object (mm, sounds tasty! I wanna eat something tasty) + a."u-drank"[href=food["properties"]["uid"][0].as_str().unwrap_or("#")] { + @food["properties"]["name"][0].as_str() + .unwrap_or(food["properties"]["uid"][0].as_str().unwrap_or("#").truncate_ellipse(24).as_ref()) + } + } + } + } + } } + } + } + } + } + @PhotoGallery { photos: post["properties"]["photo"].as_array() } + @if post["properties"]["content"][0]["html"].is_string() { + main."e-content" { + @markup::raw(post["properties"]["content"][0]["html"].as_str().unwrap().trim()) + } + } + @WebInteractions { post } + } + } + VCard<'a>(card: &'a serde_json::Value) { + article."h-card" { + @if card["properties"]["photo"][0].is_string() { + img."u-photo"[src=card["properties"]["photo"][0].as_str().unwrap()]; + } else if card["properties"]["photo"][0].is_object() { + img."u-photo"[ + src=card["properties"]["photo"][0]["value"].as_str().unwrap(), + alt=card["properties"]["photo"][0]["alt"].as_str().unwrap() + ]; + } + h1 { + a."u-url"."u-uid"."p-name"[href=card["properties"]["uid"][0].as_str().unwrap()] { + @card["properties"]["name"][0].as_str().unwrap() + } + } + @if card["properties"]["pronoun"].is_array() { + span { + "(" + @for (i, pronoun) in card["properties"]["pronoun"].as_array().unwrap().iter().filter_map(|v| v.as_str()).enumerate() { + span."p-pronoun" { + @pronoun + } + // Insert commas between multiple 
sets of pronouns + @if i < (card["properties"]["pronoun"].as_array().unwrap().len() - 1) {", "} + } + ")" + } + } + @if card["properties"]["note"].is_array() { + p."p-note" { + @card["properties"]["note"][0]["value"].as_str().unwrap_or_else(|| card["properties"]["note"][0].as_str().unwrap()) + } + } + @if card["properties"]["url"].is_array() { + ul { + "Can be found elsewhere at:" + @for url in card["properties"]["url"] + .as_array() + .unwrap() + .iter() + .filter_map(|v| v.as_str()) + .filter(|v| v != &card["properties"]["uid"][0].as_str().unwrap()) + .filter(|v| !card["properties"]["author"][0].as_str().is_some_and(|a| v.starts_with(a))) + { + li { a."u-url"[href=url, rel="me"] { @url } } + } + } + } + } + } + Food<'a>(food: &'a serde_json::Value) { + article."h-food" { + header.metadata { + h1 { + a."p-name"."u-url"[href=food["properties"]["url"][0].as_str().unwrap()] { + @food["properties"]["name"][0].as_str().unwrap() + } + } + } + @PhotoGallery { photos: food["properties"]["photo"].as_array() } + } + } + Feed<'a>(feed: &'a serde_json::Value, cursor: Option<&'a str>) { + div."h-feed" { + div.metadata { + @if feed["properties"]["name"][0].is_string() { + h1."p-name".titanic { + a[href=feed["properties"]["uid"][0].as_str().unwrap(), rel="feed"] { + @feed["properties"]["name"][0].as_str().unwrap() + } + } + } + } + @if feed["children"].is_array() { + @for child in feed["children"].as_array().unwrap() { + @match child["type"][0].as_str().unwrap() { + "h-entry" => { @Entry { post: child } } + "h-feed" => { @Feed { feed: child, cursor: None } } + "h-food" => { @Food { food: child } } + //"h-event" => { } + "h-card" => { @VCard { card: child } } + something_else => { + p { + "There's supposed to be an " + @something_else + " object here. But Kittybox can't render it right now." + small { "Sorry! 
TToTT" } + } + } + } + } + } + @if let Some(cursor) = cursor { + a[rel="prev", href=format!("{}?after={}", feed["properties"]["uid"][0].as_str().unwrap(), cursor)] { + "Older posts" + } + } else { + p { + "Looks like you reached the end. Wanna jump back to the " + a[href=feed["properties"]["uid"][0].as_str().unwrap()] { + "beginning" + } "?" + } + } + } + } + + //======================================= + // Components library + //======================================= + PhotoGallery<'a>(photos: Option<&'a Vec>) { + @if let Some(photos) = photos { + @for photo in photos.iter() { + @if let Some(photo) = photo.as_str() { + img."u-photo"[src=photo, loading="lazy"]; + } else if photo.is_object() { + @if let Some(thumbnail) = photo["thumbnail"].as_str() { + a."u-photo"[href=photo["value"].as_str().unwrap()] { + img[src=thumbnail, + loading="lazy", + alt=photo["alt"].as_str().unwrap_or("") + ]; + } + } else { + img."u-photo"[src=photo["value"].as_str().unwrap(), + loading="lazy", + alt=photo["alt"].as_str().unwrap_or("") + ]; + } + } + } + } + } + WebInteractions<'a>(post: &'a serde_json::Value) { + footer.webinteractions { + p[style="display: none", "aria-hidden"="false"] { + "Webmention counters:" + } + ul.counters { + li { + span."icon like-icon"["aria-label"="likes"] { + span."like-icon-label"["aria-hidden"="true"] { + "❀️" + } + } + span.counter { @post["properties"]["like"].as_array().map(|a| a.len()).unwrap_or(0) } + } + li { + span.icon["aria-label"="replies"] { "πŸ’¬" } + span.counter { @post["properties"]["comment"].as_array().map(|a| a.len()).unwrap_or(0) } + } + li { + span.icon["aria-label"="reposts"] { "πŸ”„" } + span.counter { @post["properties"]["repost"].as_array().map(|a| a.len()).unwrap_or(0) } + } + li { + span.icon["aria-label"="bookmarks"] { "πŸ”–" } + span.counter { @post["properties"]["bookmark"].as_array().map(|a| a.len()).unwrap_or(0) } + } + } + /*@if ( + post["properties"]["like"].as_array().map(|a| a.len()).unwrap_or(0) + + 
post["properties"]["bookmark"].as_array().map(|a| a.len()).unwrap_or(0) + + post["properties"]["repost"].as_array().map(|a| a.len()).unwrap_or(0) + + post["properties"]["comment"].as_array().map(|a| a.len()).unwrap_or(0) + ) > 0 { + details { + summary { "Show comments and reactions" } + // TODO actually render facepiles and comments + @if let Some(likes) = post["properties"]["like"].as_array() { + @if !likes.is_empty() { + // Show a facepile of likes for a post + } + } + @if let Some(bookmarks) = post["properties"]["bookmark"].as_array() { + @if !bookmarks.is_empty() { + // Show a facepile of bookmarks for a post + } + } + @if let Some(reposts) = post["properties"]["repost"].as_array() { + @if !reposts.is_empty() { + // Show a facepile of reposts for a post + } + } + @if let Some(comments) = post["properties"]["comment"].as_array() { + @for comment in comments.iter() { + // Show all the comments recursively (so we could do Salmention with them) + } + } + } + }*/ + } + } +} diff --git a/templates/src/onboarding.rs b/templates/src/onboarding.rs new file mode 100644 index 0000000..e2f07df --- /dev/null +++ b/templates/src/onboarding.rs @@ -0,0 +1,196 @@ +markup::define! { + OnboardingPage { + h1[style="text-align: center"] { + "Welcome to Kittybox" + } + script[type="module", src="/.kittybox/static/onboarding.js"] {} + link[rel="stylesheet", href="/.kittybox/static/onboarding.css"]; + form.onboarding[action="", method="POST"] { + noscript { + p { + "Ok, let's be honest. Most of this software doesn't require JS to be enabled " + "to view pages (and in some cases, even edit them if logged in)." + } + p { "This page is a little bit different. 
It uses JavaScript to provide interactive features, such as:" } + ul { + li { "Multiple-input questions" } + li { "Answers spanning multiple fields" } + li { "Preview of files being uploaded" } + li { "Pretty pagination so you won't feel overwhelmed" } + } + p { + "Sadly, it's very hard or even impossible to recreate this without any JavaScript. " + "Good news though - the code is " b { "open-source AND free software" } + " (under GNU AGPLv3) " + "and I promise to not obfuscate it or minify it. " + a[href="/.kittybox/static/onboarding.js"] { "Here" } + "'s the link - you can try reading it so you'll be 200% sure " + "it won't steal your cookies or turn your kitty into a soulless monster." + @markup::raw("") + } + hr; + p { + "In other words: " + b { "please enable JavaScript for this page to work properly." } + small { "sorry T__T" } + } + } + ul #progressbar[style="display: none"] { + li #intro { "Introduction" } + li #hcard { "Your profile" } + li #settings { "Your website" } + li #firstpost { "Your first post" } + } + fieldset #intro[style="display: none"] { + legend { "Introduction" } + p { + "Kittybox is a CMS that can act as a member of the IndieWeb. " + "IndieWeb is a global distributed social network built on top of open Web standards " + "and composed of blogs around the Internet supporting these standards." + } + p { "There is no registration or centralized database of any sort - everyone owns their data and is responsible for it." } + p { "If you're seeing this page, it looks like your configuration is correct and we can proceed with the setup." } + + div.switch_card_buttons { + button.switch_card.next_card[type="button", "data-card"="hcard"] { "Next" } + } + } + + fieldset #hcard[style="display: none"] { + legend { "Your profile" } + p { "An h-card is an IndieWeb social profile, and we're gonna make you one!" 
} + p { "Thanks to some clever markup, it will be readable by both humans and machines looking at your homepage."} + p { + "If you make a mistake, don't worry, you're gonna be able to edit this later." + "The only mandatory field is your name." + } + + div.form_group { + label[for="hcard_name"] { "Your name" } + input #hcard_name[name="hcard_name", placeholder="Your name"]; + small { + "No need to write the name as in your passport, this is not a legal document " + "- just write how you want to be called on the network. This name will be also " + "shown whenever you leave a comment on someone else's post using your website." + } + } + + div.form_group { + label[for="pronouns"] { "Your pronouns" } + div.multi_input #pronouns { + template { + input #hcard_pronouns[name="hcard_pronouns", placeholder="they/them?"]; + } + button.add_more[type="button", "aria-label"="Add more"] { "[+] Add more" } + } + small { + "Write which pronouns you use for yourself. It's a free-form field " + "so don't feel constrained - but keep it compact, as it'll be shown in a lot of places." + } + } + + div.form_group { + label[for="urls"] { "Links to other pages of you" } + div.multi_input #urls { + template { + input #hcard_url[name="hcard_url", placeholder="https://example.com/"]; + } + button.add_more[type="button", "aria-label"="Add more"] { "[+] Add more" } + } + small { + "These URLs will help your readers find you elsewhere and will help you that whoever owns these pages owns your website too" + " in case the links are mutual. So make sure to put a link to your site in your other social profiles!" + } + } + + div.form_group { + label[for="hcard_note"] { "A little about yourself" } + textarea #hcard_note[name="hcard_note", placeholder="Loves cooking, plants, cats, dogs and racoons."] {} + small { "A little bit of introduction. Just one paragraph, and note, you can't use HTML here (yet)." 
} + // TODO: HTML e-note instead of p-note + } + + // TODO: u-photo upload - needs media endpoint cooperation + + div.switch_card_buttons { + button.switch_card.prev_card[type="button", "data-card"="intro"] { "Previous" } + button.switch_card.next_card[type="button", "data-card"="settings"] { "Next" } + } + } + + fieldset #settings[style="display: none"] { + legend { "Your website" } + p { "Ok, it's nice to know you more. Tell me about what you'll be writing and how you want to name your blog." } + // TODO: site-name, saved to settings + + div.form_group { + label[for="blog_name"] { "Your website's name"} + input #blog_name[name="blog_name", placeholder="Kitty Box!"]; + small { "It'll get shown in the title of your blog, in the upper left corner!" } + } + + div.form_group { + label[for="custom_feeds"] { "Custom feeds" } + small { + p { + "You can set up custom feeds to post your stuff to. " + "This is a nice way to organize stuff into huge folders, like all your trips or your quantified-self data." + } + p { + "Feeds can be followed individually, which makes it easy for users who are interested in certain types " + "of content you produce to follow your adventures in certain areas of your life without cluttering their " + "readers." + } + p { + "We will automatically create some feeds for you aside from these so you won't have to - including a main feed, " + "address book (for venues you go to and people you talk about), a cookbook for your recipes and some more." + // TODO: Put a link to documentation explaining feeds in more detail. + } + } + div.multi_input #custom_feeds { + template { + fieldset.feed { + div.form_group { + label[for="feed_name"] { "Name" } + input #feed_name[name="feed_name", placeholder="My cool feed"]; + small { "This is a name that will identify this feed to the user. Make it short and descriptive!" 
} + } + div.form_group { + label[for="feed_slug"] { "Slug" } + input #feed_slug[name="feed_slug", placeholder="my-cool-feed"]; + small { "This will form a pretty URL for the feed. For example: https://example.com/feeds/my-cool-feed" } + } + } + } + button.add_more[type="button", "aria-label"="Add more"] { "[+] Add More" } + } + } + + div.switch_card_buttons { + button.switch_card.prev_card[type="button", "data-card"="hcard"] { "Previous" } + button.switch_card.next_card[type="button", "data-card"="firstpost"] { "Next" } + } + } + + fieldset #firstpost[style="display: none"] { + legend { "Your first post" } + p { "Maybe you should start writing your first posts now. How about a short note?" } + p { "A note is a short-form post (not unlike a tweet - but without the actual character limit) that doesn't bear a title." } + p { + "Consider telling more about yourself, your skills and interests in this note " + @markup::raw("—") + " though you're free to write anything you want. (By the way, you can use " + a[href="https://daringfireball.net/projects/markdown/syntax"] { "Markdown" } + " here to spice up your note!)" + } + + textarea #first_post_content[style="width: 100%; height: 8em", placeholder="Hello! I am really excited about #IndieWeb"] {} + + div.switch_card_buttons { + button.switch_card.prev_card[type="button", "data-card"="settings"] { "Previous" } + button[type="submit"] { "Finish" } + } + } + } + } +} diff --git a/templates/src/templates.rs b/templates/src/templates.rs new file mode 100644 index 0000000..63c67c5 --- /dev/null +++ b/templates/src/templates.rs @@ -0,0 +1,168 @@ +use http::StatusCode; +use kittybox_util::MicropubChannel; + +use crate::{Feed, VCard}; + +markup::define! 
{ + Template<'a>(title: &'a str, blog_name: &'a str, feeds: Vec, user: Option, content: String) { + @markup::doctype() + html { + head { + title { @title } + link[rel="preconnect", href="https://fonts.gstatic.com"]; + link[rel="stylesheet", href="/.kittybox/static/style.css"]; + meta[name="viewport", content="initial-scale=1, width=device-width"]; + + link[rel="micropub", href="/.kittybox/micropub"]; + link[rel="micropub_media", href="/.kittybox/media"]; + link[rel="indieauth_metadata", href="/.kittybox/indieauth/metadata"]; + link[rel="webmention", href="/.kittybox/webmention"]; + // legacy links for some dumb clients + link[rel="authorization_endpoint", href="/.kittybox/indieauth/auth"]; + link[rel="token_endpoint", href="/.kittybox/indieauth/token"]; + // LibreJS-compliant JS licensing info (because TypeScript is a bitch) + link[rel="jslicense", href="/.kittybox/static/jslicense.html"]; + /*@if let Some(endpoints) = endpoints { + @if let Some(webmention) = &endpoints.webmention { + link[rel="webmention", href=&webmention]; + } + @if let Some(microsub) = &endpoints.microsub { + link[rel="microsub", href=µsub]; + } + }*/ + } + body { + a[href="#main_content", id="skip-to-content"] { "Skip to content" } + // TODO Somehow compress headerbar into a menu when the screen space is tight + nav #headerbar { + ul { + li { a #homepage[href="/"] { @blog_name } } + @for feed in feeds.iter() { + li { a[href=&feed.uid] { @feed.name } } + } + li.shiftright { + @if user.is_none() { + a #login[href="/login"] { "Sign in" } + } else { + span { + @user.as_ref().unwrap() " - " a #logout[href="/logout"] { "Sign out" } + } + } + } + } + } + main #main_content { + @markup::raw(content) + } + footer { + p { + "Powered by " a[href="https://sr.ht/~vikanezrimaya/kittybox"] { + "Kittybox" + } + } + } + } + } + } + MainPage<'a>(feed: &'a serde_json::Value, card: &'a serde_json::Value, cursor: Option<&'a str>, webring: bool) { + .sidebyside { + @VCard { card } + #dynamicstuff { + div { + p { 
"This section will provide interesting statistics or tidbits about my life in this exact moment (with maybe a small delay)." } + p { "It will probably require JavaScript to self-update, but I promise to keep this widget lightweight and open-source!" } + p { small { + "JavaScript isn't a menace, stop fearing it or I will switch to WebAssembly " + "and knock your nico-nico-kneecaps so fast with its speed you won't even notice that... " + small { "omae ha mou shindeiru" } + @markup::raw("") + } } + } + @if *webring { + #webring { + a[href="https://xn--sr8hvo.ws/previous"] { "←" } + " An " + a[href="https://xn--sr8hvo.ws"] { + "IndieWeb Webring" + } + " πŸ•ΈπŸ’ " + a[href="https://xn--sr8hvo.ws/next"] { "β†’" } + } + } + } + } + @Feed { feed, cursor: *cursor } + } + ErrorPage(code: StatusCode, msg: Option) { + h1 { @format!("HTTP {code}") } + @match *code { + StatusCode::UNAUTHORIZED => { + p { "Looks like you need to authenticate yourself before seeing this page. Try logging in with IndieAuth using the Login button above!" } + } + StatusCode::FORBIDDEN => { + p { "Looks like you're forbidden from viewing this page." } + p { + "This might've been caused by being banned from viewing my website" + "or simply by trying to see what you're not supposed to see, " + "like a private post that's not intended for you. It's ok, it happens." + } + } + StatusCode::GONE => { + p { "Looks like the page you're trying to find is gone and is never coming back." } + } + StatusCode::UNAVAILABLE_FOR_LEGAL_REASONS => { + p { "The page is there, but I can't legally provide it to you because the censorship said so." } + } + StatusCode::NOT_FOUND => { + p { "Looks like there's no such page. Maybe you or someone else mistyped a URL or my database experienced data loss." } + } + StatusCode::IM_A_TEAPOT => { + p { "Wait, do you seriously expect my website to brew you coffee? It's not a coffee machine!" } + + p { + small { + "I could brew you some coffee tho if we meet one day... 
" + small { + i { + "i-it's nothing personal, I just like brewing coffee, b-baka!!!~ >. { + @match msg { + None => { + p { + "There was an undescribed error in your request. " + "Please try again later or with a different request." + } + } + Some(msg) => { + p { + "There was a following error in your request:" + } + blockquote { pre { @msg } } + } + } + } + StatusCode::INTERNAL_SERVER_ERROR => { + @match msg { + None => { + p { "It seems like you have found an error. Not to worry, it has already been logged." } + } + Some(msg) => { + p { "The server encountered an error while processing your request:" } + blockquote { @msg } + p { "Don't worry, it has already been logged." } + } + } + } + _ => { + p { "It seems like you have found an error. Not to worry, it has already been logged." } + } + } + P { "For now, may I suggest to visit " a[href="/"] {"the main page"} " of this website?" } + + } +} diff --git a/util/Cargo.toml b/util/Cargo.toml new file mode 100644 index 0000000..0425849 --- /dev/null +++ b/util/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "kittybox-util" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[features] +fs = ["rand", "tokio", "tokio/fs"] + +[dependencies] +serde = { version = "^1.0.170", features = ["derive"] } +serde_json = "^1.0.64" +axum-core = "^0.3.4" +http = "^0.2.7" +async-trait = "^0.1.50" +futures-util = "^0.3.14" +uuid = "^1.3.3" +[dependencies.rand] +version = "^0.8.5" +optional = true +[dependencies.tokio] +version = "^1.16.1" +features = ["tracing"] +optional = true +[dependencies.sqlx] +version = "0.7" +features = ["json"] +optional = true \ No newline at end of file diff --git a/util/src/error.rs b/util/src/error.rs new file mode 100644 index 0000000..1c95020 --- /dev/null +++ b/util/src/error.rs @@ -0,0 +1,95 @@ +use serde::{Deserialize, Serialize}; +use http::StatusCode; +use axum_core::response::{Response, IntoResponse}; + 
+#[derive(Serialize, Deserialize, PartialEq, Eq, Debug)] +#[serde(rename_all = "snake_case")] +/// Kinds of errors that can happen within a Micropub operation. +pub enum ErrorType { + /// An erroneous attempt to create something that already exists. + AlreadyExists, + /// Current user is expressly forbidden from performing this action. + Forbidden, + /// The Micropub server experienced an internal error. + InternalServerError, + /// The request was invalid or malformed. + InvalidRequest, + /// The provided OAuth2 scopes were insufficient to allow performing this action. + InvalidScope, + /// There was no token or other means of authorization in the request. + NotAuthorized, + /// Whatever was requested was not found. + NotFound, + /// The request payload was of a type unsupported by the Micropub endpoint. + UnsupportedMediaType, +} + +/// Representation of the Micropub API error. +#[derive(Serialize, Deserialize, Debug)] +pub struct MicropubError { + /// General kind of an error that occured. + pub error: ErrorType, + /// A human-readable error description intended for application developers. + // TODO use Cow<'static, str> to save on heap allocations + pub error_description: String, +} + +impl std::error::Error for MicropubError {} + +impl std::fmt::Display for MicropubError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str("Micropub error: ")?; + f.write_str(&self.error_description) + } +} + +impl From for MicropubError { + fn from(err: serde_json::Error) -> Self { + use ErrorType::*; + Self { + error: InvalidRequest, + error_description: err.to_string(), + } + } +} + +impl MicropubError { + /// Create a new Micropub error. 
+ pub fn new(error: ErrorType, error_description: &str) -> Self { + Self { + error, + error_description: error_description.to_owned(), + } + } +} + +impl From<&MicropubError> for StatusCode { + fn from(err: &MicropubError) -> Self { + use ErrorType::*; + match err.error { + AlreadyExists => StatusCode::CONFLICT, + Forbidden => StatusCode::FORBIDDEN, + InternalServerError => StatusCode::INTERNAL_SERVER_ERROR, + InvalidRequest => StatusCode::BAD_REQUEST, + InvalidScope => StatusCode::UNAUTHORIZED, + NotAuthorized => StatusCode::UNAUTHORIZED, + NotFound => StatusCode::NOT_FOUND, + UnsupportedMediaType => StatusCode::UNSUPPORTED_MEDIA_TYPE, + } + } +} +impl From for StatusCode { + fn from(err: MicropubError) -> Self { + (&err).into() + } +} + +impl IntoResponse for MicropubError { + fn into_response(self) -> Response { + IntoResponse::into_response(( + StatusCode::from(&self), + [("Content-Type", "application/json")], + serde_json::to_string(&self).unwrap(), + )) + } +} diff --git a/util/src/lib.rs b/util/src/lib.rs new file mode 100644 index 0000000..c49bdf5 --- /dev/null +++ b/util/src/lib.rs @@ -0,0 +1,123 @@ +#![warn(missing_docs)] +//! Small things that couldn't fit elsewhere in Kittybox, yet may be +//! useful on their own or in multiple Kittybox crates. +//! +//! Some things are gated behind features, namely: +//! - `fs` - enables use of filesystem-related utilities +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Serialize, Deserialize)] +pub struct IndiewebEndpoints { + pub authorization_endpoint: String, + pub token_endpoint: String, + pub webmention: Option, + pub microsub: Option, +} + +/// Data structure representing a Micropub channel in the ?q=channels output. +#[derive(Serialize, Deserialize, PartialEq, Debug)] +#[cfg_attr(feature = "sqlx", derive(sqlx::FromRow))] +pub struct MicropubChannel { + /// The channel's UID. It is usually also a publically accessible permalink URL. 
+ pub uid: String, + /// The channel's user-friendly name used to recognize it in lists. + pub name: String, +} + +#[derive(Debug, Default)] +/// Common types of webmentions. +pub enum MentionType { + /// Corresponds to a `u-in-reply-to` link. + Reply, + /// Corresponds to a `u-like-of` link. + Like, + /// Corresponds to a `u-repost-of` link. + Repost, + /// Corresponds to a `u-bookmark-of` link. + Bookmark, + /// A plain link without MF2 annotations. + #[default] + Mention +} + +/// Common errors from the IndieWeb protocols that can be reused between modules. +pub mod error; +pub use error::{ErrorType, MicropubError}; + +/// Common data-types useful in creating smart authentication systems. +pub mod auth { + #[derive(PartialEq, Eq, Hash, Clone, Copy)] + pub enum EnrolledCredential { + /// An indicator that a password is enrolled. Passwords can be + /// used to recover from a lost token. + Password, + /// An indicator that one or more WebAuthn credentials were + /// enrolled. + WebAuthn + } +} + +/// A collection of traits for implementing a robust job queue. +pub mod queue; + +#[cfg(feature = "fs")] +/// Commonly-used operations with the file system in Kittybox's +/// underlying storage mechanisms. +pub mod fs { + use std::io::{self, Result}; + use std::path::{Path, PathBuf}; + use rand::{Rng, distributions::Alphanumeric}; + use tokio::fs; + + /// Create a temporary file named `temp.[a-zA-Z0-9]{length}` in + /// the given location and immediately open it. Returns the + /// filename and the corresponding file handle. It is the caller's + /// responsibility to clean up the temporary file when it is no + /// longer needed. + /// + /// Uses [`OpenOptions::create_new`][fs::OpenOptions::create_new] + /// to detect filename collisions, in which case it will + /// automatically retry until the operation succeeds. + /// + /// # Errors + /// + /// Returns the underlying [`io::Error`] if the operation fails + /// due to reasons other than filename collision. 
+ pub async fn mktemp(dir: T, basename: B, length: usize) -> Result<(PathBuf, fs::File)> + where + T: AsRef, + B: Into> + { + let dir = dir.as_ref(); + let basename = basename.into().unwrap_or(""); + fs::create_dir_all(dir).await?; + + loop { + let filename = dir.join(format!( + "{}{}{}", + basename, + if basename.is_empty() { "" } else { "." }, + { + let string = rand::thread_rng() + .sample_iter(&Alphanumeric) + .take(length) + .collect::>(); + String::from_utf8(string).unwrap() + } + )); + + match fs::OpenOptions::new() + .create_new(true) + .write(true) + .open(&filename) + .await + { + Ok(file) => return Ok((filename, file)), + Err(err) => match err.kind() { + io::ErrorKind::AlreadyExists => continue, + _ => return Err(err) + } + } + } + } +} diff --git a/util/src/queue.rs b/util/src/queue.rs new file mode 100644 index 0000000..c880597 --- /dev/null +++ b/util/src/queue.rs @@ -0,0 +1,66 @@ +use futures_util::Stream; +use std::pin::Pin; +use uuid::Uuid; + +#[async_trait::async_trait] +/// A job queue that can store and return jobs. +pub trait JobQueue: Send + Sync + Sized + Clone + 'static { + /// A type of job object that will be returned by the queue. + type Job: Job; + /// Error type that the queue can produce in its work. + type Error: std::error::Error + Send + Sync + Sized; + + /// Get one item from the job queue, if the job queue has pending + /// items available. + /// + /// # Errors + /// + /// Returns an error if a job queue failed in some way. Having no + /// items is not a failure, in which case `Ok(None)` is returned. + async fn get_one(&self) -> Result, Self::Error>; + /// Put an item into a job queue, returning its UUID. + async fn put(&self, item: &T) -> Result; + + /* + /// Check the amount of pending and stuck items in the job queue. + async fn len(&self) -> Result<(usize, usize), Self::Error>; + /// Returns whether the job queue has some pending items. 
+ async fn is_empty(&self) -> Result { + Ok(self.len().await?.0 == 0) + } + /// Returns whether the job queue has some stuck items that + /// require manual cleanup. + async fn has_stuck(&self) -> Result { + Ok(self.len().await?.1 > 0) + } + */ + + /// Consume the job queue object and return a stream of unstuck + /// items from the job queue. + /// + /// Note that one item may be returned several times if it is not + /// marked as done. + async fn into_stream(self) -> Result> + Send>>, Self::Error>; +} + +#[async_trait::async_trait] +/// A job description yielded from a job queue. +/// +/// # Implementors +/// +/// On [`Drop`], the job should be returned to a job queue. If your +/// job queue tracks attempts, the counter should be incremented by +/// one. +/// +/// Figuring how to do this asynchronously from a synchronous trait +/// is left as an exercise to the reader. +pub trait Job>: Send + Sync + Sized { + /// Get the object describing the task itself. + fn job(&self) -> &T; + /// Mark the job as done and remove it from the job queue. + async fn done(self) -> Result<(), Q::Error>; +} + +/// An object describing the job itself, returned as part of a +/// [`Job`]. +pub trait JobItem: Send + Sync + Sized + std::fmt::Debug {} -- cgit 1.4.1