author    Vika <vika@fireburn.ru>  2022-05-24 17:18:30 +0300
committer Vika <vika@fireburn.ru>  2022-05-24 17:18:30 +0300
commit    5610a5f0bf1a9df02bd3d5b55e2cdebef2440360 (patch)
tree      8394bcf1dcc204043d7adeb8dde2e2746977606e /kittybox-rs
parent    2f93873122b47e42f7ee1c38f1f04d052a63599c (diff)
download  kittybox-5610a5f0bf1a9df02bd3d5b55e2cdebef2440360.tar.zst
flake.nix: reorganize
 - Kittybox's source code is moved to a subfolder
   - This improves Nix build caching, since changes to files outside
     that subfolder no longer invalidate the build
 - Package and test definitions were split out into separate files
   - This makes my flake.nix much easier to navigate
   - This also makes it somewhat possible to use without flakes (but
     it is still not easy, so use flakes!)
 - Some attributes were moved to comply with Nix 2.8's changes to the
   flake schema
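
Below is a minimal sketch of the layout this reorganization implies. The file names packages.nix and tests.nix, the system value, and the exact attribute set are illustrative assumptions rather than the actual contents of this repository's flake.nix; the sketch only shows the shape described above: the crate sourced from the kittybox-rs/ subfolder, package and test definitions imported from separate files, and outputs arranged per the newer flake schema.

# Hypothetical sketch only; names and layout are assumptions,
# not taken from this commit.
{
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";

  outputs = { self, nixpkgs }:
    let
      system = "x86_64-linux";
      pkgs = nixpkgs.legacyPackages.${system};
      # The package definition lives in its own file, so it can also be
      # imported without flakes via callPackage on ./packages.nix.
      kittybox = pkgs.callPackage ./packages.nix {
        # Only the kittybox-rs/ subfolder is used as the source, so edits
        # to unrelated files in the repository do not invalidate the build.
        src = ./kittybox-rs;
      };
    in {
      # Attributes follow the updated flake schema mentioned above,
      # e.g. packages.<system>.default rather than defaultPackage.<system>.
      packages.${system}.default = kittybox;
      checks.${system} = import ./tests.nix { inherit pkgs kittybox; };
    };
}

With a layout like this, non-flake users can still evaluate ./packages.nix directly through callPackage, which is what makes the "use without flakes" path possible, if still clunky.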
Diffstat (limited to 'kittybox-rs')
-rw-r--r--  kittybox-rs/Cargo.lock  4247
-rw-r--r--  kittybox-rs/Cargo.toml  114
-rwxr-xr-x  kittybox-rs/dev.sh  17
-rw-r--r--  kittybox-rs/src/bin/kittybox_bulk_import.rs  66
-rw-r--r--  kittybox-rs/src/bin/kittybox_database_converter.rs  106
-rw-r--r--  kittybox-rs/src/bin/pyindieblog_to_kittybox.rs  68
-rw-r--r--  kittybox-rs/src/database/file/mod.rs  619
-rw-r--r--  kittybox-rs/src/database/memory.rs  200
-rw-r--r--  kittybox-rs/src/database/mod.rs  539
-rw-r--r--  kittybox-rs/src/database/redis/edit_post.lua  93
-rw-r--r--  kittybox-rs/src/database/redis/mod.rs  392
-rw-r--r--  kittybox-rs/src/frontend/login.rs  333
-rw-r--r--  kittybox-rs/src/frontend/mod.rs  459
-rw-r--r--  kittybox-rs/src/frontend/onboarding.css  33
-rw-r--r--  kittybox-rs/src/frontend/onboarding.js  87
-rw-r--r--  kittybox-rs/src/frontend/style.css  194
-rw-r--r--  kittybox-rs/src/index.html  182
-rw-r--r--  kittybox-rs/src/indieauth.rs  291
-rw-r--r--  kittybox-rs/src/lib.rs  103
-rw-r--r--  kittybox-rs/src/main.rs  256
-rw-r--r--  kittybox-rs/src/media/mod.rs  46
-rw-r--r--  kittybox-rs/src/metrics.rs  21
-rw-r--r--  kittybox-rs/src/micropub/get.rs  82
-rw-r--r--  kittybox-rs/src/micropub/mod.rs  964
-rw-r--r--  kittybox-rs/src/micropub/post.rs  944
-rw-r--r--  kittybox-rs/templates/Cargo.toml  26
-rw-r--r--  kittybox-rs/templates/src/lib.rs  347
-rw-r--r--  kittybox-rs/templates/src/login.rs  17
-rw-r--r--  kittybox-rs/templates/src/onboarding.rs  192
-rw-r--r--  kittybox-rs/templates/src/templates.rs  545
-rw-r--r--  kittybox-rs/util/Cargo.toml  12
-rw-r--r--  kittybox-rs/util/src/lib.rs  18
32 files changed, 11613 insertions(+), 0 deletions(-)
diff --git a/kittybox-rs/Cargo.lock b/kittybox-rs/Cargo.lock
new file mode 100644
index 0000000..7b2bbef
--- /dev/null
+++ b/kittybox-rs/Cargo.lock
@@ -0,0 +1,4247 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "aead"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fc95d1bdb8e6666b2b217308eeeb09f2d6728d104be3e31916cc74d15420331"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "aes"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "884391ef1066acaa41e766ba8f596341b96e93ce34f9a43e7d24bf0a0eaf0561"
+dependencies = [
+ "aes-soft",
+ "aesni",
+ "cipher",
+]
+
+[[package]]
+name = "aes-gcm"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5278b5fabbb9bd46e24aa69b2fdea62c99088e0a950a9be40e3e0101298f88da"
+dependencies = [
+ "aead",
+ "aes",
+ "cipher",
+ "ctr",
+ "ghash",
+ "subtle",
+]
+
+[[package]]
+name = "aes-soft"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be14c7498ea50828a38d0e24a765ed2effe92a705885b57d029cd67d45744072"
+dependencies = [
+ "cipher",
+ "opaque-debug",
+]
+
+[[package]]
+name = "aesni"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea2e11f5e94c2f7d386164cc2aa1f97823fed6f259e486940a71c174dd01b0ce"
+dependencies = [
+ "cipher",
+ "opaque-debug",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66"
+dependencies = [
+ "memchr 0.1.11",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+dependencies = [
+ "memchr 2.5.0",
+]
+
+[[package]]
+name = "alloc-no-stdlib"
+version = "2.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "35ef4730490ad1c4eae5c4325b2a95f521d023e5c885853ff7aca0a6a1631db3"
+
+[[package]]
+name = "alloc-stdlib"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "697ed7edc0f1711de49ce108c541623a0af97c6c60b2f6e2b65229847ac843c2"
+dependencies = [
+ "alloc-no-stdlib",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.57"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc"
+
+[[package]]
+name = "ascii-canvas"
+version = "3.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8824ecca2e851cec16968d54a01dd372ef8f95b244fb84b84e70128be347c3c6"
+dependencies = [
+ "term",
+]
+
+[[package]]
+name = "assert-json-diff"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50f1c3703dd33532d7f0ca049168930e9099ecac238e23cf932f3a69c42f06da"
+dependencies = [
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "async-channel"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2114d64672151c0c5eaa5e131ec84a74f06e1e559830dabba01ca30605d66319"
+dependencies = [
+ "concurrent-queue",
+ "event-listener",
+ "futures-core",
+]
+
+[[package]]
+name = "async-compression"
+version = "0.3.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8589c784ff02ac80dafc5e4116c3a2a3743ac5e0c902483518a88eec6559cf99"
+dependencies = [
+ "brotli",
+ "flate2",
+ "futures-core",
+ "memchr 2.5.0",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "async-executor"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "871f9bb5e0a22eeb7e8cf16641feb87c9dc67032ccf8ff49e772eb9941d3a965"
+dependencies = [
+ "async-task",
+ "concurrent-queue",
+ "fastrand",
+ "futures-lite",
+ "once_cell",
+ "slab",
+]
+
+[[package]]
+name = "async-global-executor"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c290043c9a95b05d45e952fb6383c67bcb61471f60cfa21e890dba6654234f43"
+dependencies = [
+ "async-channel",
+ "async-executor",
+ "async-io",
+ "async-mutex",
+ "blocking",
+ "futures-lite",
+ "num_cpus",
+ "once_cell",
+]
+
+[[package]]
+name = "async-io"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a811e6a479f2439f0c04038796b5cfb3d2ad56c230e0f2d3f7b04d68cfee607b"
+dependencies = [
+ "concurrent-queue",
+ "futures-lite",
+ "libc",
+ "log 0.4.17",
+ "once_cell",
+ "parking",
+ "polling",
+ "slab",
+ "socket2",
+ "waker-fn",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "async-lock"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e97a171d191782fba31bb902b14ad94e24a68145032b7eedf871ab0bc0d077b6"
+dependencies = [
+ "event-listener",
+]
+
+[[package]]
+name = "async-mutex"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "479db852db25d9dbf6204e6cb6253698f175c15726470f78af0d918e99d6156e"
+dependencies = [
+ "event-listener",
+]
+
+[[package]]
+name = "async-object-pool"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aeb901c30ebc2fc4ab46395bbfbdba9542c16559d853645d75190c3056caf3bc"
+dependencies = [
+ "async-std",
+]
+
+[[package]]
+name = "async-process"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf2c06e30a24e8c78a3987d07f0930edf76ef35e027e7bdb063fccafdad1f60c"
+dependencies = [
+ "async-io",
+ "blocking",
+ "cfg-if",
+ "event-listener",
+ "futures-lite",
+ "libc",
+ "once_cell",
+ "signal-hook",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "async-std"
+version = "1.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "52580991739c5cdb36cde8b2a516371c0a3b70dda36d916cc08b82372916808c"
+dependencies = [
+ "async-channel",
+ "async-global-executor",
+ "async-io",
+ "async-lock",
+ "async-process",
+ "crossbeam-utils",
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-lite",
+ "gloo-timers",
+ "kv-log-macro",
+ "log 0.4.17",
+ "memchr 2.5.0",
+ "num_cpus",
+ "once_cell",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+ "wasm-bindgen-futures",
+]
+
+[[package]]
+name = "async-stream"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dad5c83079eae9969be7fadefe640a1c566901f05ff91ab221de4b6f68d9507e"
+dependencies = [
+ "async-stream-impl",
+ "futures-core",
+]
+
+[[package]]
+name = "async-stream-impl"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "async-task"
+version = "4.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30696a84d817107fc028e049980e09d5e140e8da8f1caeb17e8e950658a3cea9"
+
+[[package]]
+name = "async-timer"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba5fa6ed76cb2aa820707b4eb9ec46f42da9ce70b0eafab5e5e34942b38a44d5"
+dependencies = [
+ "libc",
+ "wasm-bindgen",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "async-trait"
+version = "0.1.53"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed6aa3524a2dfcf9fe180c51eae2b58738348d819517ceadf95789c51fff7600"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "atomic-waker"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "065374052e7df7ee4047b1160cca5e1467a12351a40b3da123c870ba0b8eda2a"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "autocfg"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78"
+dependencies = [
+ "autocfg 1.1.0",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "axum"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00f1e8a972137fad81e2a1a60b86ff17ce0338f8017264e45a9723d0083c39a1"
+dependencies = [
+ "async-trait",
+ "axum-core",
+ "bitflags",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "hyper",
+ "itoa 1.0.1",
+ "matchit",
+ "memchr 2.5.0",
+ "mime",
+ "percent-encoding",
+ "pin-project-lite",
+ "serde",
+ "sync_wrapper",
+ "tokio",
+ "tower",
+ "tower-http",
+ "tower-layer",
+ "tower-service",
+]
+
+[[package]]
+name = "axum-core"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da31c0ed7b4690e2c78fe4b880d21cd7db04a346ebc658b4270251b695437f17"
+dependencies = [
+ "async-trait",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "mime",
+]
+
+[[package]]
+name = "base-x"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dc19a4937b4fbd3fe3379793130e42060d10627a360f2127802b10b87e7baf74"
+
+[[package]]
+name = "base64"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
+
+[[package]]
+name = "basic-cookies"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb53b6b315f924c7f113b162e53b3901c05fc9966baf84d201dfcc7432a4bb38"
+dependencies = [
+ "lalrpop",
+ "lalrpop-util",
+ "regex 1.5.5",
+]
+
+[[package]]
+name = "bit-set"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de"
+dependencies = [
+ "bit-vec",
+]
+
+[[package]]
+name = "bit-vec"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "block-buffer"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "block-buffer"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "blocking"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c6ccb65d468978a086b69884437ded69a90faab3bbe6e67f242173ea728acccc"
+dependencies = [
+ "async-channel",
+ "async-task",
+ "atomic-waker",
+ "fastrand",
+ "futures-lite",
+ "once_cell",
+]
+
+[[package]]
+name = "brotli"
+version = "3.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+ "brotli-decompressor",
+]
+
+[[package]]
+name = "brotli-decompressor"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59ad2d4653bf5ca36ae797b1f4bb4dbddb60ce49ca4aed8a2ce4829f60425b80"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+]
+
+[[package]]
+name = "buf_redux"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b953a6887648bb07a535631f2bc00fbdb2a2216f135552cb3f534ed136b9c07f"
+dependencies = [
+ "memchr 2.5.0",
+ "safemem",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899"
+
+[[package]]
+name = "byteorder"
+version = "1.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
+
+[[package]]
+name = "bytes"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8"
+
+[[package]]
+name = "cache-padded"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1db59621ec70f09c5e9b597b220c7a2b43611f4710dc03ceb8748637775692c"
+
+[[package]]
+name = "castaway"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a2698f953def977c68f935bb0dfa959375ad4638570e969e2f1e9f433cbf1af6"
+
+[[package]]
+name = "cc"
+version = "1.0.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "chrono"
+version = "0.4.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
+dependencies = [
+ "libc",
+ "num-integer",
+ "num-traits",
+ "serde",
+ "time 0.1.44",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "cipher"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "12f8e7987cbd042a63249497f41aed09f8e65add917ea6566effbc56578d6801"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "cloudabi"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "colored"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd"
+dependencies = [
+ "atty",
+ "lazy_static",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "combine"
+version = "4.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a604e93b79d1808327a6fca85a6f2d69de66461e7620f5a4cbf5fb4d1d7c948"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "memchr 2.5.0",
+ "pin-project-lite",
+ "tokio",
+ "tokio-util 0.7.1",
+]
+
+[[package]]
+name = "concurrent-queue"
+version = "1.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30ed07550be01594c6026cff2a1d7fe9c8f683caa798e12b68694ac9e88286a3"
+dependencies = [
+ "cache-padded",
+]
+
+[[package]]
+name = "console-api"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24cb05777feccbb2642d4f2df44d0505601a2cd88ca517d8c913f263a5a8dc8b"
+dependencies = [
+ "prost",
+ "prost-types",
+ "tonic",
+ "tracing-core",
+]
+
+[[package]]
+name = "console-subscriber"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f21a16ee925aa9d2bad2e296beffd6c5b1bfaad50af509d305b8e7f23af20fb"
+dependencies = [
+ "console-api",
+ "crossbeam-channel",
+ "crossbeam-utils",
+ "futures",
+ "hdrhistogram",
+ "humantime",
+ "prost-types",
+ "serde",
+ "serde_json",
+ "thread_local 1.1.4",
+ "tokio",
+ "tokio-stream",
+ "tonic",
+ "tracing",
+ "tracing-core",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "const_fn"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fbdcdcb6d86f71c5e97409ad45898af11cbc995b4ee8112d59095a28d376c935"
+
+[[package]]
+name = "convert_case"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
+
+[[package]]
+name = "cookie"
+version = "0.14.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "03a5d7b21829bc7b4bf4754a978a241ae54ea55a40f92bb20216e54096f4b951"
+dependencies = [
+ "aes-gcm",
+ "base64",
+ "hkdf",
+ "hmac",
+ "percent-encoding",
+ "rand 0.8.5",
+ "sha2",
+ "time 0.2.27",
+ "version_check",
+]
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "cpuid-bool"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcb25d077389e53838a8158c8e99174c5a9d902dee4904320db714f3c653ffba"
+
+[[package]]
+name = "crc32fast"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38"
+dependencies = [
+ "cfg-if",
+ "lazy_static",
+]
+
+[[package]]
+name = "crunchy"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
+
+[[package]]
+name = "crypto-common"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
+[[package]]
+name = "crypto-mac"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bff07008ec701e8028e2ceb8f83f0e4274ee62bd2dbdc4fefff2e9a91824081a"
+dependencies = [
+ "generic-array",
+ "subtle",
+]
+
+[[package]]
+name = "cssparser"
+version = "0.27.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "754b69d351cdc2d8ee09ae203db831e005560fc6030da058f86ad60c92a9cb0a"
+dependencies = [
+ "cssparser-macros",
+ "dtoa-short",
+ "itoa 0.4.8",
+ "matches",
+ "phf 0.8.0",
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "smallvec",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "cssparser-macros"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dfae75de57f2b2e85e8768c3ea840fd159c8f33e2b6522c7835b7abac81be16e"
+dependencies = [
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "ctor"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f877be4f7c9f246b183111634f75baa039715e3f46ce860677d3b19a69fb229c"
+dependencies = [
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "ctr"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fb4a30d54f7443bf3d6191dcd486aca19e67cb3c49fa7a06a319966346707e7f"
+dependencies = [
+ "cipher",
+]
+
+[[package]]
+name = "curl"
+version = "0.4.43"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37d855aeef205b43f65a5001e0997d81f8efca7badad4fad7d897aa7f0d0651f"
+dependencies = [
+ "curl-sys",
+ "libc",
+ "openssl-probe",
+ "openssl-sys",
+ "schannel",
+ "socket2",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "curl-sys"
+version = "0.4.54+curl-7.83.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25debbc3365c3e7ee79e30918df5759e84dbd4485807a18829188abf1786ec4e"
+dependencies = [
+ "cc",
+ "libc",
+ "libnghttp2-sys",
+ "libz-sys",
+ "openssl-sys",
+ "pkg-config",
+ "vcpkg",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "data-encoding"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57"
+
+[[package]]
+name = "derive_more"
+version = "0.99.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
+dependencies = [
+ "convert_case",
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "rustc_version 0.4.0",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "deunicode"
+version = "1.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2c9736e15e7df1638a7f6eee92a6511615c738246a052af5ba86f039b65aede"
+
+[[package]]
+name = "diff"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499"
+
+[[package]]
+name = "difference"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
+
+[[package]]
+name = "digest"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "digest"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506"
+dependencies = [
+ "block-buffer 0.10.2",
+ "crypto-common",
+]
+
+[[package]]
+name = "dirs-next"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1"
+dependencies = [
+ "cfg-if",
+ "dirs-sys-next",
+]
+
+[[package]]
+name = "dirs-sys-next"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
+dependencies = [
+ "libc",
+ "redox_users",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "discard"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "212d0f5754cb6769937f4501cc0e67f4f4483c8d2c3e1e922ee9edbe4ab4c7c0"
+
+[[package]]
+name = "dtoa"
+version = "0.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0"
+
+[[package]]
+name = "dtoa-short"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bde03329ae10e79ede66c9ce4dc930aa8599043b0743008548680f25b91502d6"
+dependencies = [
+ "dtoa",
+]
+
+[[package]]
+name = "easy-scraper"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18a857bc01b5ae04874234f6b5d16b8b8fa86910aa5777479c2669b5df607fce"
+dependencies = [
+ "html5ever 0.25.2",
+ "kuchiki",
+ "regex 1.5.5",
+]
+
+[[package]]
+name = "either"
+version = "1.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+
+[[package]]
+name = "ellipse"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1835a82a08e5c9393639e7cf99786a65af71f7fa9df7c91a519f2d52e6fa052d"
+dependencies = [
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "ena"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3"
+dependencies = [
+ "log 0.4.17",
+]
+
+[[package]]
+name = "encoding_rs"
+version = "0.8.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9852635589dc9f9ea1b6fe9f05b50ef208c85c834a562f0c6abb1c475736ec2b"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "env_logger"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
+dependencies = [
+ "log 0.3.9",
+ "regex 0.1.80",
+]
+
+[[package]]
+name = "env_logger"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3"
+dependencies = [
+ "atty",
+ "humantime",
+ "log 0.4.17",
+ "regex 1.5.5",
+ "termcolor",
+]
+
+[[package]]
+name = "event-listener"
+version = "2.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77f3309417938f28bf8228fcff79a4a37103981e3e186d2ccd19c74b38f4eb71"
+
+[[package]]
+name = "faker_rand"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "300d2ddbf2245b5b5e723995e0961033121b4fc2be9045fb661af82bd739ffb6"
+dependencies = [
+ "deunicode",
+ "lazy_static",
+ "rand 0.8.5",
+]
+
+[[package]]
+name = "fastrand"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf"
+dependencies = [
+ "instant",
+]
+
+[[package]]
+name = "fixedbitset"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "279fb028e20b3c4c320317955b77c5e0c9701f05a1d309905d6fc702cdc5053e"
+
+[[package]]
+name = "flate2"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b39522e96686d38f4bc984b9198e3a0613264abaebaff2c5c918bfa6b6da09af"
+dependencies = [
+ "cfg-if",
+ "crc32fast",
+ "libc",
+ "miniz_oxide",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191"
+dependencies = [
+ "matches",
+ "percent-encoding",
+]
+
+[[package]]
+name = "fuchsia-cprng"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
+
+[[package]]
+name = "futf"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843"
+dependencies = [
+ "mac",
+ "new_debug_unreachable",
+]
+
+[[package]]
+name = "futures"
+version = "0.3.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3"
+
+[[package]]
+name = "futures-executor"
+version = "0.3.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b"
+
+[[package]]
+name = "futures-lite"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7694489acd39452c77daa48516b894c153f192c3578d5a839b62c58099fcbf48"
+dependencies = [
+ "fastrand",
+ "futures-core",
+ "futures-io",
+ "memchr 2.5.0",
+ "parking",
+ "pin-project-lite",
+ "waker-fn",
+]
+
+[[package]]
+name = "futures-macro"
+version = "0.3.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "futures-sink"
+version = "0.3.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868"
+
+[[package]]
+name = "futures-task"
+version = "0.3.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a"
+
+[[package]]
+name = "futures-util"
+version = "0.3.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-macro",
+ "futures-sink",
+ "futures-task",
+ "memchr 2.5.0",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+]
+
+[[package]]
+name = "fxhash"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
+dependencies = [
+ "byteorder",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.1.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi 0.9.0+wasi-snapshot-preview1",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi 0.10.0+wasi-snapshot-preview1",
+]
+
+[[package]]
+name = "ghash"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97304e4cd182c3846f7575ced3890c53012ce534ad9114046b0a9e00bb30a375"
+dependencies = [
+ "opaque-debug",
+ "polyval",
+]
+
+[[package]]
+name = "gloo-timers"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fb7d06c1c8cc2a29bee7ec961009a0b2caa0793ee4900c2ffb348734ba1c8f9"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "h2"
+version = "0.3.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37a82c6d637fc9515a4694bbf1cb2457b79d81ce52b3108bdeea58b07dd34a57"
+dependencies = [
+ "bytes",
+ "fnv",
+ "futures-core",
+ "futures-sink",
+ "futures-util",
+ "http",
+ "indexmap",
+ "slab",
+ "tokio",
+ "tokio-util 0.7.1",
+ "tracing",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
+
+[[package]]
+name = "hdrhistogram"
+version = "7.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31672b7011be2c4f7456c4ddbcb40e7e9a4a9fad8efe49a6ebaf5f307d0109c0"
+dependencies = [
+ "base64",
+ "byteorder",
+ "flate2",
+ "nom",
+ "num-traits",
+]
+
+[[package]]
+name = "headers"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4cff78e5788be1e0ab65b04d306b2ed5092c815ec97ec70f4ebd5aee158aa55d"
+dependencies = [
+ "base64",
+ "bitflags",
+ "bytes",
+ "headers-core",
+ "http",
+ "httpdate",
+ "mime",
+ "sha-1 0.10.0",
+]
+
+[[package]]
+name = "headers-core"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429"
+dependencies = [
+ "http",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "hex"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+
+[[package]]
+name = "hkdf"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51ab2f639c231793c5f6114bdb9bbe50a7dbbfcd7c7c6bd8475dec2d991e964f"
+dependencies = [
+ "digest 0.9.0",
+ "hmac",
+]
+
+[[package]]
+name = "hmac"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1441c6b1e930e2817404b5046f1f989899143a12bf92de603b69f4e0aee1e15"
+dependencies = [
+ "crypto-mac",
+ "digest 0.9.0",
+]
+
+[[package]]
+name = "html5ever"
+version = "0.22.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c213fa6a618dc1da552f54f85cba74b05d8e883c92ec4e89067736938084c26e"
+dependencies = [
+ "log 0.4.17",
+ "mac",
+ "markup5ever 0.7.5",
+ "proc-macro2 0.4.30",
+ "quote 0.6.13",
+ "syn 0.15.44",
+]
+
+[[package]]
+name = "html5ever"
+version = "0.25.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e5c13fb08e5d4dfc151ee5e88bae63f7773d61852f3bdc73c9f4b9e1bde03148"
+dependencies = [
+ "log 0.4.17",
+ "mac",
+ "markup5ever 0.10.1",
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "http"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff8670570af52249509a86f5e3e18a08c60b177071826898fde8997cf5f6bfbb"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa 1.0.1",
+]
+
+[[package]]
+name = "http-body"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6"
+dependencies = [
+ "bytes",
+ "http",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "http-range-header"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bfe8eed0a9285ef776bb792479ea3834e8b94e13d615c2f66d03dd50a435a29"
+
+[[package]]
+name = "http-types"
+version = "2.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e9b187a72d63adbfba487f48095306ac823049cb504ee195541e91c7775f5ad"
+dependencies = [
+ "anyhow",
+ "async-channel",
+ "async-std",
+ "base64",
+ "cookie",
+ "futures-lite",
+ "http",
+ "infer",
+ "pin-project-lite",
+ "rand 0.7.3",
+ "serde",
+ "serde_json",
+ "serde_qs",
+ "serde_urlencoded",
+ "url",
+]
+
+[[package]]
+name = "httparse"
+version = "1.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c"
+
+[[package]]
+name = "httpdate"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421"
+
+[[package]]
+name = "httpmock"
+version = "0.6.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c159c4fc205e6c1a9b325cb7ec135d13b5f47188ce175dabb76ec847f331d9bd"
+dependencies = [
+ "assert-json-diff",
+ "async-object-pool",
+ "async-trait",
+ "base64",
+ "basic-cookies",
+ "crossbeam-utils",
+ "form_urlencoded",
+ "futures-util",
+ "hyper",
+ "isahc",
+ "lazy_static",
+ "levenshtein",
+ "log 0.4.17",
+ "regex 1.5.5",
+ "serde",
+ "serde_json",
+ "serde_regex",
+ "similar",
+ "tokio",
+ "url",
+]
+
+[[package]]
+name = "humantime"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
+
+[[package]]
+name = "hyper"
+version = "0.14.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b26ae0a80afebe130861d90abf98e3814a4f28a4c6ffeb5ab8ebb2be311e0ef2"
+dependencies = [
+ "bytes",
+ "futures-channel",
+ "futures-core",
+ "futures-util",
+ "h2",
+ "http",
+ "http-body",
+ "httparse",
+ "httpdate",
+ "itoa 1.0.1",
+ "pin-project-lite",
+ "socket2",
+ "tokio",
+ "tower-service",
+ "tracing",
+ "want",
+]
+
+[[package]]
+name = "hyper-rustls"
+version = "0.23.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d87c48c02e0dc5e3b849a2041db3029fd066650f8f717c07bf8ed78ccb895cac"
+dependencies = [
+ "http",
+ "hyper",
+ "rustls",
+ "tokio",
+ "tokio-rustls",
+]
+
+[[package]]
+name = "hyper-timeout"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1"
+dependencies = [
+ "hyper",
+ "pin-project-lite",
+ "tokio",
+ "tokio-io-timeout",
+]
+
+[[package]]
+name = "idna"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8"
+dependencies = [
+ "matches",
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee"
+dependencies = [
+ "autocfg 1.1.0",
+ "hashbrown",
+]
+
+[[package]]
+name = "infer"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac"
+
+[[package]]
+name = "instant"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "ipnet"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "879d54834c8c76457ef4293a689b2a8c59b076067ad77b15efafbb05f92a592b"
+
+[[package]]
+name = "isahc"
+version = "1.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "480d9158c9977bff0bc024a11dcad04efcd3955c1e55301092b13fc439d41720"
+dependencies = [
+ "async-channel",
+ "castaway",
+ "crossbeam-utils",
+ "curl",
+ "curl-sys",
+ "encoding_rs",
+ "event-listener",
+ "futures-lite",
+ "http",
+ "log 0.4.17",
+ "mime",
+ "once_cell",
+ "polling",
+ "slab",
+ "sluice",
+ "tracing",
+ "tracing-futures",
+ "url",
+ "waker-fn",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "0.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
+
+[[package]]
+name = "itoa"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
+
+[[package]]
+name = "js-sys"
+version = "0.3.57"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "671a26f820db17c2a2750743f1dd03bafd15b98c9f30c7c2628c024c05d73397"
+dependencies = [
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "kernel32-sys"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
+dependencies = [
+ "winapi 0.2.8",
+ "winapi-build",
+]
+
+[[package]]
+name = "kittybox"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "bytes",
+ "chrono",
+ "console-subscriber",
+ "data-encoding",
+ "easy-scraper",
+ "either",
+ "env_logger 0.8.4",
+ "faker_rand",
+ "futures",
+ "futures-util",
+ "http-types",
+ "httpmock",
+ "hyper",
+ "kittybox-templates",
+ "kittybox-util",
+ "lazy_static",
+ "listenfd",
+ "log 0.4.17",
+ "markdown",
+ "markup",
+ "mediatype",
+ "microformats",
+ "mockito",
+ "newbase60",
+ "prometheus",
+ "rand 0.8.5",
+ "redis",
+ "relative-path",
+ "reqwest",
+ "retainer",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "serde_variant",
+ "sha2",
+ "tempdir",
+ "test-logger",
+ "tokio",
+ "tokio-stream",
+ "url",
+ "warp",
+ "warp-prometheus",
+]
+
+[[package]]
+name = "kittybox-templates"
+version = "0.1.0"
+dependencies = [
+ "chrono",
+ "ellipse",
+ "faker_rand",
+ "http",
+ "kittybox-util",
+ "log 0.4.17",
+ "markup",
+ "microformats",
+ "rand 0.8.5",
+ "serde_json",
+ "test-logger",
+]
+
+[[package]]
+name = "kittybox-util"
+version = "0.1.0"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "kuchiki"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ea8e9c6e031377cff82ee3001dc8026cdf431ed4e2e6b51f98ab8c73484a358"
+dependencies = [
+ "cssparser",
+ "html5ever 0.25.2",
+ "matches",
+ "selectors",
+]
+
+[[package]]
+name = "kv-log-macro"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
+dependencies = [
+ "log 0.4.17",
+]
+
+[[package]]
+name = "lalrpop"
+version = "0.19.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b30455341b0e18f276fa64540aff54deafb54c589de6aca68659c63dd2d5d823"
+dependencies = [
+ "ascii-canvas",
+ "atty",
+ "bit-set",
+ "diff",
+ "ena",
+ "itertools",
+ "lalrpop-util",
+ "petgraph",
+ "pico-args",
+ "regex 1.5.5",
+ "regex-syntax 0.6.25",
+ "string_cache 0.8.4",
+ "term",
+ "tiny-keccak",
+ "unicode-xid 0.2.3",
+]
+
+[[package]]
+name = "lalrpop-util"
+version = "0.19.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bcf796c978e9b4d983414f4caedc9273aa33ee214c5b887bd55fde84c85d2dc4"
+dependencies = [
+ "regex 1.5.5",
+]
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "levenshtein"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760"
+
+[[package]]
+name = "libc"
+version = "0.2.125"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5916d2ae698f6de9bfb891ad7a8d65c09d232dc58cc4ac433c7da3b2fd84bc2b"
+
+[[package]]
+name = "libnghttp2-sys"
+version = "0.1.7+1.45.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57ed28aba195b38d5ff02b9170cbff627e336a20925e43b4945390401c5dc93f"
+dependencies = [
+ "cc",
+ "libc",
+]
+
+[[package]]
+name = "libz-sys"
+version = "1.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92e7e15d7610cce1d9752e137625f14e61a28cd45929b6e12e47b50fe154ee2e"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "listenfd"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c02b14f35d9f5f082fd0b1b34aa0ef32e3354c859c721d7f3325b3f79a42ba54"
+dependencies = [
+ "libc",
+ "uuid",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "lock_api"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53"
+dependencies = [
+ "autocfg 1.1.0",
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e19e8d5c34a3e0e2223db8e060f9e8264aeeb5c5fc64a4ee9965c062211c024b"
+dependencies = [
+ "log 0.4.17",
+]
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+ "value-bag",
+]
+
+[[package]]
+name = "mac"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
+
+[[package]]
+name = "markdown"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef3aab6a1d529b112695f72beec5ee80e729cb45af58663ec902c8fac764ecdd"
+dependencies = [
+ "lazy_static",
+ "pipeline",
+ "regex 1.5.5",
+]
+
+[[package]]
+name = "markup"
+version = "0.12.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e56549270844a0e513d26db15562a783dd282e351baec8650c6f4c1bcaee54ef"
+dependencies = [
+ "itoa 0.4.8",
+ "markup-proc-macro",
+]
+
+[[package]]
+name = "markup-proc-macro"
+version = "0.12.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5948a1826d7a5ba7c550ff109894bca69b6db05ea72215f2e7d1a929b755f932"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "markup5ever"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "897636f9850c3eef4905a5540683ed53dc9393860f0846cab2c2ddf9939862ff"
+dependencies = [
+ "phf 0.7.24",
+ "phf_codegen 0.7.24",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "string_cache 0.7.5",
+ "string_cache_codegen 0.4.4",
+ "tendril",
+]
+
+[[package]]
+name = "markup5ever"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a24f40fb03852d1cdd84330cddcaf98e9ec08a7b7768e952fad3b4cf048ec8fd"
+dependencies = [
+ "log 0.4.17",
+ "phf 0.8.0",
+ "phf_codegen 0.8.0",
+ "string_cache 0.8.4",
+ "string_cache_codegen 0.5.2",
+ "tendril",
+]
+
+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata",
+]
+
+[[package]]
+name = "matches"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
+
+[[package]]
+name = "matchit"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73cbba799671b762df5a175adf59ce145165747bb891505c43d09aefbbf38beb"
+
+[[package]]
+name = "mediatype"
+version = "0.19.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "763c573229266ff7ba98f919ad1e2de0804836772a48c2d55e3c32eb246114f9"
+
+[[package]]
+name = "memchr"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "microformats"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82199da80bcfde7b0d6118afa68fe9fefd093fbfcd24a062adca758c41472b13"
+dependencies = [
+ "chrono",
+ "html5ever 0.22.5",
+ "lazy_static",
+ "log 0.4.17",
+ "regex 1.5.5",
+ "serde",
+ "serde_json",
+ "thiserror",
+ "url",
+]
+
+[[package]]
+name = "mime"
+version = "0.3.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
+
+[[package]]
+name = "mime_guess"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
+dependencies = [
+ "mime",
+ "unicase",
+]
+
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
+[[package]]
+name = "miniz_oxide"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "mio"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "713d550d9b44d89174e066b7a6217ae06234c10cb47819a88290d2b353c31799"
+dependencies = [
+ "libc",
+ "log 0.4.17",
+ "wasi 0.11.0+wasi-snapshot-preview1",
+ "windows-sys",
+]
+
+[[package]]
+name = "mockito"
+version = "0.30.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d10030163d67f681db11810bc486df3149e6d91c8b4f3f96fa8b62b546c2cef8"
+dependencies = [
+ "assert-json-diff",
+ "colored",
+ "difference",
+ "httparse",
+ "lazy_static",
+ "log 0.4.17",
+ "rand 0.8.5",
+ "regex 1.5.5",
+ "serde_json",
+ "serde_urlencoded",
+]
+
+[[package]]
+name = "multipart"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00dec633863867f29cb39df64a397cdf4a6354708ddd7759f70c7fb51c5f9182"
+dependencies = [
+ "buf_redux",
+ "httparse",
+ "log 0.4.17",
+ "mime",
+ "mime_guess",
+ "quick-error",
+ "rand 0.8.5",
+ "safemem",
+ "tempfile",
+ "twoway",
+]
+
+[[package]]
+name = "new_debug_unreachable"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
+
+[[package]]
+name = "newbase60"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4f6fe19de628588cbd968ba26b247819c7b7a000b3b4b5bcbf69ea606a33b5ba"
+
+[[package]]
+name = "nodrop"
+version = "0.1.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
+
+[[package]]
+name = "nom"
+version = "7.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8903e5a29a317527874d0402f867152a3d21c908bb0b933e416c65e301d4c36"
+dependencies = [
+ "memchr 2.5.0",
+ "minimal-lexical",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.45"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9"
+dependencies = [
+ "autocfg 1.1.0",
+ "num-traits",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
+dependencies = [
+ "autocfg 1.1.0",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
+
+[[package]]
+name = "opaque-debug"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
+
+[[package]]
+name = "openssl-probe"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
+
+[[package]]
+name = "openssl-sys"
+version = "0.9.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d5fd19fb3e0a8191c1e34935718976a3e70c112ab9a24af6d7cadccd9d90bc0"
+dependencies = [
+ "autocfg 1.1.0",
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "parking"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "427c3892f9e783d91cc128285287e70a59e206ca452770ece88a76f7a3eddd72"
+
+[[package]]
+name = "parking_lot"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
+dependencies = [
+ "instant",
+ "lock_api",
+ "parking_lot_core 0.8.5",
+]
+
+[[package]]
+name = "parking_lot"
+version = "0.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58"
+dependencies = [
+ "lock_api",
+ "parking_lot_core 0.9.3",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
+dependencies = [
+ "cfg-if",
+ "instant",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "windows-sys",
+]
+
+[[package]]
+name = "percent-encoding"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
+
+[[package]]
+name = "petgraph"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a13a2fa9d0b63e5f22328828741e523766fff0ee9e779316902290dff3f824f"
+dependencies = [
+ "fixedbitset",
+ "indexmap",
+]
+
+[[package]]
+name = "phf"
+version = "0.7.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3da44b85f8e8dfaec21adae67f95d93244b2ecf6ad2a692320598dcc8e6dd18"
+dependencies = [
+ "phf_shared 0.7.24",
+]
+
+[[package]]
+name = "phf"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12"
+dependencies = [
+ "phf_macros",
+ "phf_shared 0.8.0",
+ "proc-macro-hack",
+]
+
+[[package]]
+name = "phf_codegen"
+version = "0.7.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b03e85129e324ad4166b06b2c7491ae27fe3ec353af72e72cd1654c7225d517e"
+dependencies = [
+ "phf_generator 0.7.24",
+ "phf_shared 0.7.24",
+]
+
+[[package]]
+name = "phf_codegen"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815"
+dependencies = [
+ "phf_generator 0.8.0",
+ "phf_shared 0.8.0",
+]
+
+[[package]]
+name = "phf_generator"
+version = "0.7.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09364cc93c159b8b06b1f4dd8a4398984503483891b0c26b867cf431fb132662"
+dependencies = [
+ "phf_shared 0.7.24",
+ "rand 0.6.5",
+]
+
+[[package]]
+name = "phf_generator"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526"
+dependencies = [
+ "phf_shared 0.8.0",
+ "rand 0.7.3",
+]
+
+[[package]]
+name = "phf_generator"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6"
+dependencies = [
+ "phf_shared 0.10.0",
+ "rand 0.8.5",
+]
+
+[[package]]
+name = "phf_macros"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f6fde18ff429ffc8fe78e2bf7f8b7a5a5a6e2a8b58bc5a9ac69198bbda9189c"
+dependencies = [
+ "phf_generator 0.8.0",
+ "phf_shared 0.8.0",
+ "proc-macro-hack",
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "phf_shared"
+version = "0.7.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "234f71a15de2288bcb7e3b6515828d22af7ec8598ee6d24c3b526fa0a80b67a0"
+dependencies = [
+ "siphasher 0.2.3",
+]
+
+[[package]]
+name = "phf_shared"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7"
+dependencies = [
+ "siphasher 0.3.10",
+]
+
+[[package]]
+name = "phf_shared"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096"
+dependencies = [
+ "siphasher 0.3.10",
+]
+
+[[package]]
+name = "pico-args"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468"
+
+[[package]]
+name = "pin-project"
+version = "1.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e"
+dependencies = [
+ "pin-project-internal",
+]
+
+[[package]]
+name = "pin-project-internal"
+version = "1.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "pipeline"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d15b6607fa632996eb8a17c9041cb6071cb75ac057abd45dece578723ea8c7c0"
+
+[[package]]
+name = "pkg-config"
+version = "0.3.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae"
+
+[[package]]
+name = "polling"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "685404d509889fade3e86fe3a5803bca2ec09b0c0778d5ada6ec8bf7a8de5259"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "log 0.4.17",
+ "wepoll-ffi",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "polyval"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eebcc4aa140b9abd2bc40d9c3f7ccec842679cd79045ac3a7ac698c1a064b7cd"
+dependencies = [
+ "cpuid-bool",
+ "opaque-debug",
+ "universal-hash",
+]
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872"
+
+[[package]]
+name = "precomputed-hash"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
+
+[[package]]
+name = "proc-macro-hack"
+version = "0.5.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
+
+[[package]]
+name = "proc-macro2"
+version = "0.4.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
+dependencies = [
+ "unicode-xid 0.1.0",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.38"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9027b48e9d4c9175fa2218adf3557f91c1137021739951d4932f5f8268ac48aa"
+dependencies = [
+ "unicode-xid 0.2.3",
+]
+
+[[package]]
+name = "procfs"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95e344cafeaeefe487300c361654bcfc85db3ac53619eeccced29f5ea18c4c70"
+dependencies = [
+ "bitflags",
+ "byteorder",
+ "flate2",
+ "hex",
+ "lazy_static",
+ "libc",
+]
+
+[[package]]
+name = "prometheus"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b7f64969ffd5dd8f39bd57a68ac53c163a095ed9d0fb707146da1b27025a3504"
+dependencies = [
+ "cfg-if",
+ "fnv",
+ "lazy_static",
+ "libc",
+ "memchr 2.5.0",
+ "parking_lot 0.11.2",
+ "procfs",
+ "protobuf",
+ "thiserror",
+]
+
+[[package]]
+name = "prost"
+version = "0.10.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc03e116981ff7d8da8e5c220e374587b98d294af7ba7dd7fda761158f00086f"
+dependencies = [
+ "bytes",
+ "prost-derive",
+]
+
+[[package]]
+name = "prost-derive"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b670f45da57fb8542ebdbb6105a925fe571b67f9e7ed9f47a06a84e72b4e7cc"
+dependencies = [
+ "anyhow",
+ "itertools",
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "prost-types"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d0a014229361011dc8e69c8a1ec6c2e8d0f2af7c91e3ea3f5b2170298461e68"
+dependencies = [
+ "bytes",
+ "prost",
+]
+
+[[package]]
+name = "protobuf"
+version = "2.27.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf7e6d18738ecd0902d30d1ad232c9125985a3422929b16c65517b38adc14f96"
+
+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
+[[package]]
+name = "quote"
+version = "0.6.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
+dependencies = [
+ "proc-macro2 0.4.30",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1"
+dependencies = [
+ "proc-macro2 1.0.38",
+]
+
+[[package]]
+name = "rand"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293"
+dependencies = [
+ "fuchsia-cprng",
+ "libc",
+ "rand_core 0.3.1",
+ "rdrand",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "rand"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
+dependencies = [
+ "autocfg 0.1.8",
+ "libc",
+ "rand_chacha 0.1.1",
+ "rand_core 0.4.2",
+ "rand_hc 0.1.0",
+ "rand_isaac",
+ "rand_jitter",
+ "rand_os",
+ "rand_pcg 0.1.2",
+ "rand_xorshift",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "rand"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
+dependencies = [
+ "getrandom 0.1.16",
+ "libc",
+ "rand_chacha 0.2.2",
+ "rand_core 0.5.1",
+ "rand_hc 0.2.0",
+ "rand_pcg 0.2.1",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha 0.3.1",
+ "rand_core 0.6.3",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
+dependencies = [
+ "autocfg 0.1.8",
+ "rand_core 0.3.1",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
+dependencies = [
+ "ppv-lite86",
+ "rand_core 0.5.1",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core 0.6.3",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
+dependencies = [
+ "rand_core 0.4.2",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
+
+[[package]]
+name = "rand_core"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
+dependencies = [
+ "getrandom 0.1.16",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+dependencies = [
+ "getrandom 0.2.6",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
+dependencies = [
+ "rand_core 0.3.1",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
+dependencies = [
+ "rand_core 0.5.1",
+]
+
+[[package]]
+name = "rand_isaac"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
+dependencies = [
+ "rand_core 0.3.1",
+]
+
+[[package]]
+name = "rand_jitter"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b"
+dependencies = [
+ "libc",
+ "rand_core 0.4.2",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "rand_os"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
+dependencies = [
+ "cloudabi",
+ "fuchsia-cprng",
+ "libc",
+ "rand_core 0.4.2",
+ "rdrand",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "rand_pcg"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44"
+dependencies = [
+ "autocfg 0.1.8",
+ "rand_core 0.4.2",
+]
+
+[[package]]
+name = "rand_pcg"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429"
+dependencies = [
+ "rand_core 0.5.1",
+]
+
+[[package]]
+name = "rand_xorshift"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c"
+dependencies = [
+ "rand_core 0.3.1",
+]
+
+[[package]]
+name = "rdrand"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
+dependencies = [
+ "rand_core 0.3.1",
+]
+
+[[package]]
+name = "redis"
+version = "0.21.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a80b5f38d7f5a020856a0e16e40a9cfabf88ae8f0e4c2dcd8a3114c1e470852"
+dependencies = [
+ "async-trait",
+ "bytes",
+ "combine",
+ "dtoa",
+ "futures-util",
+ "itoa 0.4.8",
+ "percent-encoding",
+ "pin-project-lite",
+ "sha1",
+ "tokio",
+ "tokio-util 0.6.9",
+ "url",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.2.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "redox_users"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
+dependencies = [
+ "getrandom 0.2.6",
+ "redox_syscall",
+ "thiserror",
+]
+
+[[package]]
+name = "regex"
+version = "0.1.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
+dependencies = [
+ "aho-corasick 0.5.3",
+ "memchr 0.1.11",
+ "regex-syntax 0.3.9",
+ "thread_local 0.2.7",
+ "utf8-ranges",
+]
+
+[[package]]
+name = "regex"
+version = "1.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
+dependencies = [
+ "aho-corasick 0.7.18",
+ "memchr 2.5.0",
+ "regex-syntax 0.6.25",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax 0.6.25",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+
+[[package]]
+name = "relative-path"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b4e112eddc95bbf25365df3b5414354ad2fe7ee465eddb9965a515faf8c3b6d9"
+
+[[package]]
+name = "remove_dir_all"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
+dependencies = [
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "reqwest"
+version = "0.11.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "46a1f7aa4f35e5e8b4160449f51afc758f0ce6454315a9fa7d0d113e958c41eb"
+dependencies = [
+ "async-compression",
+ "base64",
+ "bytes",
+ "encoding_rs",
+ "futures-core",
+ "futures-util",
+ "h2",
+ "http",
+ "http-body",
+ "hyper",
+ "hyper-rustls",
+ "ipnet",
+ "js-sys",
+ "lazy_static",
+ "log 0.4.17",
+ "mime",
+ "percent-encoding",
+ "pin-project-lite",
+ "rustls",
+ "rustls-pemfile",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "tokio",
+ "tokio-rustls",
+ "tokio-util 0.6.9",
+ "url",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+ "webpki-roots",
+ "winreg",
+]
+
+[[package]]
+name = "retainer"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59039dbf4a344af919780e9acdf7f9ce95deffb0152a72eca94b89d6a2bf66c0"
+dependencies = [
+ "async-lock",
+ "async-timer",
+ "log 0.4.17",
+ "rand 0.8.5",
+]
+
+[[package]]
+name = "ring"
+version = "0.16.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc"
+dependencies = [
+ "cc",
+ "libc",
+ "once_cell",
+ "spin",
+ "untrusted",
+ "web-sys",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "rustc_version"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
+dependencies = [
+ "semver 0.9.0",
+]
+
+[[package]]
+name = "rustc_version"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
+dependencies = [
+ "semver 1.0.9",
+]
+
+[[package]]
+name = "rustls"
+version = "0.20.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4fbfeb8d0ddb84706bc597a5574ab8912817c52a397f819e5b614e2265206921"
+dependencies = [
+ "log 0.4.17",
+ "ring",
+ "sct",
+ "webpki",
+]
+
+[[package]]
+name = "rustls-pemfile"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ee86d63972a7c661d1536fefe8c3c8407321c3df668891286de28abcd087360"
+dependencies = [
+ "base64",
+]
+
+[[package]]
+name = "rustversion"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f"
+
+[[package]]
+name = "ryu"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
+
+[[package]]
+name = "safemem"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072"
+
+[[package]]
+name = "schannel"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75"
+dependencies = [
+ "lazy_static",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "scoped-tls"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2"
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "sct"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4"
+dependencies = [
+ "ring",
+ "untrusted",
+]
+
+[[package]]
+name = "selectors"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df320f1889ac4ba6bc0cdc9c9af7af4bd64bb927bccdf32d81140dc1f9be12fe"
+dependencies = [
+ "bitflags",
+ "cssparser",
+ "derive_more",
+ "fxhash",
+ "log 0.4.17",
+ "matches",
+ "phf 0.8.0",
+ "phf_codegen 0.8.0",
+ "precomputed-hash",
+ "servo_arc",
+ "smallvec",
+ "thin-slice",
+]
+
+[[package]]
+name = "semver"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403"
+dependencies = [
+ "semver-parser",
+]
+
+[[package]]
+name = "semver"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8cb243bdfdb5936c8dc3c45762a19d12ab4550cdc753bc247637d4ec35a040fd"
+
+[[package]]
+name = "semver-parser"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
+
+[[package]]
+name = "serde"
+version = "1.0.137"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.137"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.81"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c"
+dependencies = [
+ "itoa 1.0.1",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_qs"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6"
+dependencies = [
+ "percent-encoding",
+ "serde",
+ "thiserror",
+]
+
+[[package]]
+name = "serde_regex"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8136f1a4ea815d7eac4101cfd0b16dc0cb5e1fe1b8609dfd728058656b7badf"
+dependencies = [
+ "regex 1.5.5",
+ "serde",
+]
+
+[[package]]
+name = "serde_urlencoded"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
+dependencies = [
+ "form_urlencoded",
+ "itoa 1.0.1",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_variant"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f988d71f968b960b45cf71e3210662f0b23906256c87820077e4f101225c494"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "servo_arc"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d98238b800e0d1576d8b6e3de32827c2d74bee68bb97748dcf5071fb53965432"
+dependencies = [
+ "nodrop",
+ "stable_deref_trait",
+]
+
+[[package]]
+name = "sha-1"
+version = "0.9.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6"
+dependencies = [
+ "block-buffer 0.9.0",
+ "cfg-if",
+ "cpufeatures",
+ "digest 0.9.0",
+ "opaque-debug",
+]
+
+[[package]]
+name = "sha-1"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest 0.10.3",
+]
+
+[[package]]
+name = "sha1"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1da05c97445caa12d05e848c4a4fcbbea29e748ac28f7e80e9b010392063770"
+dependencies = [
+ "sha1_smol",
+]
+
+[[package]]
+name = "sha1_smol"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012"
+
+[[package]]
+name = "sha2"
+version = "0.9.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800"
+dependencies = [
+ "block-buffer 0.9.0",
+ "cfg-if",
+ "cpufeatures",
+ "digest 0.9.0",
+ "opaque-debug",
+]
+
+[[package]]
+name = "sharded-slab"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
+name = "signal-hook"
+version = "0.3.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "647c97df271007dcea485bb74ffdb57f2e683f1306c854f468a0c244badabf2d"
+dependencies = [
+ "libc",
+ "signal-hook-registry",
+]
+
+[[package]]
+name = "signal-hook-registry"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "similar"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e24979f63a11545f5f2c60141afe249d4f19f84581ea2138065e400941d83d3"
+
+[[package]]
+name = "siphasher"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b8de496cf83d4ed58b6be86c3a275b8602f6ffe98d3024a869e124147a9a3ac"
+
+[[package]]
+name = "siphasher"
+version = "0.3.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de"
+
+[[package]]
+name = "slab"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32"
+
+[[package]]
+name = "sluice"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d7400c0eff44aa2fcb5e31a5f24ba9716ed90138769e4977a2ba6014ae63eb5"
+dependencies = [
+ "async-channel",
+ "futures-core",
+ "futures-io",
+]
+
+[[package]]
+name = "smallvec"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
+
+[[package]]
+name = "socket2"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0"
+dependencies = [
+ "libc",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "spin"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
+
+[[package]]
+name = "standback"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e113fb6f3de07a243d434a56ec6f186dfd51cb08448239fe7bcae73f87ff28ff"
+dependencies = [
+ "version_check",
+]
+
+[[package]]
+name = "stdweb"
+version = "0.4.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d022496b16281348b52d0e30ae99e01a73d737b2f45d38fed4edf79f9325a1d5"
+dependencies = [
+ "discard",
+ "rustc_version 0.2.3",
+ "stdweb-derive",
+ "stdweb-internal-macros",
+ "stdweb-internal-runtime",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "stdweb-derive"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c87a60a40fccc84bef0652345bbbbbe20a605bf5d0ce81719fc476f5c03b50ef"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "serde",
+ "serde_derive",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "stdweb-internal-macros"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "58fa5ff6ad0d98d1ffa8cb115892b6e69d67799f6763e162a1c9db421dc22e11"
+dependencies = [
+ "base-x",
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "sha1",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "stdweb-internal-runtime"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "213701ba3370744dcd1a12960caa4843b3d68b4d1c0a5d575e0d65b2ee9d16c0"
+
+[[package]]
+name = "string_cache"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "89c058a82f9fd69b1becf8c274f412281038877c553182f1d02eb027045a2d67"
+dependencies = [
+ "lazy_static",
+ "new_debug_unreachable",
+ "phf_shared 0.7.24",
+ "precomputed-hash",
+ "serde",
+ "string_cache_codegen 0.4.4",
+ "string_cache_shared",
+]
+
+[[package]]
+name = "string_cache"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "213494b7a2b503146286049378ce02b482200519accc31872ee8be91fa820a08"
+dependencies = [
+ "new_debug_unreachable",
+ "once_cell",
+ "parking_lot 0.12.0",
+ "phf_shared 0.10.0",
+ "precomputed-hash",
+ "serde",
+]
+
+[[package]]
+name = "string_cache_codegen"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f0f45ed1b65bf9a4bf2f7b7dc59212d1926e9eaf00fa998988e420fd124467c6"
+dependencies = [
+ "phf_generator 0.7.24",
+ "phf_shared 0.7.24",
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "string_cache_shared",
+]
+
+[[package]]
+name = "string_cache_codegen"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988"
+dependencies = [
+ "phf_generator 0.10.0",
+ "phf_shared 0.10.0",
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+]
+
+[[package]]
+name = "string_cache_shared"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc"
+
+[[package]]
+name = "subtle"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
+
+[[package]]
+name = "syn"
+version = "0.15.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5"
+dependencies = [
+ "proc-macro2 0.4.30",
+ "quote 0.6.13",
+ "unicode-xid 0.1.0",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.93"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04066589568b72ec65f42d65a1a52436e954b168773148893c020269563decf2"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "unicode-xid 0.2.3",
+]
+
+[[package]]
+name = "sync_wrapper"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "20518fe4a4c9acf048008599e464deb21beeae3d3578418951a189c235a7a9a8"
+
+[[package]]
+name = "tempdir"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8"
+dependencies = [
+ "rand 0.4.6",
+ "remove_dir_all",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4"
+dependencies = [
+ "cfg-if",
+ "fastrand",
+ "libc",
+ "redox_syscall",
+ "remove_dir_all",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "tendril"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0"
+dependencies = [
+ "futf",
+ "mac",
+ "utf-8",
+]
+
+[[package]]
+name = "term"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f"
+dependencies = [
+ "dirs-next",
+ "rustversion",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "termcolor"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "test-logger"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e55ec868b79cb8e63f8921843c10e3083137cfaa171a67209e6a2656ccd4d8a"
+dependencies = [
+ "env_logger 0.3.5",
+]
+
+[[package]]
+name = "thin-slice"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8eaa81235c7058867fa8c0e7314f33dcce9c215f535d1913822a2b3f5e289f3c"
+
+[[package]]
+name = "thiserror"
+version = "1.0.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "thread-id"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
+dependencies = [
+ "kernel32-sys",
+ "libc",
+]
+
+[[package]]
+name = "thread_local"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
+dependencies = [
+ "thread-id",
+]
+
+[[package]]
+name = "thread_local"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "time"
+version = "0.1.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255"
+dependencies = [
+ "libc",
+ "wasi 0.10.0+wasi-snapshot-preview1",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "time"
+version = "0.2.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4752a97f8eebd6854ff91f1c1824cd6160626ac4bd44287f7f4ea2035a02a242"
+dependencies = [
+ "const_fn",
+ "libc",
+ "standback",
+ "stdweb",
+ "time-macros",
+ "version_check",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "time-macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "957e9c6e26f12cb6d0dd7fc776bb67a706312e7299aed74c8dd5b17ebb27e2f1"
+dependencies = [
+ "proc-macro-hack",
+ "time-macros-impl",
+]
+
+[[package]]
+name = "time-macros-impl"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd3c141a1b43194f3f56a1411225df8646c55781d5f26db825b3d98507eb482f"
+dependencies = [
+ "proc-macro-hack",
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "standback",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "tiny-keccak"
+version = "2.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237"
+dependencies = [
+ "crunchy",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
+
+[[package]]
+name = "tokio"
+version = "1.18.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4903bf0427cf68dddd5aa6a93220756f8be0c34fcfa9f5e6191e103e15a31395"
+dependencies = [
+ "bytes",
+ "libc",
+ "memchr 2.5.0",
+ "mio",
+ "num_cpus",
+ "once_cell",
+ "parking_lot 0.12.0",
+ "pin-project-lite",
+ "signal-hook-registry",
+ "socket2",
+ "tokio-macros",
+ "tracing",
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "tokio-io-timeout"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf"
+dependencies = [
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-macros"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "tokio-rustls"
+version = "0.23.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59"
+dependencies = [
+ "rustls",
+ "tokio",
+ "webpki",
+]
+
+[[package]]
+name = "tokio-stream"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50145484efff8818b5ccd256697f36863f587da82cf8b409c53adf1e840798e3"
+dependencies = [
+ "futures-core",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-tungstenite"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "511de3f85caf1c98983545490c3d09685fa8eb634e57eec22bb4db271f46cbd8"
+dependencies = [
+ "futures-util",
+ "log 0.4.17",
+ "pin-project",
+ "tokio",
+ "tungstenite",
+]
+
+[[package]]
+name = "tokio-util"
+version = "0.6.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "futures-sink",
+ "log 0.4.17",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-util"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0edfdeb067411dba2044da6d1cb2df793dd35add7888d73c16e3381ded401764"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "futures-sink",
+ "pin-project-lite",
+ "tokio",
+ "tracing",
+]
+
+[[package]]
+name = "tonic"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5be9d60db39854b30b835107500cf0aca0b0d14d6e1c3de124217c23a29c2ddb"
+dependencies = [
+ "async-stream",
+ "async-trait",
+ "axum",
+ "base64",
+ "bytes",
+ "futures-core",
+ "futures-util",
+ "h2",
+ "http",
+ "http-body",
+ "hyper",
+ "hyper-timeout",
+ "percent-encoding",
+ "pin-project",
+ "prost",
+ "prost-derive",
+ "tokio",
+ "tokio-stream",
+ "tokio-util 0.7.1",
+ "tower",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+ "tracing-futures",
+]
+
+[[package]]
+name = "tower"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a89fd63ad6adf737582df5db40d286574513c69a11dac5214dc3b5603d6713e"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "indexmap",
+ "pin-project",
+ "pin-project-lite",
+ "rand 0.8.5",
+ "slab",
+ "tokio",
+ "tokio-util 0.7.1",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "tower-http"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d342c6d58709c0a6d48d48dabbb62d4ef955cf5f0f3bbfd845838e7ae88dbae"
+dependencies = [
+ "bitflags",
+ "bytes",
+ "futures-core",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-range-header",
+ "pin-project-lite",
+ "tower",
+ "tower-layer",
+ "tower-service",
+]
+
+[[package]]
+name = "tower-layer"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "343bc9466d3fe6b0f960ef45960509f84480bf4fd96f92901afe7ff3df9d3a62"
+
+[[package]]
+name = "tower-service"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6"
+
+[[package]]
+name = "tracing"
+version = "0.1.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d0ecdcb44a79f0fe9844f0c4f33a342cbcbb5117de8001e6ba0dc2351327d09"
+dependencies = [
+ "cfg-if",
+ "log 0.4.17",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f54c8ca710e81886d498c2fd3331b56c93aa248d49de2222ad2742247c60072f"
+dependencies = [
+ "lazy_static",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-futures"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2"
+dependencies = [
+ "pin-project",
+ "tracing",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4bc28f93baff38037f64e6f43d34cfa1605f27a49c34e8a04c5e78b0babf2596"
+dependencies = [
+ "lazy_static",
+ "matchers",
+ "regex 1.5.5",
+ "sharded-slab",
+ "thread_local 1.1.4",
+ "tracing",
+ "tracing-core",
+]
+
+[[package]]
+name = "try-lock"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642"
+
+[[package]]
+name = "tungstenite"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0b2d8558abd2e276b0a8df5c05a2ec762609344191e5fd23e292c910e9165b5"
+dependencies = [
+ "base64",
+ "byteorder",
+ "bytes",
+ "http",
+ "httparse",
+ "log 0.4.17",
+ "rand 0.8.5",
+ "sha-1 0.9.8",
+ "thiserror",
+ "url",
+ "utf-8",
+]
+
+[[package]]
+name = "twoway"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59b11b2b5241ba34be09c3cc85a36e56e48f9888862e19cedf23336d35316ed1"
+dependencies = [
+ "memchr 2.5.0",
+]
+
+[[package]]
+name = "typenum"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
+
+[[package]]
+name = "unicase"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+dependencies = [
+ "version_check",
+]
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
+
+[[package]]
+name = "unicode-xid"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04"
+
+[[package]]
+name = "universal-hash"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f214e8f697e925001e66ec2c6e37a4ef93f0f78c2eed7814394e10c62025b05"
+dependencies = [
+ "generic-array",
+ "subtle",
+]
+
+[[package]]
+name = "untrusted"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
+
+[[package]]
+name = "url"
+version = "2.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "matches",
+ "percent-encoding",
+ "serde",
+]
+
+[[package]]
+name = "utf-8"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
+
+[[package]]
+name = "utf8-ranges"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
+
+[[package]]
+name = "uuid"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7"
+
+[[package]]
+name = "valuable"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+
+[[package]]
+name = "value-bag"
+version = "1.0.0-alpha.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2209b78d1249f7e6f3293657c9779fe31ced465df091bbd433a1cf88e916ec55"
+dependencies = [
+ "ctor",
+ "version_check",
+]
+
+[[package]]
+name = "vcpkg"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "waker-fn"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca"
+
+[[package]]
+name = "want"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0"
+dependencies = [
+ "log 0.4.17",
+ "try-lock",
+]
+
+[[package]]
+name = "warp"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3cef4e1e9114a4b7f1ac799f16ce71c14de5778500c5450ec6b7b920c55b587e"
+dependencies = [
+ "async-compression",
+ "bytes",
+ "futures-channel",
+ "futures-util",
+ "headers",
+ "http",
+ "hyper",
+ "log 0.4.17",
+ "mime",
+ "mime_guess",
+ "multipart",
+ "percent-encoding",
+ "pin-project",
+ "scoped-tls",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "tokio",
+ "tokio-stream",
+ "tokio-tungstenite",
+ "tokio-util 0.6.9",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "warp-prometheus"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "63a77e844a8110e3551913c18f4f215e2c58dd9193668449fa34b67c205524a1"
+dependencies = [
+ "log 0.4.17",
+ "prometheus",
+ "warp",
+]
+
+[[package]]
+name = "wasi"
+version = "0.9.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
+
+[[package]]
+name = "wasi"
+version = "0.10.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
+
+[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "27370197c907c55e3f1a9fbe26f44e937fe6451368324e009cba39e139dc08ad"
+dependencies = [
+ "cfg-if",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53e04185bfa3a779273da532f5025e33398409573f348985af9a1cbf3774d3f4"
+dependencies = [
+ "bumpalo",
+ "lazy_static",
+ "log 0.4.17",
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-futures"
+version = "0.4.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f741de44b75e14c35df886aff5f1eb73aa114fa5d4d00dcd37b5e01259bf3b2"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "17cae7ff784d7e83a2fe7611cfe766ecf034111b49deb850a3dc7699c08251f5"
+dependencies = [
+ "quote 1.0.18",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "99ec0dc7a4756fffc231aab1b9f2f578d23cd391390ab27f952ae0c9b3ece20b"
+dependencies = [
+ "proc-macro2 1.0.38",
+ "quote 1.0.18",
+ "syn 1.0.93",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d554b7f530dee5964d9a9468d95c1f8b8acae4f282807e7d27d4b03099a46744"
+
+[[package]]
+name = "web-sys"
+version = "0.3.57"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b17e741662c70c8bd24ac5c5b18de314a2c26c32bf8346ee1e6f53de919c283"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "webpki"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd"
+dependencies = [
+ "ring",
+ "untrusted",
+]
+
+[[package]]
+name = "webpki-roots"
+version = "0.22.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44d8de8415c823c8abd270ad483c6feeac771fad964890779f9a8cb24fbbc1bf"
+dependencies = [
+ "webpki",
+]
+
+[[package]]
+name = "wepoll-ffi"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d743fdedc5c64377b5fc2bc036b01c7fd642205a0d96356034ae3404d49eb7fb"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "winapi"
+version = "0.2.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-build"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi 0.3.9",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "windows-sys"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
+dependencies = [
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
+
+[[package]]
+name = "winreg"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d"
+dependencies = [
+ "winapi 0.3.9",
+]
diff --git a/kittybox-rs/Cargo.toml b/kittybox-rs/Cargo.toml
new file mode 100644
index 0000000..d43f0f4
--- /dev/null
+++ b/kittybox-rs/Cargo.toml
@@ -0,0 +1,114 @@
+[package]
+name = "kittybox"
+version = "0.1.0"
+authors = ["Vika <vika@fireburn.ru>"]
+edition = "2021"
+default-run = "kittybox"
+
+[features]
+default = []
+util = ["anyhow"]
+
+[[bin]]
+name = "kittybox-bulk-import"
+path = "src/bin/kittybox_bulk_import.rs"
+required-features = ["util"]
+
+[[bin]]
+name = "pyindieblog-export"
+path = "src/bin/pyindieblog_to_kittybox.rs"
+required-features = ["util", "redis"]
+
+[[bin]]
+name = "kittybox-database-converter"
+path = "src/bin/kittybox_database_converter.rs"
+required-features = ["util", "redis"]
+
+[workspace]
+members = [".", "./util", "./templates"]
+default-members = [".", "./util", "./templates"]
+[dependencies.kittybox-util]
+version = "0.1.0"
+path = "./util"
+[dependencies.kittybox-templates]
+version = "0.1.0"
+path = "./templates"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dev-dependencies]
+mockito = "^0.30.0"          # HTTP mocking for Rust.
+tempdir = "^0.3.7"           # A library for managing a temporary directory and deleting all contents when it's dropped
+test-logger = "^0.1.0"       # Simple helper to initialize env_logger before unit and integration tests
+httpmock = "^0.6"            # HTTP mocking library that allows you to simulate responses from HTTP based services
+faker_rand = "^0.1.1"        # Seedable, rand-compatible generators of fake data
+rand = "^0.8.5"              # Utilities for random number generation
+
+[dependencies]
+async-trait = "^0.1.50"      # Type erasure for async trait methods
+bytes = "^1.1.0"
+data-encoding = "^2.3.2"     # Efficient and customizable data-encoding functions like base64, base32, and hex
+easy-scraper = "^0.2.0"      # HTML scraping library focused on ease of use
+either = "^1.6.1"            # A general purpose sum type with two cases
+env_logger = "^0.8.3"        # A logging implementation for `log` which is configured via an environment variable
+futures = "^0.3.14"          # An implementation of futures and streams
+futures-util = "^0.3.14"     # Common utilities and extension traits for the futures-rs library
+lazy_static = "^1.4.0"       # A macro for declaring lazily evaluated statics in Rust
+listenfd = "^0.5.0"          # A simple library to work with listenfds passed from the outside (systemd/catflap socket activation)
+log = "^0.4.14"              # A lightweight logging facade for Rust
+markdown = "^0.3.0"          # Native Rust library for parsing Markdown (and outputting HTML)
+markup = "^0.12.0"           # HTML templating engine (yes, markdown and markup sit right next to each other; the pun is accidental)
+mediatype = "^0.19.1"        # MIME Media-type parsing
+newbase60 = "^0.1.3"         # A library that implements Tantek Çelik's New Base 60
+rand = "^0.8.4"              # Random number generators.
+retainer = "^0.2.2"          # Minimal async cache in Rust with support for key expirations
+serde_json = "^1.0.64"       # A JSON serialization file format
+serde_urlencoded = "^0.7.0"  # `x-www-form-urlencoded` meets Serde
+serde_variant = "^0.1.1"     # Retrieve serde provided variant names for enum objects
+relative-path = "^1.5.0"     # Portable relative paths for Rust
+sha2 = "^0.9.8"              # SHA-2 series of algorithms for Rust
+warp-prometheus = "^0.5.0"   # An afterthought of prometheus metrics for Warp
+[dependencies.tokio]
+version = "^1.16.1"
+features = ["full", "tracing"] # TODO determine if my app doesn't need some features
+[dependencies.console-subscriber]
+version = "0.1.3"
+[dependencies.tokio-stream]
+version = "^0.1.8"
+features = ["time", "net"]
+[dependencies.anyhow]
+version = "^1.0.42"
+optional = true
+[dependencies.chrono]        # Date and time library for Rust
+version = "^0.4.19"
+features = ["serde"]
+[dependencies.redis]
+version = "^0.21.3"
+optional = true
+features = ["aio", "tokio-comp"]
+[dependencies.prometheus]    # Prometheus instrumentation library for Rust applications
+version = "^0.13.0"
+features = ["process"]
+[dependencies.serde]         # A generic serialization/deserialization framework
+version = "^1.0.125"
+features = ["derive"]
+[dependencies.url]           # URL library for Rust, based on the WHATWG URL Standard
+version = "^2.2.1"
+features = ["serde"]
+[dependencies.warp]
+version = "^0.3.2"
+default-features = false
+features = ["multipart", "compression"]
+[dependencies.http-types]    # Common types for HTTP operations
+version = "^2.11.0"
+features = ["http"]
+[dependencies.hyper]
+version = "^0.14.17"
+features = ["stream", "runtime"]
+[dependencies.reqwest]
+version = "^0.11.10"
+default-features = false
+features = ["rustls-tls-webpki-roots", "gzip", "brotli", "json", "stream"]
+[dependencies.microformats]
+version = "^0.2.0"
+#git = "https://gitlab.com/maxburon/microformats-parser"
diff --git a/kittybox-rs/dev.sh b/kittybox-rs/dev.sh
new file mode 100755
index 0000000..fa39dcc
--- /dev/null
+++ b/kittybox-rs/dev.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+export RUST_LOG="info,kittybox=debug,retainer::cache=warn,h2=warn,rustls=warn"
+export BACKEND_URI=file://./test-dir
+export TOKEN_ENDPOINT=https://tokens.indieauth.com/token
+export AUTHORIZATION_ENDPOINT=https://indieauth.com/auth
+export COOKIE_SECRET=1234567890abcdefghijklmnopqrstuvwxyz
+#export COOKIE_SECRET_FILE=/dev/null
+if [[ "$1" == "watch" ]]; then
+	shift
+	exec systemfd --no-pid -s http::8080 -- cargo watch -x run "$@"
+elif [[ "$1" == "run" ]]; then
+	shift 1
+	exec cargo run "$@"
+else
+	echo "Usage: ./dev.sh [watch|run]"
+	exit 1
+fi
diff --git a/kittybox-rs/src/bin/kittybox_bulk_import.rs b/kittybox-rs/src/bin/kittybox_bulk_import.rs
new file mode 100644
index 0000000..7e1f6af
--- /dev/null
+++ b/kittybox-rs/src/bin/kittybox_bulk_import.rs
@@ -0,0 +1,66 @@
+use anyhow::{anyhow, bail, Context, Result};
+use std::fs::File;
+use std::io;
+
+#[async_std::main]
+async fn main() -> Result<()> {
+    let args = std::env::args().collect::<Vec<String>>();
+    if args.iter().skip(1).any(|s| s == "--help") {
+        println!("Usage: {} <url> [file]", args[0]);
+        println!("\nIf launched with no arguments, reads from stdin.");
+        println!(
+            "\nUse KITTYBOX_AUTH_TOKEN environment variable to authorize to the Micropub endpoint."
+        );
+        std::process::exit(0);
+    }
+
+    let token = std::env::var("KITTYBOX_AUTH_TOKEN")
+        .map_err(|_| anyhow!("No auth token found! Use KITTYBOX_AUTH_TOKEN env variable."))?;
+    let data: Vec<serde_json::Value> = (if args.len() == 2 || (args.len() == 3 && args[2] == "-") {
+        serde_json::from_reader(io::stdin())
+    } else if args.len() == 3 {
+        serde_json::from_reader(File::open(&args[2]).with_context(|| "Error opening input file")?)
+    } else {
+        bail!("See `{} --help` for usage.", args[0]);
+    })
+    .with_context(|| "Error while loading the input file")?;
+
+    let url = surf::Url::parse(&args[1])?;
+    let client = surf::Client::new();
+
+    let iter = data.into_iter();
+
+    for post in iter {
+        println!(
+            "Processing {}...",
+            post["properties"]["url"][0]
+                .as_str()
+                .or_else(|| post["properties"]["published"][0]
+                    .as_str()
+                    .or_else(|| post["properties"]["name"][0]
+                        .as_str()
+                        .or(Some("<unidentified post>"))))
+                .unwrap()
+        );
+        match client
+            .post(&url)
+            .body(surf::http::Body::from_string(serde_json::to_string(&post)?))
+            .header("Content-Type", "application/json")
+            .header("Authorization", format!("Bearer {}", &token))
+            .send()
+            .await
+        {
+            Ok(mut response) => {
+                if response.status() == 201 || response.status() == 202 {
+                    println!("Posted at {}", response.header("location").unwrap().last());
+                } else {
+                    println!("Error: {:?}", response.body_string().await);
+                }
+            }
+            Err(err) => {
+                println!("{}", err);
+            }
+        }
+    }
+    Ok(())
+}
diff --git a/kittybox-rs/src/bin/kittybox_database_converter.rs b/kittybox-rs/src/bin/kittybox_database_converter.rs
new file mode 100644
index 0000000..bc355c9
--- /dev/null
+++ b/kittybox-rs/src/bin/kittybox_database_converter.rs
@@ -0,0 +1,106 @@
+use anyhow::{anyhow, Context};
+use kittybox::database::FileStorage;
+use kittybox::database::Storage;
+use redis::{self, AsyncCommands};
+use std::collections::HashMap;
+
+/// Convert data from a Redis storage backend into the new storage backend `new_storage`.
+async fn convert_from_redis<S: Storage>(from: String, new_storage: S) -> anyhow::Result<()> {
+    let db = redis::Client::open(from).context("Failed to open the Redis connection")?;
+
+    let mut conn = db
+        .get_async_std_connection()
+        .await
+        .context("Failed to connect to Redis")?;
+
+    // Rebinding to convince the borrow checker we're not smuggling stuff outta scope
+    let storage = &new_storage;
+
+    let mut stream = conn.hscan::<_, String>("posts").await?;
+
+    while let Some(key) = stream.next_item().await {
+        let value = serde_json::from_str::<serde_json::Value>(
+            &stream
+                .next_item()
+                .await
+                .ok_or(anyhow!("Failed to find a corresponding value for the key"))?,
+        )?;
+
+        println!("{}, {:?}", key, value);
+
+        if value["see_other"].is_string() {
+            continue;
+        }
+
+        let user = &(url::Url::parse(value["properties"]["uid"][0].as_str().unwrap())
+            .unwrap()
+            .origin()
+            .ascii_serialization()
+            .clone()
+            + "/");
+        if let Err(err) = storage.clone().put_post(&value, user).await {
+            eprintln!("Error saving post: {}", err);
+        }
+    }
+
+    let mut stream: redis::AsyncIter<String> = conn.scan_match("settings_*").await?;
+    while let Some(key) = stream.next_item().await {
+        let mut conn = db
+            .get_async_std_connection()
+            .await
+            .context("Failed to connect to Redis")?;
+        let user = key.strip_prefix("settings_").unwrap();
+        match conn
+            .hgetall::<&str, HashMap<String, String>>(&key)
+            .await
+            .context(format!("Failed getting settings from key {}", key))
+        {
+            Ok(settings) => {
+                for (k, v) in settings.iter() {
+                    if let Err(e) = storage
+                        .set_setting(k, user, v)
+                        .await
+                        .with_context(|| format!("Failed setting {} for {}", k, user))
+                    {
+                        eprintln!("{}", e);
+                    }
+                }
+            }
+            Err(e) => {
+                eprintln!("{}", e);
+            }
+        }
+    }
+
+    Ok(())
+}
+
+#[async_std::main]
+async fn main() -> anyhow::Result<()> {
+    let mut args = std::env::args();
+    args.next(); // skip argv[0]
+    let old_uri = args
+        .next()
+        .ok_or_else(|| anyhow!("No import source is provided."))?;
+    let new_uri = args
+        .next()
+        .ok_or_else(|| anyhow!("No import destination is provided."))?;
+
+    let storage = if new_uri.starts_with("file://") {
+        let folder = new_uri.strip_prefix("file://").unwrap();
+        let path = std::path::PathBuf::from(folder);
+        Box::new(
+            FileStorage::new(path)
+                .await
+                .context("Failed to construct the file storage")?,
+        )
+    } else {
+        anyhow::bail!("Cannot construct the storage abstraction for destination storage. Check the storage type?");
+    };
+
+    if old_uri.starts_with("redis") {
+        convert_from_redis(old_uri, *storage).await?
+    }
+
+    Ok(())
+}
diff --git a/kittybox-rs/src/bin/pyindieblog_to_kittybox.rs b/kittybox-rs/src/bin/pyindieblog_to_kittybox.rs
new file mode 100644
index 0000000..38590c3
--- /dev/null
+++ b/kittybox-rs/src/bin/pyindieblog_to_kittybox.rs
@@ -0,0 +1,68 @@
+use anyhow::{anyhow, Context, Result};
+
+use redis::AsyncCommands;
+use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
+use std::fs::File;
+
+#[derive(Default, Serialize, Deserialize)]
+struct PyindieblogData {
+    posts: Vec<serde_json::Value>,
+    cards: Vec<serde_json::Value>,
+}
+
+#[async_std::main]
+async fn main() -> Result<()> {
+    let mut args = std::env::args();
+    args.next(); // skip argv[0] which is the name
+    let redis_uri = args
+        .next()
+        .ok_or_else(|| anyhow!("No Redis URI provided"))?;
+    let client = redis::Client::open(redis_uri.as_str())
+        .with_context(|| format!("Failed to construct Redis client on {}", redis_uri))?;
+
+    let filename = args
+        .next()
+        .ok_or_else(|| anyhow!("No filename provided for export"))?;
+
+    let mut data: Vec<serde_json::Value>;
+
+    let file = File::create(filename)?;
+
+    let mut conn = client
+        .get_async_std_connection()
+        .await
+        .with_context(|| "Failed to connect to the Redis server")?;
+
+    data = conn
+        .hgetall::<&str, HashMap<String, String>>("posts")
+        .await?
+        .values()
+        .map(|s| {
+            serde_json::from_str::<serde_json::Value>(s)
+                .with_context(|| format!("Failed to parse the following entry: {:?}", s))
+        })
+        .collect::<std::result::Result<Vec<serde_json::Value>, anyhow::Error>>()
+        .with_context(|| "Failed to export h-entries from pyindieblog")?;
+    data.extend(
+        conn.hgetall::<&str, HashMap<String, String>>("hcards")
+            .await?
+            .values()
+            .map(|s| {
+                serde_json::from_str::<serde_json::Value>(s)
+                    .with_context(|| format!("Failed to parse the following card: {:?}", s))
+            })
+            .collect::<std::result::Result<Vec<serde_json::Value>, anyhow::Error>>()
+            .with_context(|| "Failed to export h-cards from pyindieblog")?,
+    );
+
+    data.sort_by_key(|v| {
+        v["properties"]["published"][0]
+            .as_str()
+            .map(|s| s.to_string())
+    });
+
+    serde_json::to_writer(file, &data)?;
+
+    Ok(())
+}
diff --git a/kittybox-rs/src/database/file/mod.rs b/kittybox-rs/src/database/file/mod.rs
new file mode 100644
index 0000000..1e7aa96
--- /dev/null
+++ b/kittybox-rs/src/database/file/mod.rs
@@ -0,0 +1,619 @@
+//#![warn(clippy::unwrap_used)]
+use crate::database::{filter_post, ErrorKind, Result, Storage, StorageError, Settings};
+use std::io::ErrorKind as IOErrorKind;
+use tokio::fs::{File, OpenOptions};
+use tokio::io::{AsyncReadExt, AsyncWriteExt};
+use tokio::task::spawn_blocking;
+use async_trait::async_trait;
+use futures::{stream, StreamExt, TryStreamExt};
+use log::debug;
+use serde_json::json;
+use std::collections::HashMap;
+use std::path::{Path, PathBuf};
+
+impl From<std::io::Error> for StorageError {
+    fn from(source: std::io::Error) -> Self {
+        Self::with_source(
+            match source.kind() {
+                IOErrorKind::NotFound => ErrorKind::NotFound,
+                IOErrorKind::AlreadyExists => ErrorKind::Conflict,
+                _ => ErrorKind::Backend,
+            },
+            "file I/O error",
+            Box::new(source),
+        )
+    }
+}
+
+impl From<tokio::time::error::Elapsed> for StorageError {
+    fn from(source: tokio::time::error::Elapsed) -> Self {
+        Self::with_source(
+            ErrorKind::Backend,
+            "timeout on I/O operation",
+            Box::new(source)
+        )
+    }
+}
+
+// Copied from https://stackoverflow.com/questions/39340924
+// This routine is adapted from the *old* Path's `path_relative_from`
+// function, which works differently from the new `relative_from` function.
+// In particular, this handles the case on unix where both paths are
+// absolute but with only the root as the common directory.
+fn path_relative_from(path: &Path, base: &Path) -> Option<PathBuf> {
+    use std::path::Component;
+
+    if path.is_absolute() != base.is_absolute() {
+        if path.is_absolute() {
+            Some(PathBuf::from(path))
+        } else {
+            None
+        }
+    } else {
+        let mut ita = path.components();
+        let mut itb = base.components();
+        let mut comps: Vec<Component> = vec![];
+        loop {
+            match (ita.next(), itb.next()) {
+                (None, None) => break,
+                (Some(a), None) => {
+                    comps.push(a);
+                    comps.extend(ita.by_ref());
+                    break;
+                }
+                (None, _) => comps.push(Component::ParentDir),
+                (Some(a), Some(b)) if comps.is_empty() && a == b => (),
+                (Some(a), Some(b)) if b == Component::CurDir => comps.push(a),
+                (Some(_), Some(b)) if b == Component::ParentDir => return None,
+                (Some(a), Some(_)) => {
+                    comps.push(Component::ParentDir);
+                    for _ in itb {
+                        comps.push(Component::ParentDir);
+                    }
+                    comps.push(a);
+                    comps.extend(ita.by_ref());
+                    break;
+                }
+            }
+        }
+        Some(comps.iter().map(|c| c.as_os_str()).collect())
+    }
+}
+
+#[allow(clippy::unwrap_used, clippy::expect_used)]
+#[cfg(test)]
+mod tests {
+    #[test]
+    fn test_relative_path_resolving() {
+        let path1 = std::path::Path::new("/home/vika/Projects/kittybox");
+        let path2 = std::path::Path::new("/home/vika/Projects/nixpkgs");
+        let relative_path = super::path_relative_from(path2, path1).unwrap();
+
+        assert_eq!(relative_path, std::path::Path::new("../nixpkgs"))
+    }
+}
+
+// TODO: Check that the path ACTUALLY IS INSIDE THE ROOT FOLDER
+// This could be checked by completely resolving the path
+// and checking if it has a common prefix
+fn url_to_path(root: &Path, url: &str) -> PathBuf {
+    let path = url_to_relative_path(url).to_logical_path(root);
+    if !path.starts_with(root) {
+        // TODO: handle more gracefully
+        panic!("Security error: {:?} is not a prefix of {:?}", path, root)
+    } else {
+        path
+    }
+}
+
+fn url_to_relative_path(url: &str) -> relative_path::RelativePathBuf {
+    let url = warp::http::Uri::try_from(url).expect("Couldn't parse a URL");
+    let mut path = relative_path::RelativePathBuf::new();
+    path.push(url.authority().unwrap().to_string() + url.path() + ".json");
+
+    path
+}
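+
+// Illustrative sketch, not part of the original changeset: how a post URL is
+// expected to map onto a relative path inside the storage root. The URL below
+// is a made-up example.
+#[cfg(test)]
+mod url_mapping_example {
+    #[test]
+    fn maps_url_to_json_path() {
+        let path = super::url_to_relative_path("https://fireburn.ru/posts/hello");
+        assert_eq!(path.as_str(), "fireburn.ru/posts/hello.json");
+    }
+}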
+
+fn modify_post(post: &serde_json::Value, update: &serde_json::Value) -> Result<serde_json::Value> {
+    let mut add_keys: HashMap<String, Vec<serde_json::Value>> = HashMap::new();
+    let mut remove_keys: Vec<String> = vec![];
+    let mut remove_values: HashMap<String, Vec<serde_json::Value>> = HashMap::new();
+    let mut post = post.clone();
+
+    if let Some(delete) = update["delete"].as_array() {
+        remove_keys.extend(
+            delete
+                .iter()
+                .filter_map(|v| v.as_str())
+                .map(|v| v.to_string()),
+        );
+    } else if let Some(delete) = update["delete"].as_object() {
+        for (k, v) in delete {
+            if let Some(v) = v.as_array() {
+                remove_values
+                    .entry(k.to_string())
+                    .or_default()
+                    .extend(v.clone());
+            } else {
+                return Err(StorageError::new(
+                    ErrorKind::BadRequest,
+                    "Malformed update object",
+                ));
+            }
+        }
+    }
+    if let Some(add) = update["add"].as_object() {
+        for (k, v) in add {
+            if let Some(v) = v.as_array() {
+                add_keys.insert(k.to_string(), v.clone());
+            } else {
+                return Err(StorageError::new(
+                    ErrorKind::BadRequest,
+                    "Malformed update object",
+                ));
+            }
+        }
+    }
+    if let Some(replace) = update["replace"].as_object() {
+        for (k, v) in replace {
+            remove_keys.push(k.to_string());
+            if let Some(v) = v.as_array() {
+                add_keys.insert(k.to_string(), v.clone());
+            } else {
+                return Err(StorageError::new(ErrorKind::BadRequest, "Malformed update object"));
+            }
+        }
+    }
+
+    if let Some(props) = post["properties"].as_object_mut() {
+        for k in remove_keys {
+            props.remove(&k);
+        }
+    }
+    for (k, v) in remove_values {
+        let k = &k;
+        let props = if k == "children" {
+            &mut post
+        } else {
+            &mut post["properties"]
+        };
+        v.iter().for_each(|v| {
+            if let Some(vec) = props[k].as_array_mut() {
+                if let Some(index) = vec.iter().position(|w| w == v) {
+                    vec.remove(index);
+                }
+            }
+        });
+    }
+    for (k, v) in add_keys {
+        let props = if k == "children" {
+            &mut post
+        } else {
+            &mut post["properties"]
+        };
+        let k = &k;
+        if let Some(prop) = props[k].as_array_mut() {
+            if k == "children" {
+                v.into_iter()
+                    .rev()
+                    .for_each(|v| prop.insert(0, v));
+            } else {
+                prop.extend(v.into_iter());
+            }
+        } else {
+            post["properties"][k] = serde_json::Value::Array(v)
+        }
+    }
+    Ok(post)
+}
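+
+// Illustrative sketch, not part of the original changeset: how `modify_post`
+// is expected to apply a Micropub update object (add/replace/delete). All
+// property values here are made-up examples.
+#[cfg(test)]
+mod modify_post_example {
+    #[test]
+    fn applies_add_replace_and_delete() {
+        let post = serde_json::json!({
+            "type": ["h-entry"],
+            "properties": {
+                "content": ["Hello"],
+                "category": ["example"]
+            }
+        });
+        let update = serde_json::json!({
+            "replace": {"content": ["Hello, world!"]},
+            "add": {"category": ["demo"]},
+            "delete": {"category": ["example"]}
+        });
+        let new_post = super::modify_post(&post, &update).unwrap();
+        // "replace" swaps out the whole property...
+        assert_eq!(new_post["properties"]["content"][0], "Hello, world!");
+        // ...while "add" appends values and "delete" (as an object) removes specific values.
+        let categories = new_post["properties"]["category"].as_array().unwrap();
+        assert!(categories.iter().any(|v| v == "demo"));
+        assert!(!categories.iter().any(|v| v == "example"));
+    }
+}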
+
+#[derive(Clone, Debug)]
+/// A backend using a folder with JSON files as a backing store.
+/// Uses symbolic links to represent a many-to-one mapping of URLs to a post.
+pub struct FileStorage {
+    root_dir: PathBuf,
+}
+
+impl FileStorage {
+    /// Create a new storage wrapping a folder specified by root_dir.
+    pub async fn new(root_dir: PathBuf) -> Result<Self> {
+        // TODO check if the dir is writable
+        Ok(Self { root_dir })
+    }
+}
+
+async fn hydrate_author<S: Storage>(
+    feed: &mut serde_json::Value,
+    user: &'_ Option<String>,
+    storage: &S,
+) {
+    let url = feed["properties"]["uid"][0]
+        .as_str()
+        .expect("MF2 value should have a UID set! Check if you used normalize_mf2 before recording the post!");
+    if let Some(author) = feed["properties"]["author"].as_array().cloned() {
+        if !feed["type"]
+            .as_array()
+            .expect("MF2 value should have a type set!")
+            .iter()
+            .any(|i| i == "h-card")
+        {
+            let author_list: Vec<serde_json::Value> = stream::iter(author.iter())
+                .then(|i| async move {
+                    if let Some(i) = i.as_str() {
+                        match storage.get_post(i).await {
+                            Ok(post) => match post {
+                                Some(post) => match filter_post(post, user) {
+                                    Some(author) => author,
+                                    None => json!(i),
+                                },
+                                None => json!(i),
+                            },
+                            Err(e) => {
+                                log::error!("Error while hydrating post {}: {}", url, e);
+                                json!(i)
+                            }
+                        }
+                    } else {
+                        i.clone()
+                    }
+                })
+                .collect::<Vec<_>>()
+                .await;
+            if let Some(props) = feed["properties"].as_object_mut() {
+                props["author"] = json!(author_list);
+            } else {
+                feed["properties"] = json!({"author": author_list});
+            }
+        }
+    }
+}
+
+#[async_trait]
+impl Storage for FileStorage {
+    async fn post_exists(&self, url: &str) -> Result<bool> {
+        let path = url_to_path(&self.root_dir, url);
+        debug!("Checking if {:?} exists...", path);
+        /*let result = match tokio::fs::metadata(path).await {
+            Ok(metadata) => {
+                Ok(true)
+            },
+            Err(err) => {
+                if err.kind() == IOErrorKind::NotFound {
+                    Ok(false)
+                } else {
+                    Err(err.into())
+                }
+            }
+        };*/
+        #[allow(clippy::unwrap_used)] // JoinHandle captures panics, this closure shouldn't panic
+        Ok(spawn_blocking(move || path.is_file()).await.unwrap())
+    }
+
+    async fn get_post(&self, url: &str) -> Result<Option<serde_json::Value>> {
+        let path = url_to_path(&self.root_dir, url);
+        // TODO: check that the path actually belongs to the directory of the user who requested it.
+        // It's not like you CAN access someone else's private posts with it,
+        // so it's not exactly a security issue, but it's still not good.
+        debug!("Opening {:?}", path);
+
+        match File::open(&path).await {
+            Ok(mut file) => {
+                let mut content = String::new();
+                // Typechecks because OS magic acts on references
+                // to FDs as if they were behind a mutex
+                AsyncReadExt::read_to_string(&mut file, &mut content).await?;
+                debug!("Read {} bytes successfully from {:?}", content.as_bytes().len(), &path);
+                Ok(Some(serde_json::from_str(&content)?))
+            },
+            Err(err) => {
+                if err.kind() == IOErrorKind::NotFound {
+                    Ok(None)
+                } else {
+                    Err(err.into())
+                }
+            }
+        }
+    }
+
+    async fn put_post(&self, post: &'_ serde_json::Value, user: &'_ str) -> Result<()> {
+        let key = post["properties"]["uid"][0]
+            .as_str()
+            .expect("Tried to save a post without UID");
+        let path = url_to_path(&self.root_dir, key);
+        let tempfile = (&path).with_extension("tmp");
+        debug!("Creating {:?}", path);
+
+        let parent = path.parent().expect("Parent for this directory should always exist").to_owned();
+        if !parent.is_dir() {
+            tokio::fs::create_dir_all(parent).await?;
+        }
+
+        let mut file = tokio::fs::OpenOptions::new()
+            .write(true)
+            .create_new(true)
+            .open(&tempfile).await?;
+
+        file.write_all(post.to_string().as_bytes()).await?;
+        file.flush().await?;
+        drop(file);
+        tokio::fs::rename(&tempfile, &path).await?;
+
+        if let Some(urls) = post["properties"]["url"].as_array() {
+            for url in urls
+                .iter()
+                .map(|i| i.as_str().unwrap())
+            {
+                if url != key && url.starts_with(user) {
+                    let link = url_to_path(&self.root_dir, url);
+                    debug!("Creating a symlink at {:?}", link);
+                    let orig = path.clone();
+                    // We're supposed to have a parent here.
+                    let basedir = link.parent().ok_or_else(|| {
+                        StorageError::new(
+                            ErrorKind::Backend,
+                            "Failed to calculate parent directory when creating a symlink",
+                        )
+                    })?;
+                    let relative = path_relative_from(&orig, basedir).unwrap();
+                    println!("{:?} - {:?} = {:?}", &orig, &basedir, &relative);
+                    tokio::fs::symlink(relative, link).await?;
+                }
+            }
+        }
+
+        if post["type"]
+            .as_array()
+            .unwrap()
+            .iter()
+            .any(|s| s.as_str() == Some("h-feed"))
+        {
+            println!("Adding to channel list...");
+            // Add the h-feed to the channel list
+            let mut path = relative_path::RelativePathBuf::new();
+            path.push(warp::http::Uri::try_from(user.to_string()).unwrap().authority().unwrap().to_string());
+            path.push("channels");
+
+            let path = path.to_path(&self.root_dir);
+            let tempfilename = (&path).with_extension("tmp");
+            let channel_name = post["properties"]["name"][0]
+                .as_str()
+                .map(|s| s.to_string())
+                .unwrap_or_else(String::default);
+            let key = key.to_string();
+
+            let mut tempfile = OpenOptions::new()
+                .write(true)
+                .create_new(true)
+                .open(&tempfilename).await?;
+            let mut file = OpenOptions::new()
+                .read(true)
+                .write(true)
+                .truncate(false)
+                .create(true)
+                .open(&path).await?;
+
+            let mut content = String::new();
+            file.read_to_string(&mut content).await?;
+            drop(file);
+            let mut channels: Vec<super::MicropubChannel> = if !content.is_empty() {
+                serde_json::from_str(&content)?
+            } else {
+                Vec::default()
+            };
+
+            channels.push(super::MicropubChannel {
+                uid: key.to_string(),
+                name: channel_name,
+            });
+
+            tempfile.write_all(serde_json::to_string(&channels)?.as_bytes()).await?;
+            tempfile.flush().await?;
+            drop(tempfile);
+            tokio::fs::rename(tempfilename, path).await?;
+        }
+        Ok(())
+    }
+
+    async fn update_post(&self, url: &'_ str, update: serde_json::Value) -> Result<()> {
+        let path = url_to_path(&self.root_dir, url);
+        let tempfilename = path.with_extension("tmp");
+        #[allow(unused_variables)]
+        let (old_json, new_json) = {
+            let mut temp = OpenOptions::new()
+                .write(true)
+                .create_new(true)
+                .open(&tempfilename)
+                .await?;
+            let mut file = OpenOptions::new()
+                .read(true)
+                .open(&path)
+                .await?;
+
+            let mut content = String::new();
+            file.read_to_string(&mut content).await?;
+            let json: serde_json::Value = serde_json::from_str(&content)?;
+            drop(file);
+            // Apply the editing algorithms
+            let new_json = modify_post(&json, &update)?;
+
+            temp.write_all(new_json.to_string().as_bytes()).await?;
+            temp.flush().await?;
+            drop(temp);
+            tokio::fs::rename(tempfilename, path).await?;
+
+            (json, new_json)
+        };
+        // TODO check if URLs changed between old and new JSON
+        Ok(())
+    }
+
+    async fn get_channels(&self, user: &'_ str) -> Result<Vec<super::MicropubChannel>> {
+        let mut path = relative_path::RelativePathBuf::new();
+        path.push(warp::http::Uri::try_from(user.to_string()).unwrap().authority().unwrap().to_string());
+        path.push("channels");
+
+        let path = path.to_path(&self.root_dir);
+        match File::open(&path).await {
+            Ok(mut f) => {
+                let mut content = String::new();
+                f.read_to_string(&mut content).await?;
+                // This should not happen, but if it does, handle it gracefully
+                if content.is_empty() {
+                    return Ok(vec![]);
+                }
+                let channels: Vec<super::MicropubChannel> = serde_json::from_str(&content)?;
+                Ok(channels)
+            }
+            Err(e) => {
+                if e.kind() == IOErrorKind::NotFound {
+                    Ok(vec![])
+                } else {
+                    Err(e.into())
+                }
+            }
+        }
+    }
+
+    async fn read_feed_with_limit(
+        &self,
+        url: &'_ str,
+        after: &'_ Option<String>,
+        limit: usize,
+        user: &'_ Option<String>,
+    ) -> Result<Option<serde_json::Value>> {
+        if let Some(feed) = self.get_post(url).await? {
+            if let Some(mut feed) = filter_post(feed, user) {
+                if feed["children"].is_array() {
+                    // This code contains several clones. It looks
+                    // like the borrow checker thinks it is preventing
+                    // me from doing something incredibly stupid. The
+                    // borrow checker may or may not be right.
+                    let children = feed["children"].as_array().unwrap().clone();
+                    let mut posts_iter = children
+                        .into_iter()
+                        .map(|s: serde_json::Value| s.as_str().unwrap().to_string());
+                    // Note: we can't actually use skip_while here because we end up emitting `after`.
+                    // This imperative snippet consumes after instead of emitting it, allowing the
+                    // stream of posts to return only those items that truly come *after* it.
+                    if let Some(after) = after {
+                        for s in posts_iter.by_ref() {
+                            if &s == after {
+                                break
+                            }
+                        }
+                    };
+                    let posts = stream::iter(posts_iter)
+                        .map(|url: String| async move { self.get_post(&url).await })
+                        .buffered(std::cmp::min(3, limit))
+                        // Hack to unwrap the Option and sieve out broken links
+                        // Broken links return None, and Stream::filter_map skips Nones.
+                        .try_filter_map(|post: Option<serde_json::Value>| async move { Ok(post) })
+                        .try_filter_map(|post| async move { Ok(filter_post(post, user)) })
+                        .and_then(|mut post| async move {
+                            hydrate_author(&mut post, user, self).await;
+                            Ok(post)
+                        })
+                        .take(limit);
+
+                    match posts.try_collect::<Vec<serde_json::Value>>().await {
+                        Ok(posts) => feed["children"] = serde_json::json!(posts),
+                        Err(err) => {
+                            return Err(StorageError::with_source(
+                                ErrorKind::Other,
+                                "Feed assembly error",
+                                Box::new(err),
+                            ));
+                        }
+                    }
+                }
+                hydrate_author(&mut feed, user, self).await;
+                Ok(Some(feed))
+            } else {
+                Err(StorageError::new(
+                    ErrorKind::PermissionDenied,
+                    "specified user cannot access this post",
+                ))
+            }
+        } else {
+            Ok(None)
+        }
+    }
+
+    async fn delete_post(&self, url: &'_ str) -> Result<()> {
+        let path = url_to_path(&self.root_dir, url);
+        if let Err(e) = tokio::fs::remove_file(path).await {
+            Err(e.into())
+        } else {
+            // TODO check for dangling references in the channel list
+            Ok(())
+        }
+    }
+
+    async fn get_setting(&self, setting: Settings, user: &'_ str) -> Result<String> {
+        log::debug!("User for getting settings: {}", user);
+        let url = warp::http::Uri::try_from(user).expect("Couldn't parse a URL");
+        let mut path = relative_path::RelativePathBuf::new();
+        path.push(url.authority().unwrap().to_string());
+        path.push("settings");
+
+        let path = path.to_path(&self.root_dir);
+        log::debug!("Getting settings from {:?}", &path);
+        let setting = setting.to_string();
+        let mut file = File::open(path).await?;
+        let mut content = String::new();
+        file.read_to_string(&mut content).await?;
+
+        let settings: HashMap<String, String> = serde_json::from_str(&content)?;
+        // XXX consider returning string slices instead of cloning a string every time
+        // it might come with a performance hit and/or memory usage inflation
+        settings
+            .get(&setting)
+            .cloned()
+            .ok_or_else(|| StorageError::new(ErrorKind::Backend, "Setting not set"))
+    }
+
+    async fn set_setting(&self, setting: Settings, user: &'_ str, value: &'_ str) -> Result<()> {
+        let url = warp::http::Uri::try_from(user).expect("Couldn't parse a URL");
+        let mut path = relative_path::RelativePathBuf::new();
+        path.push(url.authority().unwrap().to_string());
+        path.push("settings");
+
+        let path = path.to_path(&self.root_dir);
+        let temppath = path.with_extension("tmp");
+
+        let parent = path.parent().unwrap().to_owned();
+        if !spawn_blocking(move || parent.is_dir()).await.unwrap() {
+            tokio::fs::create_dir_all(path.parent().unwrap()).await?;
+        }
+
+        let (setting, value) = (setting.to_string(), value.to_string());
+
+        let mut tempfile = OpenOptions::new()
+            .write(true)
+            .create_new(true)
+            .open(&temppath)
+            .await?;
+
+        let mut settings: HashMap<String, String> = match File::open(&path).await {
+            Ok(mut f) => {
+                let mut content = String::new();
+                f.read_to_string(&mut content).await?;
+                if content.is_empty() {
+                    HashMap::default()
+                } else {
+                    serde_json::from_str(&content)?
+                }
+            }
+            Err(err) => if err.kind() == IOErrorKind::NotFound {
+                HashMap::default()
+            } else {
+                return Err(err.into())
+            }
+        };
+        settings.insert(setting, value);
+        tempfile.write_all(serde_json::to_string(&settings)?.as_bytes()).await?;
+        drop(tempfile);
+        tokio::fs::rename(temppath, path).await?;
+        Ok(())
+    }
+}
diff --git a/kittybox-rs/src/database/memory.rs b/kittybox-rs/src/database/memory.rs
new file mode 100644
index 0000000..786466c
--- /dev/null
+++ b/kittybox-rs/src/database/memory.rs
@@ -0,0 +1,200 @@
+#![allow(clippy::todo)]
+use async_trait::async_trait;
+use std::collections::HashMap;
+use std::sync::Arc;
+use tokio::sync::RwLock;
+use futures_util::FutureExt;
+use serde_json::json;
+
+use crate::database::{Storage, Result, StorageError, ErrorKind, MicropubChannel, Settings};
+
+#[derive(Clone, Debug)]
+pub struct MemoryStorage {
+    pub mapping: Arc<RwLock<HashMap<String, serde_json::Value>>>,
+    pub channels: Arc<RwLock<HashMap<String, Vec<String>>>>
+}
+
+#[async_trait]
+impl Storage for MemoryStorage {
+    async fn post_exists(&self, url: &str) -> Result<bool> {
+        return Ok(self.mapping.read().await.contains_key(url))
+    }
+
+    async fn get_post(&self, url: &str) -> Result<Option<serde_json::Value>> {
+        let mapping = self.mapping.read().await;
+        match mapping.get(url) {
+            Some(val) => {
+                if let Some(new_url) = val["see_other"].as_str() {
+                    match mapping.get(new_url) {
+                        Some(val) => Ok(Some(val.clone())),
+                        None => {
+                            drop(mapping);
+                            self.mapping.write().await.remove(url);
+                            Ok(None)
+                        }
+                    }
+                } else {
+                    Ok(Some(val.clone()))
+                }
+            },
+            _ => Ok(None)
+        }
+    }
+
+    async fn put_post(&self, post: &'_ serde_json::Value, _user: &'_ str) -> Result<()> {
+        let mapping = &mut self.mapping.write().await;
+        let key: &str = match post["properties"]["uid"][0].as_str() {
+            Some(uid) => uid,
+            None => return Err(StorageError::new(ErrorKind::Other, "post doesn't have a UID"))
+        };
+        mapping.insert(key.to_string(), post.clone());
+        if post["properties"]["url"].is_array() {
+            for url in post["properties"]["url"].as_array().unwrap().iter().map(|i| i.as_str().unwrap().to_string()) {
+                if url != key {
+                    mapping.insert(url, json!({"see_other": key}));
+                }
+            }
+        }
+        if post["type"].as_array().unwrap().iter().any(|i| i == "h-feed") {
+            // This is a feed. Add it to the channels array if it's not already there.
+            println!("{:#}", post);
+            self.channels.write().await.entry(post["properties"]["author"][0].as_str().unwrap().to_string()).or_insert_with(Vec::new).push(key.to_string())
+        }
+        Ok(())
+    }
+
+    async fn update_post(&self, url: &'_ str, update: serde_json::Value) -> Result<()> {
+        let mut add_keys: HashMap<String, serde_json::Value> = HashMap::new();
+        let mut remove_keys: Vec<String> = vec![];
+        let mut remove_values: HashMap<String, Vec<serde_json::Value>> = HashMap::new();
+
+        if let Some(delete) = update["delete"].as_array() {
+            remove_keys.extend(delete.iter().filter_map(|v| v.as_str()).map(|v| v.to_string()));
+        } else if let Some(delete) = update["delete"].as_object() {
+            for (k, v) in delete {
+                if let Some(v) = v.as_array() {
+                    remove_values.entry(k.to_string()).or_default().extend(v.clone());
+                } else {
+                    return Err(StorageError::new(ErrorKind::BadRequest, "Malformed update object"));
+                }
+            }
+        }
+        if let Some(add) = update["add"].as_object() {
+            for (k, v) in add {
+                if v.is_array() {
+                    add_keys.insert(k.to_string(), v.clone());
+                } else {
+                    return Err(StorageError::new(ErrorKind::BadRequest, "Malformed update object"));
+                }
+            }
+        }
+        if let Some(replace) = update["replace"].as_object() {
+            for (k, v) in replace {
+                remove_keys.push(k.to_string());
+                add_keys.insert(k.to_string(), v.clone());
+            }
+        }
+        let mut mapping = self.mapping.write().await;
+        if let Some(mut post) = mapping.get(url) {
+            if let Some(url) = post["see_other"].as_str() {
+                if let Some(new_post) = mapping.get(url) {
+                    post = new_post
+                } else {
+                    return Err(StorageError::new(ErrorKind::NotFound, "The post you have requested is not found in the database."));
+                }
+            }
+            let mut post = post.clone();
+            for k in remove_keys {
+                post["properties"].as_object_mut().unwrap().remove(&k);
+            }
+            for (k, v) in remove_values {
+                let k = &k;
+                let props = if k == "children" {
+                    &mut post
+                } else {
+                    &mut post["properties"]
+                };
+                v.iter().for_each(|v| {
+                    if let Some(vec) = props[k].as_array_mut() {
+                        if let Some(index) = vec.iter().position(|w| w == v) {
+                            vec.remove(index);
+                        }
+                    }
+                });
+            }
+            for (k, v) in add_keys {
+                let props = if k == "children" {
+                    &mut post
+                } else {
+                    &mut post["properties"]
+                };
+                let k = &k;
+                if let Some(prop) = props[k].as_array_mut() {
+                    if k == "children" {
+                        v.as_array().unwrap().iter().cloned().rev().for_each(|v| prop.insert(0, v));
+                    } else {
+                        prop.extend(v.as_array().unwrap().iter().cloned());
+                    }
+                } else {
+                    post["properties"][k] = v
+                }
+            }
+            mapping.insert(post["properties"]["uid"][0].as_str().unwrap().to_string(), post);
+        } else {
+            return Err(StorageError::new(ErrorKind::NotFound, "The designated post wasn't found in the database."));
+        }
+        Ok(())
+    }
+
+    async fn get_channels(&self, user: &'_ str) -> Result<Vec<MicropubChannel>> {
+        match self.channels.read().await.get(user) {
+            Some(channels) => Ok(futures_util::future::join_all(channels.iter()
+                .map(|channel| self.get_post(channel)
+                    .map(|result| result.unwrap())
+                    .map(|post: Option<serde_json::Value>| {
+                        post.map(|post| MicropubChannel {
+                            uid: post["properties"]["uid"][0].as_str().unwrap().to_string(),
+                            name: post["properties"]["name"][0].as_str().unwrap().to_string()
+                        })
+                    })
+                ).collect::<Vec<_>>()).await.into_iter().flatten().collect::<Vec<_>>()),
+            None => Ok(vec![])
+        }
+        
+    }
+
+    #[allow(unused_variables)]
+    async fn read_feed_with_limit(&self, url: &'_ str, after: &'_ Option<String>, limit: usize, user: &'_ Option<String>) -> Result<Option<serde_json::Value>> {
+        todo!()
+    }
+
+    async fn delete_post(&self, url: &'_ str) -> Result<()> {
+        self.mapping.write().await.remove(url);
+        Ok(())
+    }
+
+    #[allow(unused_variables)]
+    async fn get_setting(&self, setting: Settings, user: &'_ str) -> Result<String> {
+        todo!()
+    }
+
+    #[allow(unused_variables)]
+    async fn set_setting(&self, setting: Settings, user: &'_ str, value: &'_ str) -> Result<()> {
+        todo!()
+    }
+}
+
+impl Default for MemoryStorage {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl MemoryStorage {
+    pub fn new() -> Self {
+        Self {
+            mapping: Arc::new(RwLock::new(HashMap::new())),
+            channels: Arc::new(RwLock::new(HashMap::new()))
+        }
+    }
+}
diff --git a/kittybox-rs/src/database/mod.rs b/kittybox-rs/src/database/mod.rs
new file mode 100644
index 0000000..6bf5409
--- /dev/null
+++ b/kittybox-rs/src/database/mod.rs
@@ -0,0 +1,539 @@
+#![warn(missing_docs)]
+use async_trait::async_trait;
+use serde::{Deserialize, Serialize};
+
+mod file;
+pub use crate::database::file::FileStorage;
+#[cfg(test)]
+mod memory;
+#[cfg(test)]
+pub use crate::database::memory::MemoryStorage;
+
+pub use kittybox_util::MicropubChannel;
+
+/// Enum representing different errors that might occur during the database query.
+#[derive(Debug, Clone, Copy)]
+pub enum ErrorKind {
+    /// Backend error (e.g. database connection error)
+    Backend,
+    /// Error due to insufficient contextual permissions for the query
+    PermissionDenied,
+    /// Error due to the database being unable to parse JSON returned from the backing storage.
+    /// Usually indicative of someone fiddling with the database manually instead of using proper tools.
+    JsonParsing,
+    /// Equivalent to a 404 error. Note that some requests return an `Option`,
+    /// in which case `None` is also equivalent to a 404.
+    NotFound,
+    /// The user's query or request to the database was malformed. Used whenever the database processes
+    /// the user's query directly, such as when editing posts inside the database (e.g. the Redis backend).
+    BadRequest,
+    /// The user's query collided with an in-flight request and needs to be retried.
+    Conflict,
+    /// Used when something so weird happens that it becomes indescribable.
+    Other,
+}
+
+/// Enum representing settings that might be stored in the site's database.
+#[derive(Deserialize, Serialize, Debug, Clone, Copy)]
+#[serde(rename_all = "snake_case")]
+pub enum Settings {
+    /// The name of the website -- displayed in the header and the browser titlebar.
+    SiteName,
+}
+
+impl std::string::ToString for Settings {
+    fn to_string(&self) -> String {
+        serde_variant::to_variant_name(self).unwrap().to_string()
+    }
+}
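+
+// Illustrative sketch, not part of the original changeset: the snake_case
+// rename above is what turns `Settings::SiteName` into the key that the
+// storage backends use on disk.
+#[cfg(test)]
+mod settings_key_example {
+    #[test]
+    fn site_name_serializes_to_snake_case() {
+        assert_eq!(super::Settings::SiteName.to_string(), "site_name");
+    }
+}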
+
+/// Error signalled from the database.
+#[derive(Debug)]
+pub struct StorageError {
+    msg: String,
+    source: Option<Box<dyn std::error::Error + Send + Sync>>,
+    kind: ErrorKind,
+}
+
+impl warp::reject::Reject for StorageError {}
+
+impl std::error::Error for StorageError {
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        self.source
+            .as_ref()
+            .map(|e| e.as_ref() as &dyn std::error::Error)
+    }
+}
+impl From<serde_json::Error> for StorageError {
+    fn from(err: serde_json::Error) -> Self {
+        Self {
+            msg: format!("{}", err),
+            source: Some(Box::new(err)),
+            kind: ErrorKind::JsonParsing,
+        }
+    }
+}
+impl std::fmt::Display for StorageError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match match self.kind {
+            ErrorKind::Backend => write!(f, "backend error: "),
+            ErrorKind::JsonParsing => write!(f, "error while parsing JSON: "),
+            ErrorKind::PermissionDenied => write!(f, "permission denied: "),
+            ErrorKind::NotFound => write!(f, "not found: "),
+            ErrorKind::BadRequest => write!(f, "bad request: "),
+            ErrorKind::Conflict => write!(f, "conflict with an in-flight request or existing data: "),
+            ErrorKind::Other => write!(f, "generic storage layer error: "),
+        } {
+            Ok(_) => write!(f, "{}", self.msg),
+            Err(err) => Err(err),
+        }
+    }
+}
+impl serde::Serialize for StorageError {
+    fn serialize<S: serde::Serializer>(
+        &self,
+        serializer: S,
+    ) -> std::result::Result<S::Ok, S::Error> {
+        serializer.serialize_str(&self.to_string())
+    }
+}
+impl StorageError {
+    /// Create a new StorageError of an ErrorKind with a message.
+    fn new(kind: ErrorKind, msg: &str) -> Self {
+        Self {
+            msg: msg.to_string(),
+            source: None,
+            kind,
+        }
+    }
+    /// Create a StorageError using another arbitrary Error as a source.
+    fn with_source(
+        kind: ErrorKind,
+        msg: &str,
+        source: Box<dyn std::error::Error + Send + Sync>,
+    ) -> Self {
+        Self {
+            msg: msg.to_string(),
+            source: Some(source),
+            kind,
+        }
+    }
+    /// Get the kind of an error.
+    pub fn kind(&self) -> ErrorKind {
+        self.kind
+    }
+    /// Get the message as a string slice.
+    pub fn msg(&self) -> &str {
+        &self.msg
+    }
+}
+
+/// A special Result type for the Micropub backing storage.
+pub type Result<T> = std::result::Result<T, StorageError>;
+
+/// Filter the post according to the value of `user`.
+///
+/// Anonymous users cannot view private posts or protected locations;
+/// logged-in users can only view private posts targeted at them;
+/// logged-in users cannot view private location data.
+pub fn filter_post(
+    mut post: serde_json::Value,
+    user: &'_ Option<String>,
+) -> Option<serde_json::Value> {
+    if post["properties"]["deleted"][0].is_string() {
+        return Some(serde_json::json!({
+            "type": post["type"],
+            "properties": {
+                "deleted": post["properties"]["deleted"]
+            }
+        }));
+    }
+    let empty_vec: Vec<serde_json::Value> = vec![];
+    let author = post["properties"]["author"]
+        .as_array()
+        .unwrap_or(&empty_vec)
+        .iter()
+        .map(|i| i.as_str().unwrap().to_string());
+    let visibility = post["properties"]["visibility"][0]
+        .as_str()
+        .unwrap_or("public");
+    let mut audience = author.chain(
+        post["properties"]["audience"]
+            .as_array()
+            .unwrap_or(&empty_vec)
+            .iter()
+            .map(|i| i.as_str().unwrap().to_string()),
+    );
+    if (visibility == "private" && !audience.any(|i| Some(i) == *user))
+        || (visibility == "protected" && user.is_none())
+    {
+        return None;
+    }
+    if post["properties"]["location"].is_array() {
+        let location_visibility = post["properties"]["location-visibility"][0]
+            .as_str()
+            .unwrap_or("private");
+        let mut author = post["properties"]["author"]
+            .as_array()
+            .unwrap_or(&empty_vec)
+            .iter()
+            .map(|i| i.as_str().unwrap().to_string());
+        if (location_visibility == "private" && !author.any(|i| Some(i) == *user))
+            || (location_visibility == "protected" && user.is_none())
+        {
+            post["properties"]
+                .as_object_mut()
+                .unwrap()
+                .remove("location");
+        }
+    }
+    Some(post)
+}
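+
+// Illustrative sketch, not part of the original changeset: how `filter_post`
+// is expected to treat a private post for an anonymous reader versus a reader
+// listed in the audience. The URLs are made-up examples.
+#[cfg(test)]
+mod filter_post_example {
+    #[test]
+    fn private_post_visibility() {
+        let post = serde_json::json!({
+            "type": ["h-entry"],
+            "properties": {
+                "author": ["https://example.com/"],
+                "visibility": ["private"],
+                "audience": ["https://friend.example/"],
+                "content": ["secret"]
+            }
+        });
+        // Anonymous readers should not see the post at all.
+        assert!(super::filter_post(post.clone(), &None).is_none());
+        // A reader listed in the audience sees it.
+        assert!(super::filter_post(post, &Some("https://friend.example/".to_string())).is_some());
+    }
+}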
+
+/// A storage backend for the Micropub server.
+///
+/// Implementations should note that all methods listed on this trait MUST be fully atomic
+/// or lock the database so that write conflicts and reads of half-written data cannot occur.
+#[async_trait]
+pub trait Storage: std::fmt::Debug + Clone + Send + Sync {
+    /// Check if a post exists in the database.
+    async fn post_exists(&self, url: &str) -> Result<bool>;
+
+    /// Load a post from the database in MF2-JSON format, deserialized from JSON.
+    async fn get_post(&self, url: &str) -> Result<Option<serde_json::Value>>;
+
+    /// Save a post to the database as an MF2-JSON structure.
+    ///
+    /// Note that the `post` object MUST have `post["properties"]["uid"][0]` defined.
+    async fn put_post(&self, post: &'_ serde_json::Value, user: &'_ str) -> Result<()>;
+
+    /// Modify a post using an update object as defined in the Micropub spec.
+    ///
+    /// Note to implementors: the update operation MUST be atomic and
+    /// SHOULD lock the database to prevent two clients from overwriting
+    /// each other's changes or corrupting data. Rejecting the request
+    /// is allowed for concurrent updates if waiting for a lock is not
+    /// feasible.
+    async fn update_post(&self, url: &'_ str, update: serde_json::Value) -> Result<()>;
+
+    /// Get a list of channels available for the user represented by the URL `user` to write to.
+    async fn get_channels(&self, user: &'_ str) -> Result<Vec<MicropubChannel>>;
+
+    /// Fetch a feed at `url` and return an h-feed object containing
+    /// `limit` posts after a post by url `after`, filtering the content
+    /// in context of a user specified by `user` (or an anonymous user).
+    ///
+    /// Specifically, private posts that don't include the user in the audience
+    /// will be elided from the feed, and the posts containing location and not
+    /// specifying post["properties"]["location-visibility"][0] == "public"
+    /// will have their location data (but not check-in data) stripped.
+    ///
+    /// This function is used as an optimization so the client, whatever it is,
+    /// doesn't have to fetch posts, then realize some of them are private, and
+    /// fetch more posts.
+    ///
+    /// Note for implementors: if you use streams to fetch posts in parallel
+    /// from the database, preferably make this method use a connection pool
+    /// to reduce overhead of creating a database connection per post for
+    /// parallel fetching.
+    async fn read_feed_with_limit(
+        &self,
+        url: &'_ str,
+        after: &'_ Option<String>,
+        limit: usize,
+        user: &'_ Option<String>,
+    ) -> Result<Option<serde_json::Value>>;
+
+    /// Deletes a post from the database irreversibly. 'nuff said. Must be idempotent.
+    async fn delete_post(&self, url: &'_ str) -> Result<()>;
+
+    /// Gets a setting from the setting store and passes the result.
+    async fn get_setting(&self, setting: Settings, user: &'_ str) -> Result<String>;
+
+    /// Commits a setting to the setting store.
+    async fn set_setting(&self, setting: Settings, user: &'_ str, value: &'_ str) -> Result<()>;
+}
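+
+// Illustrative sketch, not part of the original changeset: a hypothetical
+// helper that is generic over any `Storage` backend, reading a setting and
+// falling back to a default. The default site name is a made-up example.
+#[cfg(test)]
+#[allow(dead_code)]
+async fn site_name_or_default<S: Storage>(backend: &S, user: &str) -> Result<String> {
+    match backend.get_setting(Settings::SiteName, user).await {
+        Ok(name) => Ok(name),
+        Err(_) => {
+            // The setting is missing or unreadable: persist and return a default.
+            backend
+                .set_setting(Settings::SiteName, user, "Untitled Kittybox site")
+                .await?;
+            Ok("Untitled Kittybox site".to_string())
+        }
+    }
+}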
+
+#[cfg(test)]
+mod tests {
+    use super::{MicropubChannel, Storage};
+    use serde_json::json;
+
+    async fn test_basic_operations<Backend: Storage>(backend: Backend) {
+        let post: serde_json::Value = json!({
+            "type": ["h-entry"],
+            "properties": {
+                "content": ["Test content"],
+                "author": ["https://fireburn.ru/"],
+                "uid": ["https://fireburn.ru/posts/hello"],
+                "url": ["https://fireburn.ru/posts/hello", "https://fireburn.ru/posts/test"]
+            }
+        });
+        let key = post["properties"]["uid"][0].as_str().unwrap().to_string();
+        let alt_url = post["properties"]["url"][1].as_str().unwrap().to_string();
+
+        // Reading and writing
+        backend
+            .put_post(&post, "https://fireburn.ru/")
+            .await
+            .unwrap();
+        if let Some(returned_post) = backend.get_post(&key).await.unwrap() {
+            assert!(returned_post.is_object());
+            assert_eq!(
+                returned_post["type"].as_array().unwrap().len(),
+                post["type"].as_array().unwrap().len()
+            );
+            assert_eq!(
+                returned_post["type"].as_array().unwrap(),
+                post["type"].as_array().unwrap()
+            );
+            let props: &serde_json::Map<String, serde_json::Value> =
+                post["properties"].as_object().unwrap();
+            for key in props.keys() {
+                assert_eq!(
+                    returned_post["properties"][key].as_array().unwrap(),
+                    post["properties"][key].as_array().unwrap()
+                )
+            }
+        } else {
+            panic!("For some reason the backend did not return the post.")
+        }
+        // Check the alternative URL - it should return the same post
+        if let Ok(Some(returned_post)) = backend.get_post(&alt_url).await {
+            assert!(returned_post.is_object());
+            assert_eq!(
+                returned_post["type"].as_array().unwrap().len(),
+                post["type"].as_array().unwrap().len()
+            );
+            assert_eq!(
+                returned_post["type"].as_array().unwrap(),
+                post["type"].as_array().unwrap()
+            );
+            let props: &serde_json::Map<String, serde_json::Value> =
+                post["properties"].as_object().unwrap();
+            for key in props.keys() {
+                assert_eq!(
+                    returned_post["properties"][key].as_array().unwrap(),
+                    post["properties"][key].as_array().unwrap()
+                )
+            }
+        } else {
+            panic!("For some reason the backend did not return the post.")
+        }
+    }
+
+    /// Note: this is merely a smoke check and is in no way comprehensive.
+    // TODO updates for feeds must update children using special logic
+    async fn test_update<Backend: Storage>(backend: Backend) {
+        let post: serde_json::Value = json!({
+            "type": ["h-entry"],
+            "properties": {
+                "content": ["Test content"],
+                "author": ["https://fireburn.ru/"],
+                "uid": ["https://fireburn.ru/posts/hello"],
+                "url": ["https://fireburn.ru/posts/hello", "https://fireburn.ru/posts/test"]
+            }
+        });
+        let key = post["properties"]["uid"][0].as_str().unwrap().to_string();
+
+        // Reading and writing
+        backend
+            .put_post(&post, "https://fireburn.ru/")
+            .await
+            .unwrap();
+
+        backend
+            .update_post(
+                &key,
+                json!({
+                    "url": &key,
+                    "add": {
+                        "category": ["testing"],
+                    },
+                    "replace": {
+                        "content": ["Different test content"]
+                    }
+                }),
+            )
+            .await
+            .unwrap();
+
+        match backend.get_post(&key).await {
+            Ok(Some(returned_post)) => {
+                assert!(returned_post.is_object());
+                assert_eq!(
+                    returned_post["type"].as_array().unwrap().len(),
+                    post["type"].as_array().unwrap().len()
+                );
+                assert_eq!(
+                    returned_post["type"].as_array().unwrap(),
+                    post["type"].as_array().unwrap()
+                );
+                assert_eq!(
+                    returned_post["properties"]["content"][0].as_str().unwrap(),
+                    "Different test content"
+                );
+                assert_eq!(
+                    returned_post["properties"]["category"].as_array().unwrap(),
+                    &vec![json!("testing")]
+                );
+            },
+            something_else => {
+                something_else.expect("Shouldn't error").expect("Should have the post");
+            }
+        }
+    }
+
+    async fn test_get_channel_list<Backend: Storage>(backend: Backend) {
+        let feed = json!({
+            "type": ["h-feed"],
+            "properties": {
+                "name": ["Main Page"],
+                "author": ["https://fireburn.ru/"],
+                "uid": ["https://fireburn.ru/feeds/main"]
+            },
+            "children": []
+        });
+        backend
+            .put_post(&feed, "https://fireburn.ru/")
+            .await
+            .unwrap();
+        let chans = backend.get_channels("https://fireburn.ru/").await.unwrap();
+        assert_eq!(chans.len(), 1);
+        assert_eq!(
+            chans[0],
+            MicropubChannel {
+                uid: "https://fireburn.ru/feeds/main".to_string(),
+                name: "Main Page".to_string()
+            }
+        );
+    }
+
+    async fn test_settings<Backend: Storage>(backend: Backend) {
+        backend
+            .set_setting(crate::database::Settings::SiteName, "https://fireburn.ru/", "Vika's Hideout")
+            .await
+            .unwrap();
+        assert_eq!(
+            backend
+                .get_setting(crate::database::Settings::SiteName, "https://fireburn.ru/")
+                .await
+                .unwrap(),
+            "Vika's Hideout"
+        );
+    }
+
+    fn gen_random_post(domain: &str) -> serde_json::Value {
+        use faker_rand::lorem::{Paragraphs, Word};
+
+        let uid = format!(
+            "https://{domain}/posts/{}-{}-{}",
+            rand::random::<Word>(), rand::random::<Word>(), rand::random::<Word>()
+        );
+
+        let post = json!({
+            "type": ["h-entry"],
+            "properties": {
+                "content": [rand::random::<Paragraphs>().to_string()],
+                "uid": [&uid],
+                "url": [&uid]
+            }
+        });
+
+        post
+    }
+
+    async fn test_feed_pagination<Backend: Storage>(backend: Backend) {
+        let posts = std::iter::from_fn(|| Some(gen_random_post("fireburn.ru")))
+            .take(20)
+            .collect::<Vec<serde_json::Value>>();
+
+        let feed = json!({
+            "type": ["h-feed"],
+            "properties": {
+                "name": ["Main Page"],
+                "author": ["https://fireburn.ru/"],
+                "uid": ["https://fireburn.ru/feeds/main"]
+            },
+            "children": posts.iter()
+                .filter_map(|json| json["properties"]["uid"][0].as_str())
+                .collect::<Vec<&str>>()
+        });
+        let key = feed["properties"]["uid"][0].as_str().unwrap();
+
+        backend
+            .put_post(&feed, "https://fireburn.ru/")
+            .await
+            .unwrap();
+        println!("---");
+        for (i, post) in posts.iter().enumerate() {
+            backend.put_post(post, "https://fireburn.ru/").await.unwrap();
+            println!("posts[{}] = {}", i, post["properties"]["uid"][0]);
+        }
+        println!("---");
+        let limit: usize = 10;
+        let result = backend.read_feed_with_limit(key, &None, limit, &None)
+            .await
+            .unwrap()
+            .unwrap();
+        for (i, post) in result["children"].as_array().unwrap().iter().enumerate() {
+            println!("feed[0][{}] = {}", i, post["properties"]["uid"][0]);
+        }
+        println!("---");
+        assert_eq!(result["children"].as_array().unwrap()[0..10], posts[0..10]);
+
+        let result2 = backend.read_feed_with_limit(
+            key,
+            &result["children"]
+                .as_array()
+                .unwrap()
+                .last()
+                .unwrap()
+                ["properties"]["uid"][0]
+                .as_str()
+                .map(|i| i.to_owned()),
+            limit, &None
+        ).await.unwrap().unwrap();
+        for (i, post) in result2["children"].as_array().unwrap().iter().enumerate() {
+            println!("feed[1][{}] = {}", i, post["properties"]["uid"][0]);
+        }
+        println!("---");
+        assert_eq!(result2["children"].as_array().unwrap()[0..10], posts[10..20]);
+
+        // Regression test for #4
+        let nonsense_after = Some("1010101010".to_owned());
+        let result3 = tokio::time::timeout(tokio::time::Duration::from_secs(10), async move {
+            backend.read_feed_with_limit(
+                key, &nonsense_after, limit, &None
+            ).await.unwrap().unwrap()
+        }).await.expect("Operation should not hang: see https://gitlab.com/kittybox/kittybox/-/issues/4");
+        assert!(result3["children"].as_array().unwrap().is_empty());
+    }
+
+    /// Automatically generates a test suite for a storage backend: given a wrapper
+    /// macro (like `file_test!` below) and a module name, it invokes the wrapper
+    /// for every test function.
+    macro_rules! test_all {
+        ($func_name:ident, $mod_name:ident) => {
+            mod $mod_name {
+                $func_name!(test_basic_operations);
+                $func_name!(test_get_channel_list);
+                $func_name!(test_settings);
+                $func_name!(test_update);
+                $func_name!(test_feed_pagination);
+            }
+        }
+    }
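+    // To cover another backend, define a wrapper macro like `file_test!` below that
+    // constructs that backend, then call `test_all!(your_macro, module_name)`.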
+    macro_rules! file_test {
+        ($func_name:ident) => {
+            #[tokio::test]
+            async fn $func_name () {
+                test_logger::ensure_env_logger_initialized();
+                let tempdir = tempdir::TempDir::new("file").expect("Failed to create tempdir");
+                let backend = super::super::FileStorage::new(tempdir.into_path()).await.unwrap();
+                super::$func_name(backend).await
+            }
+        };
+    }
+
+    test_all!(file_test, file);
+
+}
diff --git a/kittybox-rs/src/database/redis/edit_post.lua b/kittybox-rs/src/database/redis/edit_post.lua
new file mode 100644
index 0000000..a398f8d
--- /dev/null
+++ b/kittybox-rs/src/database/redis/edit_post.lua
@@ -0,0 +1,93 @@
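+-- Applies a Micropub-style update (add/replace/delete) to a post stored in Redis.
+-- KEYS[1] is the hash holding all posts, ARGV[1] is the post's key (its URL) and
+-- ARGV[2] is the JSON-encoded update description. Invoked from RedisStorage::update_post.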
+local posts = KEYS[1]
+local update_desc = cjson.decode(ARGV[2])
+local post = cjson.decode(redis.call("HGET", posts, ARGV[1]))
+
+local delete_keys = {}
+local delete_kvs = {}
+local add_keys = {}
+
+if update_desc.replace ~= nil then
+    for k, v in pairs(update_desc.replace) do
+        table.insert(delete_keys, k)
+        add_keys[k] = v
+    end
+end
+if update_desc.delete ~= nil then
+    if update_desc.delete[1] == nil then
+        -- cjson decodes JSON arrays into 1-indexed tables, so no [1] means
+        -- the table has string keys. Probably!
+        for k, v in pairs(update_desc.delete) do
+            delete_kvs[k] = v
+        end
+    else
+        -- Table has numeric keys. Probably!
+        for i, v in ipairs(update_desc.delete) do
+            table.insert(delete_keys, v)
+        end
+    end
+end
+if update_desc.add ~= nil then
+    for k, v in pairs(update_desc.add) do
+        add_keys[k] = v
+    end
+end
+
+for i, v in ipairs(delete_keys) do
+    post["properties"][v] = nil
+    -- TODO delete URL links
+end
+
+for k, v in pairs(delete_kvs) do
+    local index = -1
+    if k == "children" then
+        for j, w in ipairs(post[k]) do
+            if w == v then
+                index = j
+                break
+            end
+        end
+        if index > -1 then
+            table.remove(post[k], index)
+        end
+    else
+        for j, w in ipairs(post["properties"][k]) do
+            if w == v then
+                index = j
+                break
+            end
+        end
+        if index > -1 then
+            table.remove(post["properties"][k], index)
+            -- TODO delete URL links
+        end
+    end
+end
+
+for k, v in pairs(add_keys) do
+    if k == "children" then
+        if post["children"] == nil then
+            post["children"] = {}
+        end
+        for i, w in ipairs(v) do
+            table.insert(post["children"], 1, w)
+        end
+    else
+        if post["properties"][k] == nil then
+            post["properties"][k] = {}
+        end
+        for i, w in ipairs(v) do
+            table.insert(post["properties"][k], w)
+        end
+        if k == "url" then
+            redis.call("HSET", posts, v, cjson.encode({ see_other = post["properties"]["uid"][1] }))
+        elseif k == "channel" then
+            local feed = cjson.decode(redis.call("HGET", posts, v))
+            table.insert(feed["children"], 1, post["properties"]["uid"][1])
+            redis.call("HSET", posts, v, cjson.encode(feed))
+        end
+    end
+end
+
+local encoded = cjson.encode(post)
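+-- Debugging aid: keep the last encoded post under a separate "debug" key.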
+redis.call("SET", "debug", encoded)
+redis.call("HSET", posts, post["properties"]["uid"][1], encoded)
+return
\ No newline at end of file
diff --git a/kittybox-rs/src/database/redis/mod.rs b/kittybox-rs/src/database/redis/mod.rs
new file mode 100644
index 0000000..eeaa3f2
--- /dev/null
+++ b/kittybox-rs/src/database/redis/mod.rs
@@ -0,0 +1,392 @@
+use async_trait::async_trait;
+use futures::stream;
+use futures_util::FutureExt;
+use futures_util::StreamExt;
+use futures_util::TryStream;
+use futures_util::TryStreamExt;
+use lazy_static::lazy_static;
+use log::error;
+use mobc::Pool;
+use mobc_redis::redis;
+use mobc_redis::redis::AsyncCommands;
+use mobc_redis::RedisConnectionManager;
+use serde_json::json;
+use std::time::Duration;
+
+use crate::database::{ErrorKind, MicropubChannel, Result, Storage, StorageError, filter_post};
+use crate::indieauth::User;
+
+struct RedisScripts {
+    edit_post: redis::Script,
+}
+
+impl From<mobc_redis::redis::RedisError> for StorageError {
+    fn from(err: mobc_redis::redis::RedisError) -> Self {
+        Self {
+            msg: format!("{}", err),
+            source: Some(Box::new(err)),
+            kind: ErrorKind::Backend,
+        }
+    }
+}
+impl From<mobc::Error<mobc_redis::redis::RedisError>> for StorageError {
+    fn from(err: mobc::Error<mobc_redis::redis::RedisError>) -> Self {
+        Self {
+            msg: format!("{}", err),
+            source: Some(Box::new(err)),
+            kind: ErrorKind::Backend,
+        }
+    }
+}
+
+lazy_static! {
+    static ref SCRIPTS: RedisScripts = RedisScripts {
+        edit_post: redis::Script::new(include_str!("./edit_post.lua"))
+    };
+}
+
+#[derive(Clone)]
+pub struct RedisStorage {
+    // note to future Vika:
+    // mobc::Pool is actually a fancy name for an Arc
+    // around a shared connection pool with a manager
+    // which makes it safe to implement [`Clone`] and
+    // not worry about new pools being suddenly made
+    //
+    // stop worrying and start coding, you dum-dum
+    redis: mobc::Pool<RedisConnectionManager>,
+}
+
+#[async_trait]
+impl Storage for RedisStorage {
+    async fn get_setting<'a>(&self, setting: &'a str, user: &'a str) -> Result<String> {
+        let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?;
+        Ok(conn
+            .hget::<String, &str, String>(format!("settings_{}", user), setting)
+            .await?)
+    }
+
+    async fn set_setting<'a>(&self, setting: &'a str, user: &'a str, value: &'a str) -> Result<()> {
+        let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?;
+        Ok(conn
+            .hset::<String, &str, &str, ()>(format!("settings_{}", user), setting, value)
+            .await?)
+    }
+
+    async fn delete_post<'a>(&self, url: &'a str) -> Result<()> {
+        let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?;
+        Ok(conn.hdel::<&str, &str, ()>("posts", url).await?)
+    }
+
+    async fn post_exists(&self, url: &str) -> Result<bool> {
+        let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?;
+        Ok(conn.hexists::<&str, &str, bool>("posts", url).await?)
+    }
+
+    async fn get_post(&self, url: &str) -> Result<Option<serde_json::Value>> {
+        let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?;
+        match conn
+            .hget::<&str, &str, Option<String>>("posts", url)
+            .await?
+        {
+            Some(val) => {
+                let parsed = serde_json::from_str::<serde_json::Value>(&val)?;
+                if let Some(new_url) = parsed["see_other"].as_str() {
+                    match conn
+                        .hget::<&str, &str, Option<String>>("posts", new_url)
+                        .await?
+                    {
+                        Some(val) => Ok(Some(serde_json::from_str::<serde_json::Value>(&val)?)),
+                        None => Ok(None),
+                    }
+                } else {
+                    Ok(Some(parsed))
+                }
+            }
+            None => Ok(None),
+        }
+    }
+
+    async fn get_channels(&self, user: &User) -> Result<Vec<MicropubChannel>> {
+        let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?;
+        let channels = conn
+            .smembers::<String, Vec<String>>("channels_".to_string() + user.me.as_str())
+            .await?;
+        // TODO: use streams here instead of this weird thing... how did I even write this?!
+        Ok(futures_util::future::join_all(
+            channels
+                .iter()
+                .map(|channel| {
+                    self.get_post(channel).map(|result| result.unwrap()).map(
+                        |post: Option<serde_json::Value>| {
+                            post.map(|post| MicropubChannel {
+                                uid: post["properties"]["uid"][0].as_str().unwrap().to_string(),
+                                name: post["properties"]["name"][0].as_str().unwrap().to_string(),
+                            })
+                        },
+                    )
+                })
+                .collect::<Vec<_>>(),
+        )
+        .await
+        .into_iter()
+        .flatten()
+        .collect::<Vec<_>>())
+    }
+
+    async fn put_post<'a>(&self, post: &'a serde_json::Value, user: &'a str) -> Result<()> {
+        let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?;
+        let key: &str = match post["properties"]["uid"][0].as_str() {
+            Some(uid) => uid,
+            None => {
+                return Err(StorageError::new(
+                    ErrorKind::BadRequest,
+                    "post doesn't have a UID",
+                ))
+            }
+        };
+        conn.hset::<&str, &str, String, ()>("posts", key, post.to_string())
+            .await?;
+        if post["properties"]["url"].is_array() {
+            for url in post["properties"]["url"]
+                .as_array()
+                .unwrap()
+                .iter()
+                .map(|i| i.as_str().unwrap().to_string())
+            {
+                if url != key && url.starts_with(user) {
+                    conn.hset::<&str, &str, String, ()>(
+                        "posts",
+                        &url,
+                        json!({ "see_other": key }).to_string(),
+                    )
+                    .await?;
+                }
+            }
+        }
+        if post["type"]
+            .as_array()
+            .unwrap()
+            .iter()
+            .any(|i| i == "h-feed")
+        {
+            // This is a feed. Add it to the channels array if it's not already there.
+            conn.sadd::<String, &str, ()>(
+                "channels_".to_string() + post["properties"]["author"][0].as_str().unwrap(),
+                key,
+            )
+            .await?
+        }
+        Ok(())
+    }
+
+    async fn read_feed_with_limit<'a>(
+        &self,
+        url: &'a str,
+        after: &'a Option<String>,
+        limit: usize,
+        user: &'a Option<String>,
+    ) -> Result<Option<serde_json::Value>> {
+        let mut conn = self.redis.get().await?;
+        let mut feed = match conn
+            .hget::<&str, &str, Option<String>>("posts", url)
+            .await
+            .map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error executing a Redis command", Box::new(e)))?
+        {
+            Some(post) => serde_json::from_str::<serde_json::Value>(&post)?,
+            None => return Ok(None),
+        };
+        if feed["see_other"].is_string() {
+            match conn
+                .hget::<&str, &str, Option<String>>("posts", feed["see_other"].as_str().unwrap())
+                .await?
+            {
+                Some(post) => feed = serde_json::from_str::<serde_json::Value>(&post)?,
+                None => return Ok(None),
+            }
+        }
+        if let Some(post) = filter_post(feed, user) {
+            feed = post
+        } else {
+            return Err(StorageError::new(
+                ErrorKind::PermissionDenied,
+                "specified user cannot access this post",
+            ));
+        }
+        if feed["children"].is_array() {
+            let children = feed["children"].as_array().unwrap();
+            let mut posts_iter = children.iter().map(|i| i.as_str().unwrap().to_string());
+            if after.is_some() {
+                loop {
+                    let i = posts_iter.next();
+                    // Stop skipping once the cursor post is found, but also bail out
+                    // when the iterator is exhausted (an unknown cursor), otherwise
+                    // this loop would never terminate.
+                    if i.is_none() || &i == after {
+                        break;
+                    }
+                }
+            }
+            // Placeholder while the real fetching code below remains commented out:
+            // it returns an empty object for every URL instead of loading the post.
+            async fn fetch_post_for_feed(_url: String) -> Option<serde_json::Value> {
+                Some(serde_json::json!({}))
+            }
+            let posts = stream::iter(posts_iter)
+                .map(|url: String| async move {
+                    return Ok(fetch_post_for_feed(url).await);
+                    /*match self.redis.get().await {
+                        Ok(mut conn) => {
+                            match conn.hget::<&str, &str, Option<String>>("posts", &url).await {
+                                Ok(post) => match post {
+                                    Some(post) => {
+                                        Ok(Some(serde_json::from_str(&post)?))
+                                    }
+                                    // Happens because of a broken link (result of an improper deletion?)
+                                    None => Ok(None),
+                                },
+                                Err(err) => Err(StorageError::with_source(ErrorKind::Backend, "Error executing a Redis command", Box::new(err)))
+                            }
+                        }
+                        Err(err) => Err(StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(err)))
+                    }*/
+                })
+                // TODO: determine the optimal value for this buffer
+                // It will probably depend on how often can you encounter a private post on the page
+                // It shouldn't be too large, or we'll start fetching too many posts from the database
+                // It MUST NOT be larger than the typical page size
+                // It MUST NOT be a significant amount of the connection pool size
+                //.buffered(std::cmp::min(3, limit))
+                // Hack to unwrap the Option and sieve out broken links
+                // Broken links return None, and Stream::filter_map skips all Nones.
+                // I wonder if one can use try_flatten() here somehow akin to iters
+                .try_filter_map(|post| async move { Ok(post) })
+                .try_filter_map(|post| async move {
+                    Ok(filter_post(post, user))
+                })
+                .take(limit);
+            match posts.try_collect::<Vec<serde_json::Value>>().await {
+                Ok(posts) => feed["children"] = json!(posts),
+                Err(err) => {
+                    let e = StorageError::with_source(
+                        ErrorKind::Other,
+                        "An error was encountered while processing the feed",
+                        Box::new(err)
+                    );
+                    error!("Error while assembling feed: {}", e);
+                    return Err(e);
+                }
+            }
+        }
+        return Ok(Some(feed));
+    }
+
+    async fn update_post<'a>(&self, mut url: &'a str, update: serde_json::Value) -> Result<()> {
+        let mut conn = self.redis.get().await.map_err(|e| StorageError::with_source(ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)))?;
+        if !conn
+            .hexists::<&str, &str, bool>("posts", url)
+            .await?
+        {
+            return Err(StorageError::new(
+                ErrorKind::NotFound,
+                "can't edit a non-existent post",
+            ));
+        }
+        let post: serde_json::Value =
+            serde_json::from_str(&conn.hget::<&str, &str, String>("posts", url).await?)?;
+        if let Some(new_url) = post["see_other"].as_str() {
+            url = new_url
+        }
+        Ok(SCRIPTS
+            .edit_post
+            .key("posts")
+            .arg(url)
+            .arg(update.to_string())
+            .invoke_async::<_, ()>(&mut conn as &mut redis::aio::Connection)
+            .await?)
+    }
+}
+
+impl RedisStorage {
+    /// Create a new RedisDatabase that will connect to Redis at `redis_uri` to store data.
+    pub async fn new(redis_uri: String) -> Result<Self> {
+        match redis::Client::open(redis_uri) {
+            Ok(client) => Ok(Self {
+                redis: Pool::builder()
+                    .max_open(20)
+                    .max_idle(5)
+                    .get_timeout(Some(Duration::from_secs(3)))
+                    .max_lifetime(Some(Duration::from_secs(120)))
+                    .build(RedisConnectionManager::new(client)),
+            }),
+            Err(e) => Err(e.into()),
+        }
+    }
+
+    pub async fn conn(&self) -> Result<mobc::Connection<mobc_redis::RedisConnectionManager>> {
+        self.redis.get().await.map_err(|e| StorageError::with_source(
+            ErrorKind::Backend, "Error getting a connection from the pool", Box::new(e)
+        ))
+    }
+}
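+
+// Construction sketch (the URI below is just an example value):
+//
+//     let storage = RedisStorage::new("redis://127.0.0.1:6379".to_string()).await?;
+//     let conn = storage.conn().await?;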
+
+#[cfg(test)]
+pub mod tests {
+    use mobc_redis::redis;
+    use std::process;
+    use std::time::Duration;
+
+    pub struct RedisInstance {
+        // We just need to hold on to it so it won't get dropped and remove the socket
+        _tempdir: tempdir::TempDir,
+        uri: String,
+        child: std::process::Child,
+    }
+    impl Drop for RedisInstance {
+        fn drop(&mut self) {
+            self.child.kill().expect("Failed to kill the child!");
+        }
+    }
+    impl RedisInstance {
+        pub fn uri(&self) -> &str {
+            &self.uri
+        }
+    }
+
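+    /// Spawns a throwaway `redis-server` listening on a Unix socket inside a
+    /// tempdir and waits until it accepts connections; the server is killed
+    /// when the returned [`RedisInstance`] is dropped.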
+    pub async fn get_redis_instance() -> RedisInstance {
+        let tempdir = tempdir::TempDir::new("redis").expect("failed to create tempdir");
+        let socket = tempdir.path().join("redis.sock");
+        let redis_child = process::Command::new("redis-server")
+            .current_dir(&tempdir)
+            .arg("--port")
+            .arg("0")
+            .arg("--unixsocket")
+            .arg(&socket)
+            .stdout(process::Stdio::null())
+            .stderr(process::Stdio::null())
+            .spawn()
+            .expect("Failed to spawn Redis");
+        println!("redis+unix:///{}", socket.to_str().unwrap());
+        let uri = format!("redis+unix:///{}", socket.to_str().unwrap());
+        // Redis needs a moment to spin up, so retry until it accepts connections.
+        let client = redis::Client::open(uri.clone()).unwrap();
+        let millisecond = Duration::from_millis(1);
+        let mut retries: usize = 0;
+        const MAX_RETRIES: usize = 60 * 1000/*ms*/;
+        while let Err(err) = client.get_connection() {
+            if err.is_connection_refusal() {
+                async_std::task::sleep(millisecond).await;
+                retries += 1;
+                if retries > MAX_RETRIES {
+                    panic!("Timeout waiting for Redis, last error: {}", err);
+                }
+            } else {
+                panic!("Could not connect: {}", err);
+            }
+        }
+
+        RedisInstance {
+            uri,
+            child: redis_child,
+            _tempdir: tempdir,
+        }
+    }
+}
diff --git a/kittybox-rs/src/frontend/login.rs b/kittybox-rs/src/frontend/login.rs
new file mode 100644
index 0000000..9665ce7
--- /dev/null
+++ b/kittybox-rs/src/frontend/login.rs
@@ -0,0 +1,333 @@
+use http_types::Mime;
+use log::{debug, error};
+use rand::Rng;
+use serde::{Deserialize, Serialize};
+use sha2::{Digest, Sha256};
+use std::convert::TryInto;
+use std::str::FromStr;
+use tide::{Request, Response, Result};
+
+use kittybox_templates::Template;
+use crate::frontend::{FrontendError, IndiewebEndpoints};
+use crate::{database::Storage, ApplicationState};
+use kittybox_templates::LoginPage;
+
+pub async fn form<S: Storage>(req: Request<ApplicationState<S>>) -> Result {
+    let owner = req.url().origin().ascii_serialization() + "/";
+    let storage = &req.state().storage;
+    let authorization_endpoint = req.state().authorization_endpoint.to_string();
+    let token_endpoint = req.state().token_endpoint.to_string();
+    let blog_name = storage
+        .get_setting("site_name", &owner)
+        .await
+        .unwrap_or_else(|_| "Kitty Box!".to_string());
+    let feeds = storage.get_channels(&owner).await.unwrap_or_default();
+
+    Ok(Response::builder(200)
+        .body(
+            Template {
+                title: "Sign in with IndieAuth",
+                blog_name: &blog_name,
+                endpoints: IndiewebEndpoints {
+                    authorization_endpoint,
+                    token_endpoint,
+                    webmention: None,
+                    microsub: None,
+                },
+                feeds,
+                user: req.session().get("user"),
+                content: LoginPage {}.to_string(),
+            }
+            .to_string(),
+        )
+        .content_type("text/html; charset=utf-8")
+        .build())
+}
+
+#[derive(Serialize, Deserialize)]
+struct LoginForm {
+    url: String,
+}
+
+#[derive(Serialize, Deserialize)]
+struct IndieAuthClientState {
+    /// A random value to protect from CSRF attacks.
+    nonce: String,
+    /// The user's initial "me" value.
+    me: String,
+    /// Authorization endpoint used.
+    authorization_endpoint: String,
+}
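+
+// This state is urlencoded and then base64url-encoded into the `state` parameter
+// of the authorization request, and decoded again in the callback handler below.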
+
+#[derive(Serialize, Deserialize)]
+struct IndieAuthRequestParams {
+    response_type: String,         // can only have "code". TODO make an enum
+    client_id: String,             // always a URL. TODO consider making a URL
+    redirect_uri: surf::Url,       // callback URI for IndieAuth
+    state: String, // CSRF protection, should include randomness and be passed through
+    code_challenge: String, // base64-encoded PKCE challenge
+    code_challenge_method: String, // usually "S256". TODO make an enum
+    scope: Option<String>, // oAuth2 scopes to grant,
+    me: surf::Url, // User's entered profile URL
+}
+
+/// Handle login requests. Find the IndieAuth authorization endpoint and redirect to it.
+pub async fn handler<S: Storage>(mut req: Request<ApplicationState<S>>) -> Result {
+    let content_type = req.content_type();
+    if content_type.is_none() {
+        return Err(FrontendError::with_code(400, "Use the login form, Luke.").into());
+    }
+    if content_type.unwrap() != Mime::from_str("application/x-www-form-urlencoded").unwrap() {
+        return Err(
+            FrontendError::with_code(400, "Login form results must be a urlencoded form").into(),
+        );
+    }
+
+    let form = req.body_form::<LoginForm>().await?; // FIXME check if it returns 400 or 500 on error
+    let homepage_uri = surf::Url::parse(&form.url)?;
+    let http = &req.state().http_client;
+
+    let mut fetch_response = http.get(&homepage_uri).send().await?;
+    if fetch_response.status() != 200 {
+        return Err(FrontendError::with_code(
+            500,
+            "Error fetching your authorization endpoint. Check if your website's okay.",
+        )
+        .into());
+    }
+
+    let mut authorization_endpoint: Option<surf::Url> = None;
+    if let Some(links) = fetch_response.header("Link") {
+        // NOTE: this is the same Link header parser used in src/micropub/post.rs:459.
+        // It should be refactored into a standalone function so it can be reused and improved.
+        for link in links.iter().flat_map(|i| i.as_str().split(',')) {
+            debug!("Trying to match {} as authorization_endpoint", link);
+            let mut split_link = link.split(';');
+
+            match split_link.next() {
+                Some(uri) => {
+                    if let Some(uri) = uri.strip_prefix('<').and_then(|uri| uri.strip_suffix('>')) {
+                        debug!("uri: {}", uri);
+                        for prop in split_link {
+                            debug!("prop: {}", prop);
+                            let lowercased = prop.to_ascii_lowercase();
+                            let trimmed = lowercased.trim();
+                            if trimmed == "rel=\"authorization_endpoint\""
+                                || trimmed == "rel=authorization_endpoint"
+                            {
+                                if let Ok(endpoint) = homepage_uri.join(uri) {
+                                    debug!(
+                                        "Found authorization endpoint {} for user {}",
+                                        endpoint,
+                                        homepage_uri.as_str()
+                                    );
+                                    authorization_endpoint = Some(endpoint);
+                                    break;
+                                }
+                            }
+                        }
+                    }
+                }
+                None => continue,
+            }
+        }
+    }
+    // If the authorization_endpoint is still not found after the Link parsing gauntlet,
+    // bring out the big guns and parse HTML to find it.
+    if authorization_endpoint.is_none() {
+        let body = fetch_response.body_string().await?;
+        let pattern =
+            easy_scraper::Pattern::new(r#"<link rel="authorization_endpoint" href="{{url}}">"#)
+                .expect("Cannot parse the pattern for authorization_endpoint");
+        let matches = pattern.matches(&body);
+        debug!("Matches for authorization_endpoint in HTML: {:?}", matches);
+        if !matches.is_empty() {
+            if let Ok(endpoint) = homepage_uri.join(&matches[0]["url"]) {
+                debug!(
+                    "Found authorization endpoint {} for user {}",
+                    endpoint,
+                    homepage_uri.as_str()
+                );
+                authorization_endpoint = Some(endpoint)
+            }
+        }
+    };
+    // If even after this the authorization endpoint is still not found, bail out.
+    if authorization_endpoint.is_none() {
+        error!(
+            "Couldn't find authorization_endpoint for {}",
+            homepage_uri.as_str()
+        );
+        return Err(FrontendError::with_code(
+            400,
+            "Your website doesn't support the IndieAuth protocol.",
+        )
+        .into());
+    }
+    let mut authorization_endpoint: surf::Url = authorization_endpoint.unwrap();
+    let mut rng = rand::thread_rng();
+    let state: String = data_encoding::BASE64URL.encode(
+        serde_urlencoded::to_string(IndieAuthClientState {
+            nonce: (0..8)
+                .map(|_| {
+                    let idx = rng.gen_range(0..INDIEAUTH_PKCE_CHARSET.len());
+                    INDIEAUTH_PKCE_CHARSET[idx] as char
+                })
+                .collect(),
+            me: homepage_uri.to_string(),
+            authorization_endpoint: authorization_endpoint.to_string(),
+        })?
+        .as_bytes(),
+    );
+    // PKCE code generation
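+    // The verifier is 128 random characters from the allowed charset; the challenge
+    // sent to the authorization endpoint is BASE64URL(SHA256(verifier)), i.e. the
+    // "S256" code_challenge_method. The raw verifier is kept in a cookie and sent
+    // later when redeeming the code.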
+    let code_verifier: String = (0..128)
+        .map(|_| {
+            let idx = rng.gen_range(0..INDIEAUTH_PKCE_CHARSET.len());
+            INDIEAUTH_PKCE_CHARSET[idx] as char
+        })
+        .collect();
+    let mut hasher = Sha256::new();
+    hasher.update(code_verifier.as_bytes());
+    let code_challenge: String = data_encoding::BASE64URL.encode(&hasher.finalize());
+
+    authorization_endpoint.set_query(Some(&serde_urlencoded::to_string(
+        IndieAuthRequestParams {
+            response_type: "code".to_string(),
+            client_id: req.url().origin().ascii_serialization(),
+            redirect_uri: req.url().join("login/callback")?,
+            state: state.clone(),
+            code_challenge,
+            code_challenge_method: "S256".to_string(),
+            scope: Some("profile".to_string()),
+            me: homepage_uri,
+        },
+    )?));
+
+    let cookies = vec![
+        format!(
+            r#"indieauth_state="{}"; Same-Site: None; Secure; Max-Age: 600"#,
+            state
+        ),
+        format!(
+            r#"indieauth_code_verifier="{}"; Same-Site: None; Secure; Max-Age: 600"#,
+            code_verifier
+        ),
+    ];
+
+    let cookie_header = cookies
+        .iter()
+        .map(|i| -> http_types::headers::HeaderValue { (i as &str).try_into().unwrap() })
+        .collect::<Vec<_>>();
+
+    Ok(Response::builder(302)
+        .header("Location", authorization_endpoint.to_string())
+        .header("Set-Cookie", &*cookie_header)
+        .build())
+}
+
+const INDIEAUTH_PKCE_CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\
+                                        abcdefghijklmnopqrstuvwxyz\
+                                        1234567890-._~";
+
+#[derive(Deserialize)]
+struct IndieAuthCallbackResponse {
+    code: Option<String>,
+    error: Option<String>,
+    error_description: Option<String>,
+    #[allow(dead_code)]
+    error_uri: Option<String>,
+    // Always present; needs to be decoded further to recover the client state we sent.
+    state: String,
+}
+
+impl IndieAuthCallbackResponse {
+    fn is_successful(&self) -> bool {
+        self.code.is_some()
+    }
+}
+
+#[derive(Serialize, Deserialize)]
+struct IndieAuthCodeRedeem {
+    grant_type: String,
+    code: String,
+    client_id: String,
+    redirect_uri: String,
+    code_verifier: String,
+}
+
+#[derive(Serialize, Deserialize)]
+struct IndieWebProfile {
+    name: Option<String>,
+    url: Option<String>,
+    email: Option<String>,
+    photo: Option<String>,
+}
+
+#[derive(Serialize, Deserialize)]
+struct IndieAuthResponse {
+    me: String,
+    scope: Option<String>,
+    access_token: Option<String>,
+    token_type: Option<String>,
+    profile: Option<IndieWebProfile>,
+}
+
+/// Handle IndieAuth parameters, fetch the final h-card and redirect the user to the homepage.
+pub async fn callback<S: Storage>(mut req: Request<ApplicationState<S>>) -> Result {
+    let params: IndieAuthCallbackResponse = req.query()?;
+    let http: &surf::Client = &req.state().http_client;
+    let origin = req.url().origin().ascii_serialization();
+
+    if req.cookie("indieauth_state").unwrap().value() != params.state {
+        return Err(FrontendError::with_code(400, "The state doesn't match. A possible CSRF attack was prevented. Please try again later.").into());
+    }
+    let state: IndieAuthClientState =
+        serde_urlencoded::from_bytes(&data_encoding::BASE64URL.decode(params.state.as_bytes())?)?;
+
+    if !params.is_successful() {
+        return Err(FrontendError::with_code(
+            400,
+            &format!(
+                "The authorization endpoint indicated a following error: {:?}: {:?}",
+                &params.error, &params.error_description
+            ),
+        )
+        .into());
+    }
+
+    let authorization_endpoint = surf::Url::parse(&state.authorization_endpoint).unwrap();
+    let mut code_response = http
+        .post(authorization_endpoint)
+        .body_string(serde_urlencoded::to_string(IndieAuthCodeRedeem {
+            grant_type: "authorization_code".to_string(),
+            code: params.code.unwrap().to_string(),
+            client_id: origin.to_string(),
+            redirect_uri: origin + "/login/callback",
+            code_verifier: req
+                .cookie("indieauth_code_verifier")
+                .unwrap()
+                .value()
+                .to_string(),
+        })?)
+        .header("Content-Type", "application/x-www-form-urlencoded")
+        .header("Accept", "application/json")
+        .send()
+        .await?;
+
+    if code_response.status() != 200 {
+        return Err(FrontendError::with_code(
+            code_response.status(),
+            &format!(
+                "Authorization endpoint returned an error when redeeming the code: {}",
+                code_response.body_string().await?
+            ),
+        )
+        .into());
+    }
+
+    let json: IndieAuthResponse = code_response.body_json().await?;
+    let session = req.session_mut();
+    session.insert("user", &json.me)?;
+
+    // TODO redirect to the page user came from
+    Ok(Response::builder(302).header("Location", "/").build())
+}
diff --git a/kittybox-rs/src/frontend/mod.rs b/kittybox-rs/src/frontend/mod.rs
new file mode 100644
index 0000000..b87f9c6
--- /dev/null
+++ b/kittybox-rs/src/frontend/mod.rs
@@ -0,0 +1,459 @@
+use std::convert::TryInto;
+use crate::database::Storage;
+use serde::Deserialize;
+use futures_util::TryFutureExt;
+use warp::{http::StatusCode, Filter, host::Authority, path::FullPath};
+
+//pub mod login;
+
+#[allow(unused_imports)]
+use kittybox_templates::{ErrorPage, MainPage, OnboardingPage, Template, POSTS_PER_PAGE};
+
+pub use kittybox_util::IndiewebEndpoints;
+
+#[derive(Deserialize)]
+struct QueryParams {
+    after: Option<String>,
+}
+
+#[derive(Debug)]
+struct FrontendError {
+    msg: String,
+    source: Option<Box<dyn std::error::Error + Send + Sync + 'static>>,
+    code: StatusCode,
+}
+
+impl FrontendError {
+    pub fn with_code<C>(code: C, msg: &str) -> Self
+    where
+        C: TryInto<StatusCode>,
+    {
+        Self {
+            msg: msg.to_string(),
+            source: None,
+            code: code.try_into().unwrap_or(StatusCode::INTERNAL_SERVER_ERROR),
+        }
+    }
+    pub fn msg(&self) -> &str {
+        &self.msg
+    }
+    pub fn code(&self) -> StatusCode {
+        self.code
+    }
+}
+
+impl From<crate::database::StorageError> for FrontendError {
+    fn from(err: crate::database::StorageError) -> Self {
+        Self {
+            msg: "Database error".to_string(),
+            source: Some(Box::new(err)),
+            code: StatusCode::INTERNAL_SERVER_ERROR,
+        }
+    }
+}
+
+impl std::error::Error for FrontendError {
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        self.source
+            .as_ref()
+            .map(|e| e.as_ref() as &(dyn std::error::Error + 'static))
+    }
+}
+
+impl std::fmt::Display for FrontendError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.msg)
+    }
+}
+
+impl warp::reject::Reject for FrontendError {}
+
+async fn get_post_from_database<S: Storage>(
+    db: &S,
+    url: &str,
+    after: Option<String>,
+    user: &Option<String>,
+) -> std::result::Result<serde_json::Value, FrontendError> {
+    match db
+        .read_feed_with_limit(url, &after, POSTS_PER_PAGE, user)
+        .await
+    {
+        Ok(result) => match result {
+            Some(post) => Ok(post),
+            None => Err(FrontendError::with_code(
+                StatusCode::NOT_FOUND,
+                "Post not found in the database",
+            )),
+        },
+        Err(err) => match err.kind() {
+            crate::database::ErrorKind::PermissionDenied => {
+                // TODO: Authentication
+                if user.is_some() {
+                    Err(FrontendError::with_code(
+                        StatusCode::FORBIDDEN,
+                        "User authenticated AND forbidden to access this resource",
+                    ))
+                } else {
+                    Err(FrontendError::with_code(
+                        StatusCode::UNAUTHORIZED,
+                        "User needs to authenticate themselves",
+                    ))
+                }
+            }
+            _ => Err(err.into()),
+        },
+    }
+}
+
+#[allow(dead_code)]
+#[derive(Deserialize)]
+struct OnboardingFeed {
+    slug: String,
+    name: String,
+}
+
+#[allow(dead_code)]
+#[derive(Deserialize)]
+struct OnboardingData {
+    user: serde_json::Value,
+    first_post: serde_json::Value,
+    #[serde(default = "OnboardingData::default_blog_name")]
+    blog_name: String,
+    feeds: Vec<OnboardingFeed>,
+}
+
+impl OnboardingData {
+    fn default_blog_name() -> String {
+        "Kitty Box!".to_owned()
+    }
+}
+
+/*pub async fn onboarding_receiver<S: Storage>(mut req: Request<ApplicationState<S>>) -> Result {
+    use serde_json::json;
+
+    log::debug!("Entering onboarding receiver...");
+
+    // This cannot error out as the URL must be valid. Or there is something horribly wrong
+    // and we shouldn't serve this request anyway.
+    <dyn AsMut<tide::http::Request>>::as_mut(&mut req)
+        .url_mut()
+        .set_scheme("https")
+        .unwrap();
+
+    log::debug!("Parsing the body...");
+    let body = req.body_json::<OnboardingData>().await?;
+    log::debug!("Body parsed!");
+    let backend = &req.state().storage;
+
+    #[cfg(any(not(debug_assertions), test))]
+    let me = req.url();
+    #[cfg(all(debug_assertions, not(test)))]
+    let me = url::Url::parse("https://localhost:8080/").unwrap();
+
+    log::debug!("me value: {:?}", me);
+
+    if get_post_from_database(backend, me.as_str(), None, &None)
+        .await
+        .is_ok()
+    {
+        return Err(FrontendError::with_code(
+            StatusCode::Forbidden,
+            "Onboarding is over. Are you trying to take over somebody's website?!",
+        )
+        .into());
+    }
+    info!("Onboarding new user: {}", me);
+
+    let user = crate::indieauth::User::new(me.as_str(), "https://kittybox.fireburn.ru/", "create");
+
+    log::debug!("Setting the site name to {}", &body.blog_name);
+    backend
+        .set_setting("site_name", user.me.as_str(), &body.blog_name)
+        .await?;
+
+    if body.user["type"][0] != "h-card" || body.first_post["type"][0] != "h-entry" {
+        return Err(FrontendError::with_code(
+            StatusCode::BadRequest,
+            "user and first_post should be h-card and h-entry",
+        )
+        .into());
+    }
+    info!("Validated body.user and body.first_post as microformats2");
+
+    let mut hcard = body.user;
+    let hentry = body.first_post;
+
+    // Ensure the h-card's UID is set to the main page, so it will be fetchable.
+    hcard["properties"]["uid"] = json!([me.as_str()]);
+    // Normalize the h-card - note that it should preserve the UID we set here.
+    let (_, hcard) = crate::micropub::normalize_mf2(hcard, &user);
+    // The h-card is written directly - all the stuff in the Micropub's
+    // post function is just to ensure that the posts will be syndicated
+    // and inserted into proper feeds. Here, we don't have a need for this,
+    // since the h-card is DIRECTLY accessible via its own URL.
+    log::debug!("Saving the h-card...");
+    backend.put_post(&hcard, me.as_str()).await?;
+
+    log::debug!("Creating feeds...");
+    for feed in body.feeds {
+        if feed.name.is_empty() || feed.slug.is_empty() {
+            continue;
+        };
+        log::debug!("Creating feed {} with slug {}", &feed.name, &feed.slug);
+        let (_, feed) = crate::micropub::normalize_mf2(
+            json!({
+                "type": ["h-feed"],
+                "properties": {"name": [feed.name], "mp-slug": [feed.slug]}
+            }),
+            &user,
+        );
+
+        backend.put_post(&feed, me.as_str()).await?;
+    }
+    log::debug!("Saving the h-entry...");
+    // This basically puts the h-entry post through the normal creation process.
+    // We need to insert it into feeds and optionally send a notification to everywhere.
+    req.set_ext(user);
+    crate::micropub::post::new_post(req, hentry).await?;
+
+    Ok(Response::builder(201).header("Location", "/").build())
+}
+*/
+
+fn request_uri() -> impl Filter<Extract = (String,), Error = warp::Rejection> + Copy {
+    crate::util::require_host()
+        .and(warp::path::full())
+        .map(|host: Authority, path: FullPath| "https://".to_owned() + host.as_str() + path.as_str())
+}
+
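+/// Renders the homepage: fetches the owner's h-card and the main h-feed from the
+/// database and renders them, or redirects to /onboarding when they have not been
+/// created yet.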
+#[forbid(clippy::unwrap_used)]
+pub fn homepage<D: Storage>(db: D, endpoints: IndiewebEndpoints) -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
+    let inject_db = move || db.clone();
+    warp::any()
+        .map(inject_db.clone())
+        .and(crate::util::require_host())
+        .and(warp::query())
+        .and_then(|db: D, host: Authority, q: QueryParams| async move {
+            let path = format!("https://{}/", host);
+            let feed_path = format!("https://{}/feeds/main", host);
+
+            match tokio::try_join!(
+                get_post_from_database(&db, &path, None, &None),
+                get_post_from_database(&db, &feed_path, q.after, &None)
+            ) {
+                Ok((hcard, hfeed)) => Ok((
+                    Some(hcard),
+                    Some(hfeed),
+                    StatusCode::OK
+                )),
+                Err(err) => {
+                    if err.code == StatusCode::NOT_FOUND {
+                        // signal for onboarding flow
+                        Ok((None, None, err.code))
+                    } else {
+                        Err(warp::reject::custom(err))
+                    }
+                }
+            }
+        })
+        .and(warp::any().map(move || endpoints.clone()))
+        .and(crate::util::require_host())
+        .and(warp::any().map(inject_db))
+        .then(|content: (Option<serde_json::Value>, Option<serde_json::Value>, StatusCode), endpoints: IndiewebEndpoints, host: Authority, db: D| async move {
+            let owner = format!("https://{}/", host.as_str());
+            let blog_name = db.get_setting(crate::database::Settings::SiteName, &owner).await
+                .unwrap_or_else(|_| "Kitty Box!".to_string());
+            let feeds = db.get_channels(&owner).await.unwrap_or_default();
+            match content {
+                (Some(card), Some(feed), StatusCode::OK) => {
+                    Box::new(warp::reply::html(Template {
+                        title: &blog_name,
+                        blog_name: &blog_name,
+                        endpoints: Some(endpoints),
+                        feeds,
+                        user: None, // TODO
+                        content: MainPage { feed: &feed, card: &card }.to_string()
+                    }.to_string())) as Box<dyn warp::Reply>
+                },
+                (None, None, StatusCode::NOT_FOUND) => {
+                    // TODO Onboarding
+                    Box::new(warp::redirect::found(
+                        hyper::Uri::from_static("/onboarding")
+                    )) as Box<dyn warp::Reply>
+                }
+                _ => unreachable!()
+            }
+        })
+}
+
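+/// Serves the onboarding flow: GET renders the onboarding form, while POST consumes
+/// an `OnboardingData` JSON body, saves the owner's h-card and site name, then posts
+/// the first h-entry through the regular Micropub posting code.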
+pub fn onboarding<D: 'static + Storage>(
+    db: D,
+    endpoints: IndiewebEndpoints,
+    http: reqwest::Client
+) -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
+    let inject_db = move || db.clone();
+    warp::get()
+        .map(move || warp::reply::html(Template {
+            title: "Kittybox - Onboarding",
+            blog_name: "Kittybox",
+            endpoints: Some(endpoints.clone()),
+            feeds: vec![],
+            user: None,
+            content: OnboardingPage {}.to_string()
+        }.to_string()))
+        .or(warp::post()
+            .and(crate::util::require_host())
+            .and(warp::any().map(inject_db))
+            .and(warp::body::json::<OnboardingData>())
+            .and(warp::any().map(move || http.clone()))
+            .and_then(|host: warp::host::Authority, db: D, body: OnboardingData, http: reqwest::Client| async move {
+                let user_uid = format!("https://{}/", host.as_str());
+                if db.post_exists(&user_uid).await.map_err(FrontendError::from)? {
+                    return Ok(warp::redirect(hyper::Uri::from_static("/")));
+                }
+                let user = crate::indieauth::User::new(&user_uid, "https://kittybox.fireburn.ru/", "create");
+                if body.user["type"][0] != "h-card" || body.first_post["type"][0] != "h-entry" {
+                    return Err(FrontendError::with_code(StatusCode::BAD_REQUEST, "user and first_post should be an h-card and an h-entry").into());
+                }
+                db.set_setting(crate::database::Settings::SiteName, user.me.as_str(), &body.blog_name)
+                    .await
+                    .map_err(FrontendError::from)?;
+
+                let (_, hcard) = {
+                    let mut hcard = body.user;
+                    hcard["properties"]["uid"] = serde_json::json!([&user_uid]);
+                    crate::micropub::normalize_mf2(hcard, &user)
+                };
+                db.put_post(&hcard, &user_uid).await.map_err(FrontendError::from)?;
+                let (uid, post) = crate::micropub::normalize_mf2(body.first_post, &user);
+                crate::micropub::_post(user, uid, post, db, http).await.map_err(|e| {
+                    FrontendError {
+                        msg: "Error while posting the first post".to_string(),
+                        source: Some(Box::new(e)),
+                        code: StatusCode::INTERNAL_SERVER_ERROR
+                    }
+                })?;
+                Ok::<_, warp::Rejection>(warp::redirect(hyper::Uri::from_static("/")))
+            }))
+        
+}
+
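+/// Renders any other stored post by its URL, choosing a template based on the
+/// post's type (h-entry, h-card or h-feed) and falling back to an error page.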
+#[forbid(clippy::unwrap_used)]
+pub fn catchall<D: Storage>(db: D, endpoints: IndiewebEndpoints) -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
+    let inject_db = move || db.clone();
+    warp::any()
+        .map(inject_db.clone())
+        .and(request_uri())
+        .and(warp::query())
+        .and_then(|db: D, path: String, query: QueryParams| async move {
+            get_post_from_database(&db, &path, query.after, &None).map_err(warp::reject::custom).await
+        })
+        // Rendering pipeline
+        .and_then(|post: serde_json::Value| async move {
+            let post_name = &post["properties"]["name"][0].as_str().to_owned();
+            match post["type"][0]
+                .as_str()
+            {
+                Some("h-entry") => Ok((
+                    post_name.unwrap_or("Note").to_string(),
+                    kittybox_templates::Entry { post: &post }.to_string(),
+                    StatusCode::OK
+                )),
+                Some("h-card") => Ok((
+                    post_name.unwrap_or("Contact card").to_string(),
+                    kittybox_templates::VCard { card: &post }.to_string(),
+                    StatusCode::OK
+                )),
+                Some("h-feed") => Ok((
+                    post_name.unwrap_or("Feed").to_string(),
+                    kittybox_templates::Feed { feed: &post }.to_string(),
+                    StatusCode::OK
+                )),
+                _ => Err(warp::reject::custom(FrontendError::with_code(
+                    StatusCode::INTERNAL_SERVER_ERROR,
+                    &format!("Couldn't render an unknown type: {}", post["type"][0]),
+                )))
+            }
+        })
+        .recover(|err: warp::Rejection| {
+            use warp::Rejection;
+            use futures_util::future;
+            if let Some(err) = err.find::<FrontendError>() {
+                return future::ok::<(String, String, StatusCode), Rejection>((
+                    format!("Error: HTTP {}", err.code().as_u16()),
+                    ErrorPage { code: err.code(), msg: Some(err.msg().to_string()) }.to_string(),
+                    err.code()
+                ));
+            }
+            future::err::<(String, String, StatusCode), Rejection>(err)
+        })
+        .unify()
+        .and(warp::any().map(move || endpoints.clone()))
+        .and(crate::util::require_host())
+        .and(warp::any().map(inject_db))
+        .then(|content: (String, String, StatusCode), endpoints: IndiewebEndpoints, host: Authority, db: D| async move {
+            let owner = format!("https://{}/", host.as_str());
+            let blog_name = db.get_setting(crate::database::Settings::SiteName, &owner).await
+                .unwrap_or_else(|_| "Kitty Box!".to_string());
+            let feeds = db.get_channels(&owner).await.unwrap_or_default();
+            let (title, content, code) = content;
+            warp::reply::with_status(warp::reply::html(Template {
+                title: &title,
+                blog_name: &blog_name,
+                endpoints: Some(endpoints),
+                feeds,
+                user: None, // TODO
+                content,
+            }.to_string()), code)
+        })
+
+}
+
+static STYLE_CSS: &[u8] = include_bytes!("./style.css");
+static ONBOARDING_JS: &[u8] = include_bytes!("./onboarding.js");
+static ONBOARDING_CSS: &[u8] = include_bytes!("./onboarding.css");
+
+static MIME_JS: &str = "application/javascript";
+static MIME_CSS: &str = "text/css";
+
+fn _dispatch_static(name: &str) -> Option<(&'static [u8], &'static str)> {
+    match name {
+        "style.css" => Some((STYLE_CSS, MIME_CSS)),
+        "onboarding.js" => Some((ONBOARDING_JS, MIME_JS)),
+        "onboarding.css" => Some((ONBOARDING_CSS, MIME_CSS)),
+        _ => None
+    }
+}
+
+pub fn static_files() -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Copy {
+    use futures_util::future;
+
+    warp::get()
+        .and(warp::path::param()
+             .and_then(|filename: String| {
+                 match _dispatch_static(&filename) {
+                     Some((buf, content_type)) => future::ok(
+                         warp::reply::with_header(
+                             buf, "Content-Type", content_type
+                         )
+                     ),
+                     None => future::err(warp::reject())
+                 }
+             }))
+        .or(warp::head()
+            .and(warp::path::param()
+                 .and_then(|filename: String| {
+                     match _dispatch_static(&filename) {
+                         Some((buf, content_type)) => future::ok(
+                             warp::reply::with_header(
+                                 warp::reply::with_header(
+                                     warp::reply(), "Content-Type", content_type
+                                 ),
+                                 "Content-Length", buf.len()
+                             )
+                         ),
+                         None => future::err(warp::reject())
+                     }
+                 })))
+}
diff --git a/kittybox-rs/src/frontend/onboarding.css b/kittybox-rs/src/frontend/onboarding.css
new file mode 100644
index 0000000..6f191b9
--- /dev/null
+++ b/kittybox-rs/src/frontend/onboarding.css
@@ -0,0 +1,33 @@
+form.onboarding > ul#progressbar > li.active {
+    font-weight: bold;
+}
+form.onboarding > ul#progressbar {
+    display: flex;
+    list-style: none;
+    justify-content: space-around;
+}
+
+form.onboarding > fieldset > div.switch_card_buttons {
+    display: flex;
+    justify-content: space-between;
+    width: 100%;
+}
+form.onboarding > fieldset > div.switch_card_buttons button:last-child {
+    margin-left: auto;
+}
+.form_group, .multi_input {
+    display: flex;
+    flex-direction: column;
+}
+.multi_input {
+    align-items: start;
+}
+.multi_input > input {
+    width: 100%;
+    align-self: stretch;
+}
+form.onboarding > fieldset > .form_group + * {
+    margin-top: .75rem;
+}
+form.onboarding textarea {
+    width: 100%;
+    resize: vertical;
+}
diff --git a/kittybox-rs/src/frontend/onboarding.js b/kittybox-rs/src/frontend/onboarding.js
new file mode 100644
index 0000000..7f9aa32
--- /dev/null
+++ b/kittybox-rs/src/frontend/onboarding.js
@@ -0,0 +1,87 @@
+const firstOnboardingCard = "intro";
+
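+// Shows the onboarding card whose id matches "card" and hides the rest,
+// keeping the #progressbar highlight in sync.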
+function switchOnboardingCard(card) {
+    Array.from(document.querySelectorAll("form.onboarding > fieldset")).map(node => {
+        if (node.id == card) {
+            node.style.display = "block";
+        } else {
+            node.style.display = "none";
+        }
+    });
+
+    Array.from(document.querySelectorAll("form.onboarding > ul#progressbar > li")).map(node => {
+        if (node.id == card) {
+            node.classList.add("active")
+        } else {
+            node.classList.remove("active")
+        }
+    })
+};
+
+window.kittybox_onboarding = {
+    switchOnboardingCard
+};
+
+document.querySelector("form.onboarding > ul#progressbar").style.display = "";
+switchOnboardingCard(firstOnboardingCard);
+
+function switchCardOnClick(event) {
+    switchOnboardingCard(event.target.dataset.card)
+}
+
+function multiInputAddMore(event) {
+    let parent = event.target.parentElement;
+    let template = event.target.parentElement.querySelector("template").content.cloneNode(true);
+    parent.prepend(template);
+}
+
+Array.from(document.querySelectorAll("form.onboarding > fieldset button.switch_card")).map(button => {
+    button.addEventListener("click", switchCardOnClick)
+})
+
+Array.from(document.querySelectorAll("form.onboarding > fieldset div.multi_input > button.add_more")).map(button => {
+    button.addEventListener("click", multiInputAddMore)
+    multiInputAddMore({ target: button });
+})
+
+const form = document.querySelector("form.onboarding");
+console.log(form);
+form.onsubmit = async (event) => {
+    console.log(event);
+    event.preventDefault();
+    const form = event.target;
+    const json = {
+        user: {
+            type: ["h-card"],
+            properties: {
+                name: [form.querySelector("#hcard_name").value],
+                pronoun: Array.from(form.querySelectorAll("#hcard_pronouns")).map(input => input.value).filter(i => i != ""),
+                url: Array.from(form.querySelectorAll("#hcard_url")).map(input => input.value).filter(i => i != ""),
+                note: [form.querySelector("#hcard_note").value]
+            }
+        },
+        first_post: {
+            type: ["h-entry"],
+            properties: {
+                content: [form.querySelector("#first_post_content").value]
+            }
+        },
+        blog_name: form.querySelector("#blog_name").value,
+        feeds: Array.from(form.querySelectorAll(".multi_input#custom_feeds > fieldset.feed")).map(form => {
+            return {
+                name: form.querySelector("#feed_name").value,
+                slug: form.querySelector("#feed_slug").value
+            }
+        }).filter(feed => feed.name !== "" && feed.slug !== "")
+    };
+
+    await fetch("/", {
+        method: "POST",
+        body: JSON.stringify(json),
+        headers: { "Content-Type": "application/json" }
+    }).then(response => {
+        if (response.status == 201) {
+            window.location.href = window.location.href;
+        }
+    })
+}
\ No newline at end of file
diff --git a/kittybox-rs/src/frontend/style.css b/kittybox-rs/src/frontend/style.css
new file mode 100644
index 0000000..109bba0
--- /dev/null
+++ b/kittybox-rs/src/frontend/style.css
@@ -0,0 +1,194 @@
+@import url('https://fonts.googleapis.com/css2?family=Caveat:wght@500&family=Lato&display=swap');
+
+:root {
+    font-family: var(--font-normal);
+    --font-normal: 'Lato', sans-serif;
+    --font-accent: 'Caveat', cursive;
+    --type-scale: 1.250;
+
+    --primary-accent: purple;
+    --secondary-accent: gold;
+}
+* {
+    box-sizing: border-box;
+}
+body {
+    margin: 0;
+}
+h1, h2, h3, h4, h5, h6 {
+    font-family: var(--font-accent);
+}
+.titanic {
+    font-size: 3.815rem
+}
+h1, .xxxlarge {
+    margin-top: 0;
+    margin-bottom: 0;
+    font-size: 3.052rem;
+}
+h2, .xxlarge {font-size: 2.441rem;}
+h3, .xlarge {font-size: 1.953rem;}
+h4, .larger {font-size: 1.563rem;}
+h5, .large {font-size: 1.25rem;}
+h6, .normal {font-size: 1rem;}
+small, .small { font-size: 0.8em; }
+
+nav#headerbar {
+    background: var(--primary-accent);
+    color: whitesmoke;
+    border-bottom: .75rem solid var(--secondary-accent);
+    padding: .3rem;
+    vertical-align: center;
+    position: sticky;
+    top: 0;
+}
+nav#headerbar a#homepage {
+    font-weight: bolder;
+    font-family: var(--font-accent);
+    font-size: 2rem;
+}
+nav#headerbar > ul {
+    display: flex;
+    padding: inherit;
+    margin: inherit;
+    gap: .75em;
+}
+nav#headerbar > ul > li {
+    display: inline-flex;
+    flex-direction: column;
+    marker: none;
+    padding: inherit;
+    margin: inherit;
+    justify-content: center;
+}
+nav#headerbar > ul > li.shiftright {
+    margin-left: auto;
+}
+nav#headerbar a {
+    color: white;
+}
+body > main {
+    max-width: 60rem;
+    margin: auto;
+    padding: .75rem;
+}
+body > footer {
+    text-align: center;
+}
+.sidebyside {
+    display: flex;
+    flex-wrap: wrap;
+    gap: .75rem;
+    margin-top: .75rem;
+    margin-bottom: .75rem;
+}
+.sidebyside > * {
+    width: 100%;
+    border: .125rem solid black;
+    border-radius: .75rem;
+    padding: .75rem;
+    margin-top: 0 !important;
+    margin-bottom: 0 !important;
+    flex-basis: 28rem;
+    flex-grow: 1;
+}
+article > * + * {
+    margin-top: .75rem;
+}
+article > header {
+    padding-bottom: .75rem;
+    border-bottom: 1px solid gray;
+}
+article > footer {
+    border-top: 1px solid gray;
+}
+article.h-entry, article.h-feed, article.h-card, article.h-event {
+    border: 2px solid black;
+    border-radius: .75rem;
+    padding: .75rem;
+    margin-top: .75rem;
+    margin-bottom: .75rem;
+}
+.webinteractions > ul.counters {
+    display: inline-flex;
+    padding: inherit;
+    margin: inherit;
+    gap: .75em;
+    flex-wrap: wrap;
+}
+.webinteractions > ul.counters > li > .icon {
+    font-size: 1.5em;
+}
+.webinteractions > ul.counters > li {
+    display: inline-flex;
+    align-items: center;
+    gap: .5em;
+}
+article.h-entry > header.metadata ul {
+    padding-inline-start: unset;
+    margin: unset;
+}
+article.h-entry > header.metadata ul.categories {
+    flex-wrap: wrap;
+    display: inline-flex;
+    list-style-type: none;
+}
+article.h-entry > header.metadata ul.categories li {
+    display: inline;
+    margin-inline-start: unset;
+}
+article.h-entry > header.metadata ul li {
+    margin-inline-start: 2.5em;
+}
+article.h-entry .e-content pre {
+    border: 1px solid gray;
+    border-radius: 0.5em;
+    overflow-y: auto;
+    padding: 0.5em;
+}
+article.h-entry img.u-photo {
+    max-width: 80%;
+    max-height: 90vh;
+    display: block;
+    margin: auto;
+}
+article.h-entry img.u-photo + * {
+    margin-top: .75rem;
+}
+article.h-entry > header.metadata span + span::before {
+    content: " | "
+}
+li.p-category::before {
+    content: " #";
+}
+
+article.h-entry ul.categories {
+    gap: .2em;
+}
+article.h-card img.u-photo {
+    border-radius: 100%;
+    float: left;
+    height: 8rem;
+    border: 1px solid gray;
+    margin-right: .75em;
+    object-fit: cover;
+    aspect-ratio: 1;
+}
+
+.mini-h-card img {
+    height: 2em;
+    display: inline-block;
+    border: 2px solid gray;
+    border-radius: 100%;
+    margin-right: 0.5rem;
+}
+
+.mini-h-card * {
+    vertical-align: middle;
+}
+
+.mini-h-card a {
+    text-decoration: none;
+}
diff --git a/kittybox-rs/src/index.html b/kittybox-rs/src/index.html
new file mode 100644
index 0000000..1fc2a96
--- /dev/null
+++ b/kittybox-rs/src/index.html
@@ -0,0 +1,182 @@
+<!DOCTYPE html>
+<html>
+    <head>
+        <meta charset="utf-8">
+        <title>Kittybox-Micropub debug client</title>
+        <style type="text/css">
+        * {
+            box-sizing: border-box;
+        }
+        :root {
+            font-family: sans-serif;
+        }
+        body {
+            margin: 0;
+        }
+        body > main {
+            margin: auto;
+            max-width: 1024px;
+        }
+        h1.header {
+            margin-top: 0.75em;
+            text-align: center;
+        }
+        fieldset + fieldset, fieldset + input, section + section, section + fieldset {
+            margin-top: 0.75em;
+        }
+        input[type="submit"] {
+            margin-left: auto;
+            display: block;
+        }
+        form > fieldset > section > label {
+            width: 100%;
+            display: block;
+        }
+        form > fieldset > section > input, form > fieldset > section > textarea {
+            width: 100%;
+        }
+        textarea {
+            min-height: 10em;
+        }
+        </style>
+        <script type="module">
+            const form = document.getElementById("micropub");
+            const channel_select_radio = document.getElementById("select_channels");
+            channel_select_radio.onclick = async () => {
+                const channels = await query_channels()
+                if (channels !== undefined) {
+                    populate_channel_list(channels)
+                }
+            }
+            const no_channel_radio = document.getElementById("no_channel");
+            no_channel_radio.onclick = () => {
+                document.getElementById("channels").style.display = "none";
+                const channel_list = document.getElementById("channels_target")
+                channel_list.innerHTML = "";
+            }
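+            // Builds an MF2-JSON h-entry from the form fields; optional fields left empty are omitted (undefined).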
+            function construct_body(form) {
+                return {
+                    type: ["h-entry"],
+                    properties: {
+                        content: [form.elements.content.value],
+                        name: form.elements.name.value ? [form.elements.name.value] : undefined,
+                        category: form.elements.category.value ? form.elements.category.value.split(",").map(val => val.trim()) : undefined,
+                        channel: form.elements.channel_select.value ?  Array.from(form.elements.channel).map(i => i.checked ? i.value : false).filter(i => i) : undefined
+                    }
+                }
+            }
+
+            async function query_channels() {
+                const response = await fetch(form.action + "?q=config", {
+                    headers: {
+                        "Authorization": `Bearer ${form.elements.access_token.value}`
+                    }
+                })
+
+                const config = await response.json();
+
+                return config["channels"]
+            }
+
+            function populate_channel_list(channels) {
+                document.getElementById("channels").style.display = "block";
+                const channel_list = document.getElementById("channels_target")
+                channel_list.innerHTML = "";
+                channels.forEach((channel) => {
+                    const template = document.getElementById("channel_selector").content.cloneNode(true)
+                    const input = template.querySelector("input")
+                    const label = template.querySelector("label")
+                    input.id = `channel_selector_option_${channel.uid}`
+                    input.value = channel.uid
+                    label.for = input.id
+                    label.innerHTML = `<a href="${channel.uid}">${channel.name}</a>`
+
+                    channel_list.appendChild(template)
+                })
+            }
+
+            form.onsubmit = async (event) => {
+                event.preventDefault()
+                console.log(JSON.stringify(construct_body(form)))
+                try {
+                    const response = await fetch(form.action, {
+                        method: form.method,
+                        headers: {
+                            "Authorization": `Bearer ${form.elements.access_token.value}`,
+                            "Content-Type": "application/json"
+                        },
+                        body: JSON.stringify(construct_body(form))
+                    })
+                    if (response.status != 201 && response.status != 202) {
+                        console.error(await response.json());
+                    }
+                    if (response.headers.get("Location")) {
+                        window.location.href = response.headers.get("Location");
+                    }
+                } catch (e) {
+                    console.error(e)
+                }
+            }
+        </script>
+    </head>
+    <body>
+        <h1 class="header">Kittybox-Micropub debug client</h1>
+
+        <main>
+            <p>
+                In a pinch? Lost your Micropub client, but need to make a quick announcement?
+                Worry not, the debug client has your back. <i>I just hope you have a spare Micropub token stored somewhere like I do...</i>
+            </p>
+
+            <form action="/micropub" method="POST" id="micropub">
+                <fieldset>
+                    <legend>Authorization details</legend>
+                    <section>
+                        <label for="access_token">Access token:</label>
+                        <input id="access_token" name="access_token" type="password">
+
+                        <p><a href="https://gimme-a-token.5eb.nl/" target="_blank">Get an access token (will open in a new tab)</a></p>
+                    </section>
+                </fieldset>
+                <fieldset>
+                    <legend>Post details:</legend>
+                    <section>
+                        <label for="name">Name (leave blank for an unnamed post):</label>
+                        <input id="name" type="text">
+                    </section>
+                    <section>
+                        <label for="content">Content:</label>
+                        <textarea id="content" placeholder="Your post's text goes here"></textarea>
+                    </section>
+                    <section>
+                        <label for="category">Categories (separeted by commas):</label>
+                        <input id="category" type="text">
+                    </section>
+                    <fieldset>
+                        <legend>Channels</legend>
+                        <section>
+                            <input type="radio" id="no_channel" name="channel_select" checked value="">
+                            <label for="no_channel">Default channel only</label>
+                        </section>
+
+                        <section>
+                            <input type="radio" id="select_channels" name="channel_select" value="on">
+                            <label for="select_channels">Select channels manually</label>
+                        </section>
+                        
+                        <fieldset id="channels" style="display: none">
+                            <legend>Available channels:</legend>
+                            <template id="channel_selector">
+                                <section>
+                                    <input type="checkbox" name="channel" id="" value="">
+                                    <label for=""></label>
+                                </section>
+                            </template>
+                            <div id="channels_target"></div>
+                        </fieldset>
+                    </fieldset>
+                </fieldset>
+                <input type="submit">
+            </form>
+        </main>
+    </body>
+</html>
\ No newline at end of file
diff --git a/kittybox-rs/src/indieauth.rs b/kittybox-rs/src/indieauth.rs
new file mode 100644
index 0000000..57c0301
--- /dev/null
+++ b/kittybox-rs/src/indieauth.rs
@@ -0,0 +1,291 @@
+use url::Url;
+use serde::{Serialize, Deserialize};
+use warp::{Filter, Rejection, reject::MissingHeader};
+
+#[derive(Deserialize, Serialize, Debug, PartialEq, Clone)]
+pub struct User {
+    pub me: Url,
+    pub client_id: Url,
+    scope: String,
+}
+
+#[derive(Debug, Clone, PartialEq, Copy)]
+pub enum ErrorKind {
+    PermissionDenied,
+    NotAuthorized,
+    TokenEndpointError,
+    JsonParsing,
+    Other
+}
+
+#[derive(Deserialize, Serialize, Debug, Clone)]
+pub struct TokenEndpointError {
+    error: String,
+    error_description: String
+}
+
+#[derive(Debug)]
+pub struct IndieAuthError {
+    source: Option<Box<dyn std::error::Error + Send + Sync>>,
+    kind: ErrorKind,
+    msg: String
+}
+
+impl std::error::Error for IndieAuthError {
+    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
+        self.source.as_ref().map(|e| e.as_ref() as &dyn std::error::Error)
+    }
+}
+
+impl std::fmt::Display for IndieAuthError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self.kind {
+            ErrorKind::TokenEndpointError => write!(f, "token endpoint returned an error: ")?,
+            ErrorKind::JsonParsing => write!(f, "error while parsing token endpoint response: ")?,
+            ErrorKind::NotAuthorized => write!(f, "token endpoint did not recognize the token: ")?,
+            ErrorKind::PermissionDenied => write!(f, "token endpoint rejected the token: ")?,
+            ErrorKind::Other => write!(f, "token endpoint communication error: ")?,
+        }
+        write!(f, "{}", self.msg)
+    }
+}
+
+impl From<serde_json::Error> for IndieAuthError {
+    fn from(err: serde_json::Error) -> Self {
+        Self {
+            msg: format!("{}", err),
+            source: Some(Box::new(err)),
+            kind: ErrorKind::JsonParsing,
+        }
+    }
+}
+
+impl From<reqwest::Error> for IndieAuthError {
+    fn from(err: reqwest::Error) -> Self {
+        Self {
+            msg: format!("{}", err),
+            source: Some(Box::new(err)),
+            kind: ErrorKind::Other,
+        }
+    }
+}
+
+impl warp::reject::Reject for IndieAuthError {}
+
+impl User {
+    pub fn check_scope(&self, scope: &str) -> bool {
+        self.scopes().any(|i| i == scope)
+    }
+    pub fn scopes(&self) -> std::str::SplitAsciiWhitespace<'_> {
+        self.scope.split_ascii_whitespace()
+    }
+    pub fn new(me: &str, client_id: &str, scope: &str) -> Self {
+        Self {
+            me: Url::parse(me).unwrap(),
+            client_id: Url::parse(client_id).unwrap(),
+            scope: scope.to_string(),
+        }
+    }
+}
+
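+/// Builds a warp filter that extracts the `Authorization` header, verifies it
+/// against the given token endpoint and yields the authenticated [`User`].
+/// Requests without a valid token are rejected with an [`IndieAuthError`].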
+pub fn require_token(token_endpoint: String, http: reqwest::Client) -> impl Filter<Extract = (User,), Error = Rejection> + Clone {
+    // It might be OK to panic here, because we're still inside the initialisation sequence for now.
+    // Proper error handling on the top of this should be used though.
+    let token_endpoint_uri = url::Url::parse(&token_endpoint)
+        .expect("Couldn't parse the token endpoint URI!");
+    warp::any()
+        .map(move || token_endpoint_uri.clone())
+        .and(warp::any().map(move || http.clone()))
+        .and(warp::header::<String>("Authorization").recover(|err: Rejection| async move {
+            if err.find::<MissingHeader>().is_some() {
+                Err(IndieAuthError {
+                    source: None,
+                    msg: "No Authorization header provided.".to_string(),
+                    kind: ErrorKind::NotAuthorized
+                }.into())
+            } else {
+                Err(err)
+            }
+        }).unify())
+        .and_then(|token_endpoint, http: reqwest::Client, token| async move {
+            use hyper::StatusCode;
+
+            match http
+                .get(token_endpoint)
+                .header("Authorization", token)
+                .header("Accept", "application/json")
+                .send()
+                .await
+            {
+                Ok(res) => match res.status() {
+                    StatusCode::OK => match res.json::<serde_json::Value>().await {
+                        Ok(json) => match serde_json::from_value::<User>(json.clone()) {
+                            Ok(user) => Ok(user),
+                            Err(err) => {
+                                if let Some(false) = json["active"].as_bool() {
+                                    Err(IndieAuthError {
+                                        source: None,
+                                        kind: ErrorKind::NotAuthorized,
+                                        msg: "The token is not active for this user.".to_owned()
+                                    }.into())
+                                } else {
+                                    Err(IndieAuthError::from(err).into())
+                                }
+                            }
+                        }
+                        Err(err) => Err(IndieAuthError::from(err).into())
+                    },
+                    StatusCode::BAD_REQUEST => {
+                        match res.json::<TokenEndpointError>().await {
+                            Ok(err) => {
+                                if err.error == "unauthorized" {
+                                    Err(IndieAuthError {
+                                        source: None,
+                                        kind: ErrorKind::NotAuthorized,
+                                        msg: err.error_description
+                                    }.into())
+                                } else {
+                                    Err(IndieAuthError {
+                                        source: None,
+                                        kind: ErrorKind::TokenEndpointError,
+                                        msg: err.error_description
+                                    }.into())
+                                }
+                            },
+                            Err(err) => Err(IndieAuthError::from(err).into())
+                        }
+                    },
+                    _ => Err(IndieAuthError {
+                        source: None,
+                        msg: format!("Token endpoint returned {}", res.status()),
+                        kind: ErrorKind::TokenEndpointError
+                    }.into())
+                },
+                Err(err) => Err(warp::reject::custom(IndieAuthError::from(err)))
+            }
+        })
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{User, IndieAuthError, require_token};
+    use httpmock::prelude::*;
+    
+    #[test]
+    fn user_scopes_are_checkable() {
+        let user = User::new(
+            "https://fireburn.ru/",
+            "https://quill.p3k.io/",
+            "create update media",
+        );
+
+        assert!(user.check_scope("create"));
+        assert!(!user.check_scope("delete"));
+    }
+
+    #[inline]
+    fn get_http_client() -> reqwest::Client {
+        reqwest::Client::new()
+    }
+    
+    #[tokio::test]
+    async fn test_require_token_with_token() {
+        let server = MockServer::start_async().await;
+        server.mock_async(|when, then| {
+            when.path("/token")
+                .header("Authorization", "Bearer token");
+
+            then.status(200)
+                .header("Content-Type", "application/json")
+                .json_body(serde_json::to_value(User::new(
+                    "https://fireburn.ru/",
+                    "https://quill.p3k.io/",
+                    "create update media",
+                )).unwrap());
+        }).await;
+        
+        let filter = require_token(server.url("/token"), get_http_client());
+
+        let res: User = warp::test::request()
+            .path("/")
+            .header("Authorization", "Bearer token")
+            .filter(&filter)
+            .await
+            .unwrap();
+
+        assert_eq!(res.me.as_str(), "https://fireburn.ru/")
+    }
+
+    #[tokio::test]
+    async fn test_require_token_fake_token() {
+        let server = MockServer::start_async().await;
+        server.mock_async(|when, then| {
+            when.path("/refuse_token");
+
+            then.status(200)
+                .json_body(serde_json::json!({"active": false}));
+        }).await;
+
+        let filter = require_token(server.url("/refuse_token"), get_http_client());
+
+        let res = warp::test::request()
+            .path("/")
+            .header("Authorization", "Bearer token")
+            .filter(&filter)
+            .await
+            .unwrap_err();
+
+        let err: &IndieAuthError = res.find().unwrap();
+        assert_eq!(err.kind, super::ErrorKind::NotAuthorized);
+    }
+
+    #[tokio::test]
+    async fn test_require_token_no_token() {
+        let server = MockServer::start_async().await;
+        let mock = server.mock_async(|when, then| {
+            when.path("/should_never_be_called");
+
+            then.status(500);
+        }).await;
+        let filter = require_token(server.url("/should_never_be_called"), get_http_client());
+
+        let res = warp::test::request()
+            .path("/")
+            .filter(&filter)
+            .await
+            .unwrap_err();
+
+        let err: &IndieAuthError = res.find().unwrap();
+        assert_eq!(err.kind, super::ErrorKind::NotAuthorized);
+
+        mock.assert_hits_async(0).await;
+    }
+
+    #[tokio::test]
+    async fn test_require_token_400_error_unauthorized() {
+        let server = MockServer::start_async().await;
+        server.mock_async(|when, then| {
+            when.path("/refuse_token_with_400");
+
+            then.status(400)
+                .json_body(serde_json::json!({
+                    "error": "unauthorized",
+                    "error_description": "The token provided was malformed"
+                }));
+        }).await;
+
+        let filter = require_token(server.url("/refuse_token_with_400"), get_http_client());
+
+        let res = warp::test::request()
+            .path("/")
+            .header("Authorization", "Bearer token")
+            .filter(&filter)
+            .await
+            .unwrap_err();
+
+        let err: &IndieAuthError = res.find().unwrap();
+        assert_eq!(err.kind, super::ErrorKind::NotAuthorized);
+    }
+}
diff --git a/kittybox-rs/src/lib.rs b/kittybox-rs/src/lib.rs
new file mode 100644
index 0000000..1800b5b
--- /dev/null
+++ b/kittybox-rs/src/lib.rs
@@ -0,0 +1,103 @@
+#![forbid(unsafe_code)]
+#![warn(clippy::todo)]
+
+pub mod metrics;
+/// Database abstraction layer for Kittybox, allowing the CMS to work with any kind of database.
+pub mod database;
+pub mod micropub;
+pub mod media;
+pub mod indieauth;
+pub mod frontend;
+
+pub(crate) mod rejections {
+    #[derive(Debug)]
+    pub(crate) struct UnacceptableContentType;
+    impl warp::reject::Reject for UnacceptableContentType {}
+
+    #[derive(Debug)]
+    pub(crate) struct HostHeaderUnset;
+    impl warp::reject::Reject for HostHeaderUnset {}
+}
+
+pub static MICROPUB_CLIENT: &[u8] = include_bytes!("./index.html");
+
+pub mod util {
+    use warp::{Filter, host::Authority};
+    use super::rejections;
+
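+    /// Extracts the request's `Host` header as an [`Authority`], rejecting the
+    /// request with [`rejections::HostHeaderUnset`] if it is missing.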
+    pub fn require_host() -> impl Filter<Extract = (Authority,), Error = warp::Rejection> + Copy {
+        warp::host::optional()
+            .and_then(|authority: Option<Authority>| async move {
+                authority.ok_or_else(|| warp::reject::custom(rejections::HostHeaderUnset))
+            })
+    }
+
+    pub fn parse_accept() -> impl Filter<Extract = (http_types::Mime,), Error = warp::Rejection> + Copy {
+        warp::header::value("Accept").and_then(|accept: warp::http::HeaderValue| async move {
+            let mut accept: http_types::content::Accept = {
+                // This is unnecessarily complicated because I want to reuse some http-types parsing
+                // and http-types keeps the constructor for Headers private, so I need to construct
+                // a mock Request to reason about headers... this is so dumb wtf
+                // so much for zero-cost abstractions, huh
+                let bytes: &[u8] = accept.as_bytes();
+                let value = http_types::headers::HeaderValue::from_bytes(bytes.to_vec()).unwrap();
+                let values: http_types::headers::HeaderValues = vec![value].into();
+                let mut request = http_types::Request::new(http_types::Method::Get, "http://example.com/");
+                request.append_header("Accept".parse::<http_types::headers::HeaderName>().unwrap(), &values);
+                http_types::content::Accept::from_headers(&request).unwrap().unwrap()
+            };
+
+            // This code is INCREDIBLY dumb, honestly...
+            // why did I even try to use it?
+            // TODO vendor this stuff in so I can customize it
+            match accept.negotiate(&[
+                "text/html; encoding=\"utf-8\"".into(),
+                "application/json; encoding=\"utf-8\"".into(),
+                "text/html".into(),
+                "application/json".into(),
+
+            ]) {
+                Ok(mime) => {
+                    Ok(http_types::Mime::from(mime.value().as_str()))
+                },
+                Err(err) => {
+                    log::error!("Content-Type negotiation error: {:?}, accepting: {:?}", err, accept);
+                    Err(warp::reject::custom(rejections::UnacceptableContentType))
+                }
+            }
+        })
+    }
+
+    mod tests {
+        #[tokio::test]
+        async fn test_require_host_with_host() {
+            use super::require_host;
+
+            let filter = require_host();
+
+            let res = warp::test::request()
+                .path("/")
+                .header("Host", "localhost:8080")
+                .filter(&filter)
+                .await
+                .unwrap();
+
+            assert_eq!(res, "localhost:8080");
+            
+        }
+
+        #[tokio::test]
+        async fn test_require_host_no_host() {
+            use super::require_host;
+
+            let filter = require_host();
+
+            let res = warp::test::request()
+                .path("/")
+                .filter(&filter)
+                .await;
+
+            assert!(res.is_err());
+        }
+    }
+}
diff --git a/kittybox-rs/src/main.rs b/kittybox-rs/src/main.rs
new file mode 100644
index 0000000..eb70885
--- /dev/null
+++ b/kittybox-rs/src/main.rs
@@ -0,0 +1,256 @@
+use log::{debug, error, info};
+use std::{convert::Infallible, env, time::Duration};
+use url::Url;
+use warp::{Filter, host::Authority};
+
+#[tokio::main]
+async fn main() {
+    // TODO turn into a feature so I can enable and disable it
+    #[cfg(debug_assertions)]
+    console_subscriber::init();
+
+    // TODO use tracing instead of log
+    let logger_env = env_logger::Env::new().filter_or("RUST_LOG", "info");
+    env_logger::init_from_env(logger_env);
+
+    info!("Starting the kittybox server...");
+
+    let backend_uri: String = match env::var("BACKEND_URI") {
+        Ok(val) => {
+            debug!("Backend URI: {}", val);
+            val
+        }
+        Err(_) => {
+            error!("BACKEND_URI is not set, cannot find a database");
+            std::process::exit(1);
+        }
+    };
+    let token_endpoint: Url = match env::var("TOKEN_ENDPOINT") {
+        Ok(val) => {
+            debug!("Token endpoint: {}", val);
+            match Url::parse(&val) {
+                Ok(val) => val,
+                _ => {
+                    error!("Token endpoint URL cannot be parsed, aborting.");
+                    std::process::exit(1)
+                }
+            }
+        }
+        Err(_) => {
+            error!("TOKEN_ENDPOINT is not set, will not be able to authorize users!");
+            std::process::exit(1)
+        }
+    };
+    let authorization_endpoint: Url = match env::var("AUTHORIZATION_ENDPOINT") {
+        Ok(val) => {
+            debug!("Auth endpoint: {}", val);
+            match Url::parse(&val) {
+                Ok(val) => val,
+                _ => {
+                    error!("Authorization endpoint URL cannot be parsed, aborting.");
+                    std::process::exit(1)
+                }
+            }
+        }
+        Err(_) => {
+            error!("AUTHORIZATION_ENDPOINT is not set, will not be able to confirm token and ID requests using IndieAuth!");
+            std::process::exit(1)
+        }
+    };
+
+    //let internal_token: Option<String> = env::var("KITTYBOX_INTERNAL_TOKEN").ok();
+
+    /*let cookie_secret: String = match env::var("COOKIE_SECRET").ok() {
+        Some(value) => value,
+        None => {
+            if let Ok(filename) = env::var("COOKIE_SECRET_FILE") {
+                use tokio::io::AsyncReadExt;
+
+                let mut file = tokio::fs::File::open(filename).await.map_err(|e| {
+                    error!("Couldn't open the cookie secret file: {}", e);
+                    std::process::exit(1);
+                }).unwrap();
+                let mut temp_string = String::new();
+                file.read_to_string(&mut temp_string).await.map_err(|e| {
+                    error!("Couldn't read the cookie secret from file: {}", e);
+                    std::process::exit(1);
+                }).unwrap();
+
+                temp_string
+            } else {
+                error!("COOKIE_SECRET or COOKIE_SECRET_FILE is not set, will not be able to log in users securely!");
+                std::process::exit(1);
+            }
+        }
+    };*/
+
+    let listen_at = match env::var("SERVE_AT")
+        .ok()
+        .unwrap_or_else(|| "[::]:8080".to_string())
+        .parse::<std::net::SocketAddr>() {
+            Ok(addr) => addr,
+            Err(e) => {
+                error!("Cannot parse SERVE_AT: {}", e);
+                std::process::exit(1);
+            }
+        };
+
+    // This thing handles redirects automatically but is type-incompatible with hyper::Client
+    // Bonus: less generics to be aware of, this thing hides its complexity
+    let http: reqwest::Client = {
+        #[allow(unused_mut)]
+        let mut builder = reqwest::Client::builder()
+            .user_agent(concat!(
+                env!("CARGO_PKG_NAME"),
+                "/",
+                env!("CARGO_PKG_VERSION")
+            ));
+        // TODO: add a root certificate if there's an environment variable pointing at it
+        //builder = builder.add_root_certificate(reqwest::Certificate::from_pem(todo!()));
+
+        builder.build().unwrap()
+    };
+
+    if backend_uri.starts_with("redis") {
+        println!("The Redis backend is deprecated.");
+        std::process::exit(1);
+    } else if backend_uri.starts_with("file") {
+        
+        let database = {
+            let folder = backend_uri.strip_prefix("file://").unwrap();
+            let path = std::path::PathBuf::from(folder);
+            match kittybox::database::FileStorage::new(path).await {
+                Ok(db) => db,
+                Err(err) => {
+                    error!("Error creating database: {:?}", err);
+                    std::process::exit(1);
+                }
+            }
+        };
+
+        let endpoints = kittybox::frontend::IndiewebEndpoints {
+            authorization_endpoint: authorization_endpoint.to_string(),
+            token_endpoint: token_endpoint.to_string(),
+            webmention: None,
+            microsub: None,
+        };
+        
+        let homepage = warp::get()
+            .and(warp::path::end())
+            .and(kittybox::frontend::homepage(database.clone(), endpoints.clone()));
+
+        let onboarding = warp::path("onboarding")
+            .and(warp::path::end())
+            .and(kittybox::frontend::onboarding(
+                database.clone(),
+                endpoints.clone(),
+                http.clone()
+            ));
+        
+        let micropub = warp::path("micropub")
+            .and(warp::path::end()
+                 .and(kittybox::micropub::micropub(
+                     database.clone(),
+                     token_endpoint.to_string(),
+                     http.clone()
+                 ))
+                 .or(warp::get()
+                     .and(warp::path("client"))
+                     .and(warp::path::end())
+                     .map(|| warp::reply::html(kittybox::MICROPUB_CLIENT))));
+
+        let media = warp::path("media")
+            .and(warp::path::end()
+                 .and(kittybox::media::media())
+                 .or(kittybox::util::require_host()
+                     .and(warp::path::param())
+                     .map(|_host: Authority, path: String| format!("media file {}", path))));
+        
+        // TODO remember how login logic works because I forgor
+        let login = warp::path("login")
+            .and(warp::path("callback")
+                 .map(|| "callback!")
+                 // TODO form on GET and handler on POST
+                 .or(warp::path::end().map(|| "login page!")));
+
+        // TODO prettier error response
+        let coffee = warp::path("coffee")
+            .map(|| warp::reply::with_status("I'm a teapot!", warp::http::StatusCode::IM_A_TEAPOT));
+        
+        let static_files = warp::path("static")
+            .and(kittybox::frontend::static_files());
+
+        let catchall = kittybox::frontend::catchall(
+            database.clone(),
+            endpoints.clone()
+        );
+
+        let health = warp::path("health").and(warp::path::end()).map(|| "OK");
+        let metrics = warp::path("metrics").and(warp::path::end()).map(kittybox::metrics::gather);
+
+        let app = homepage
+            .or(onboarding)
+            .or(metrics
+                .or(health))
+            .or(static_files)
+            .or(login)
+            .or(coffee)
+            .or(micropub)
+            .or(media)
+            .or(catchall)
+            .with(warp::log("kittybox"))
+            .with(kittybox::metrics::metrics(vec![
+                "health".to_string(),
+                "micropub".to_string(),
+                "static".to_string(),
+                "media".to_string(),
+                "metrics".to_string()
+            ]))
+            ;
+
+        let svc = warp::service(app);
+
+        let mut listenfd = listenfd::ListenFd::from_env();
+        let tcp_listener: std::net::TcpListener = if let Ok(Some(listener)) = listenfd.take_tcp_listener(0) {
+            listener
+        } else {
+            std::net::TcpListener::bind(listen_at).unwrap()
+        };
+        tcp_listener.set_nonblocking(true).unwrap();
+
+        info!("Listening on {}", tcp_listener.local_addr().unwrap());
+        let server = hyper::server::Server::from_tcp(tcp_listener)
+            .unwrap()
+            .tcp_keepalive(Some(Duration::from_secs(30 * 60)))
+            .serve(hyper::service::make_service_fn(move |_| {
+                let service = svc.clone();
+                async move {
+                    Ok::<_, Infallible>(service)
+                }
+            }))
+            .with_graceful_shutdown(async move {
+                // Defer to C-c handler whenever we're not on Unix
+                // TODO consider using a diverging future here
+                #[cfg(not(unix))]
+                return tokio::signal::ctrl_c().await.unwrap();
+                #[cfg(unix)]
+                {
+                    use tokio::signal::unix::{signal, SignalKind};
+
+                    signal(SignalKind::terminate())
+                        .unwrap()
+                        .recv()
+                        .await
+                        .unwrap()
+                }
+            });
+
+        if let Err(err) = server.await {
+            error!("Error serving requests: {}", err);
+            std::process::exit(1);
+        }
+    } else {
+        println!("Unknown backend, not starting.");
+        std::process::exit(1);
+    }
+}
diff --git a/kittybox-rs/src/media/mod.rs b/kittybox-rs/src/media/mod.rs
new file mode 100644
index 0000000..0d46e0c
--- /dev/null
+++ b/kittybox-rs/src/media/mod.rs
@@ -0,0 +1,46 @@
+use futures_util::StreamExt;
+use bytes::buf::Buf;
+use warp::{Filter, Rejection, Reply, multipart::{FormData, Part}};
+
+pub fn query() -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone {
+    warp::get()
+        .and(crate::util::require_host())
+        .map(|host| "media endpoint query...")
+}
+
+pub fn options() -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone {
+    warp::options()
+        .map(|| warp::reply::json::<Option<()>>(&None))
+        .with(warp::reply::with::header("Allow", "GET, POST"))
+}
+
+pub fn upload() -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone {
+    warp::post()
+        .and(crate::util::require_host())
+        .and(warp::multipart::form().max_length(1024*1024*150/*mb*/))
+        .and_then(|host, mut form: FormData| async move {
+            // TODO get rid of the double unwrap() here
+            let file: Part = form.next().await.unwrap().unwrap();
+            log::debug!("Uploaded: {:?}, type: {:?}", file.filename(), file.content_type());
+
+            let mut data = file.stream();
+            while let Some(buf) = data.next().await {
+                // TODO save it into a file
+                log::debug!("buffer length: {:?}", buf.map(|b| b.remaining()));
+            }
+            Ok::<_, warp::Rejection>(warp::reply::with_header(
+                warp::reply::with_status(
+                    "",
+                    warp::http::StatusCode::CREATED
+                ),
+                "Location",
+                "./awoo.png"
+            ))
+        })
+}
+
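+/// Combined media endpoint filter: file upload (POST), endpoint query (GET)
+/// and an OPTIONS responder advertising the allowed methods.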
+pub fn media() -> impl Filter<Extract = (impl Reply,), Error = Rejection> + Clone {
+    upload()
+        .or(query())
+        .or(options())
+}
diff --git a/kittybox-rs/src/metrics.rs b/kittybox-rs/src/metrics.rs
new file mode 100644
index 0000000..48f5d9b
--- /dev/null
+++ b/kittybox-rs/src/metrics.rs
@@ -0,0 +1,21 @@
+#![allow(unused_imports, dead_code)]
+use async_trait::async_trait;
+use lazy_static::lazy_static;
+use std::time::{Duration, Instant};
+use prometheus::Encoder;
+
+// TODO: Vendor in the Metrics struct from warp_prometheus and rework the path matching algorithm
+
+pub fn metrics(path_includes: Vec<String>) -> warp::log::Log<impl Fn(warp::log::Info) + Clone> {
+    let metrics = warp_prometheus::Metrics::new(prometheus::default_registry(), &path_includes);
+    warp::log::custom(move |info| metrics.http_metrics(info))
+}
+
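+/// Gathers all metrics from the default Prometheus registry and encodes them
+/// in the text exposition format, ready to be served from the /metrics route.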
+pub fn gather() -> Vec<u8> {
+    let mut buffer: Vec<u8> = vec![];
+    let encoder = prometheus::TextEncoder::new();
+    let metric_families = prometheus::gather();
+    encoder.encode(&metric_families, &mut buffer).unwrap();
+
+    buffer
+}
diff --git a/kittybox-rs/src/micropub/get.rs b/kittybox-rs/src/micropub/get.rs
new file mode 100644
index 0000000..718714a
--- /dev/null
+++ b/kittybox-rs/src/micropub/get.rs
@@ -0,0 +1,82 @@
+use crate::database::{MicropubChannel, Storage};
+use crate::indieauth::User;
+use crate::ApplicationState;
+use tide::prelude::{json, Deserialize};
+use tide::{Request, Response, Result};
+
+#[derive(Deserialize)]
+struct QueryOptions {
+    q: String,
+    url: Option<String>,
+}
+
+pub async fn get_handler<Backend>(req: Request<ApplicationState<Backend>>) -> Result
+where
+    Backend: Storage + Send + Sync,
+{
+    let user = req.ext::<User>().unwrap();
+    let backend = &req.state().storage;
+    let media_endpoint = &req.state().media_endpoint;
+    let query = req.query::<QueryOptions>().unwrap_or(QueryOptions {
+        q: "".to_string(),
+        url: None,
+    });
+    match &*query.q {
+        "config" => {
+            let channels: Vec<MicropubChannel> = match backend.get_channels(user.me.as_str()).await {
+                Ok(chans) => chans,
+                Err(err) => return Ok(err.into())
+            };
+            Ok(Response::builder(200).body(json!({
+                "q": ["source", "config", "channel"],
+                "channels": channels,
+                "media-endpoint": media_endpoint
+            })).build())
+        },
+        "channel" => {
+            let channels: Vec<MicropubChannel> = match backend.get_channels(user.me.as_str()).await {
+                Ok(chans) => chans,
+                Err(err) => return Ok(err.into())
+            };
+            Ok(Response::builder(200).body(json!(channels)).build())
+        }
+        "source" => {
+            if user.check_scope("create") || user.check_scope("update") || user.check_scope("delete") || user.check_scope("undelete") {
+                if let Some(url) = query.url {
+                    match backend.get_post(&url).await {
+                        Ok(post) => if let Some(post) = post {
+                            Ok(Response::builder(200).body(post).build())
+                        } else {
+                            Ok(Response::builder(404).build())
+                        },
+                        Err(err) => Ok(err.into())
+                    }
+                } else {
+                    Ok(Response::builder(400).body(json!({
+                        "error": "invalid_request",
+                        "error_description": "Please provide `url`."
+                    })).build())
+                }
+            } else {
+                Ok(Response::builder(401).body(json!({
+                    "error": "insufficient_scope",
+                    "error_description": "You don't have the required scopes to proceed.",
+                    "scope": "update"
+                })).build())
+            }
+        },
+        // TODO: ?q=food, ?q=geo, ?q=contacts
+        // Depends on indexing posts
+        // Errors
+        "" => Ok(Response::builder(400).body(json!({
+            "error": "invalid_request",
+            "error_description": "No ?q= parameter specified. Try ?q=config maybe?"
+        })).build()),
+        _ => Ok(Response::builder(400).body(json!({
+            "error": "invalid_request",
+            "error_description": "Unsupported ?q= query. Try ?q=config and see the q array for supported values."
+        })).build())
+    }
+}
diff --git a/kittybox-rs/src/micropub/mod.rs b/kittybox-rs/src/micropub/mod.rs
new file mode 100644
index 0000000..f426c77
--- /dev/null
+++ b/kittybox-rs/src/micropub/mod.rs
@@ -0,0 +1,964 @@
+use std::convert::Infallible;
+use std::fmt::Display;
+use either::Either;
+use log::{info, warn, error};
+use warp::http::StatusCode;
+use warp::{Filter, Rejection, reject::InvalidQuery};
+use serde_json::json;
+use serde::{Serialize, Deserialize};
+use crate::database::{MicropubChannel, Storage, StorageError};
+use crate::indieauth::User;
+use crate::micropub::util::form_to_mf2_json;
+
+#[derive(Serialize, Deserialize, Debug, PartialEq)]
+#[serde(rename_all = "kebab-case")]
+enum QueryType {
+    Source,
+    Config,
+    Channel,
+    SyndicateTo
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+struct MicropubQuery {
+    q: QueryType,
+    url: Option<String>
+}
+
+#[derive(Serialize, Deserialize, PartialEq, Debug)]
+#[serde(rename_all = "snake_case")]
+enum ErrorType {
+    AlreadyExists,
+    Forbidden,
+    InternalServerError,
+    InvalidRequest,
+    InvalidScope,
+    NotAuthorized,
+    NotFound,
+    UnsupportedMediaType
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub(crate) struct MicropubError {
+    error: ErrorType,
+    error_description: String
+}
+
+impl From<StorageError> for MicropubError {
+    fn from(err: StorageError) -> Self {
+        Self {
+            error: match err.kind() {
+                crate::database::ErrorKind::NotFound => ErrorType::NotFound,
+                _ => ErrorType::InternalServerError
+            },
+            error_description: format!("Backend error: {}", err)
+        }
+    }
+}
+
+impl std::error::Error for MicropubError {}
+
+impl Display for MicropubError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.write_str("Micropub error: ")?;
+        f.write_str(&self.error_description)
+    }
+}
+
+impl From<&MicropubError> for StatusCode {
+    fn from(err: &MicropubError) -> Self {
+        use ErrorType::*;
+        match err.error {
+            AlreadyExists => StatusCode::CONFLICT,
+            Forbidden => StatusCode::FORBIDDEN,
+            InternalServerError => StatusCode::INTERNAL_SERVER_ERROR,
+            InvalidRequest => StatusCode::BAD_REQUEST,
+            InvalidScope => StatusCode::UNAUTHORIZED,
+            NotAuthorized => StatusCode::UNAUTHORIZED,
+            NotFound => StatusCode::NOT_FOUND,
+            UnsupportedMediaType => StatusCode::UNSUPPORTED_MEDIA_TYPE,
+        }
+    }
+}
+impl From<MicropubError> for StatusCode {
+    fn from(err: MicropubError) -> Self {
+        (&err).into()
+    }
+}
+
+impl From<serde_json::Error> for MicropubError {
+    fn from(err: serde_json::Error) -> Self {
+        use ErrorType::*;
+        Self {
+            error: InvalidRequest,
+            error_description: err.to_string()
+        }
+    }
+}
+
+impl MicropubError {
+    fn new(error: ErrorType, error_description: &str) -> Self {
+        Self {
+            error,
+            error_description: error_description.to_owned()
+        }
+    }
+}
+
+impl warp::reject::Reject for MicropubError {}
+
+mod post;
+pub(crate) use post::normalize_mf2;
+
+mod util {
+    use serde_json::json;
+
+    pub(crate) fn form_to_mf2_json(form: Vec<(String, String)>) -> serde_json::Value {
+        let mut mf2 = json!({"type": [], "properties": {}});
+        for (k, v) in form {
+            if k == "h" {
+                mf2["type"]
+                    .as_array_mut()
+                    .unwrap()
+                    .push(json!("h-".to_string() + &v));
+            } else if k != "access_token" {
+                let key = k.strip_suffix("[]").unwrap_or(&k);
+                match mf2["properties"][key].as_array_mut() {
+                    Some(prop) => prop.push(json!(v)),
+                    None => mf2["properties"][key] = json!([v]),
+                }
+            }
+        }
+        if mf2["type"].as_array().unwrap().is_empty() {
+            mf2["type"].as_array_mut().unwrap().push(json!("h-entry"));
+        }
+        mf2
+    }
+
+    #[cfg(test)]
+    mod tests {
+        use serde_json::json;
+        #[test]
+        fn test_form_to_mf2() {
+            assert_eq!(
+                super::form_to_mf2_json(
+                    serde_urlencoded::from_str(
+                        "h=entry&content=something%20interesting"
+                    ).unwrap()
+                ),
+                json!({
+                    "type": ["h-entry"],
+                    "properties": {
+                        "content": ["something interesting"]
+                    }
+                })
+            )
+        }
+    }
+}
+
+#[derive(Debug)]
+struct FetchedPostContext {
+    url: url::Url,
+    mf2: serde_json::Value,
+    webmention: Option<url::Url>
+}
+
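+/// Replaces the URLs in the given reply-context property (e.g. "in-reply-to")
+/// with the corresponding fetched MF2 items where available, falling back to
+/// the raw URL otherwise.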
+fn populate_reply_context(mf2: &serde_json::Value, prop: &str, ctxs: &[FetchedPostContext]) -> Option<serde_json::Value> {
+    if mf2["properties"][prop].is_array() {
+        Some(json!(
+            mf2["properties"][prop]
+                .as_array()
+            // Safe to unwrap because we checked its existence and type
+            // And it's not like we can make it disappear without unsafe code
+                .unwrap()
+                .iter()
+            // This seems to be O(n^2) and I don't like it.
+            // Nevertheless, I lack required knowledge to optimize it. Also, it works, so...
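+            // (A possible, untested optimization: index `ctxs` by URL in a HashMap
+            // beforehand, turning the per-property lookup below into O(1).)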
+                .map(|i| ctxs.iter()
+                     .find(|ctx| Some(ctx.url.as_str()) == i.as_str())
+                     .and_then(|ctx| ctx.mf2["items"].get(0))
+                     .or(Some(i))
+                     .unwrap())
+                .collect::<Vec<&serde_json::Value>>()
+        ))
+    } else {
+        None
+    }
+}
+
+// TODO actually save the post to the database and schedule post-processing
+pub(crate) async fn _post<D: 'static + Storage>(
+    user: crate::indieauth::User,
+    uid: String,
+    mf2: serde_json::Value,
+    db: D,
+    http: reqwest::Client
+) -> Result<impl warp::Reply, MicropubError> {
+    // Here, we have the following guarantees:
+    // - The user is the same user for this host (guaranteed by ensure_same_user)
+    // - The MF2-JSON document is normalized (guaranteed by normalize_mf2):
+    //   - The MF2-JSON document contains a UID
+    //   - The MF2-JSON document's URL list contains its UID
+    //   - The MF2-JSON document's "content" field contains an HTML blob, if present
+    //   - The MF2-JSON document's publishing datetime is present
+    //   - The MF2-JSON document's target channels are set
+    //   - The MF2-JSON document's author is set
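+    //
+    // As a rough illustration (hypothetical values, not taken from a real request),
+    // a normalized document is expected to look roughly like this:
+    //
+    // {
+    //   "type": ["h-entry"],
+    //   "properties": {
+    //     "uid": ["https://example.com/posts/hello-world"],
+    //     "url": ["https://example.com/posts/hello-world"],
+    //     "content": [{"html": "<p>Hello world!</p>"}],
+    //     "published": ["2022-05-24T17:18:30+03:00"],
+    //     "channel": ["https://example.com/feeds/main"],
+    //     "author": ["https://example.com/"]
+    //   }
+    // }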
+
+    // Security check! Do we have an oAuth2 scope to proceed?
+    if !user.check_scope("create") {
+        return Err(MicropubError {
+            error: ErrorType::InvalidScope,
+            error_description: "Not enough privileges - try acquiring the \"create\" scope.".to_owned()
+        });
+    }
+
+    // Security check #2! Are we posting to our own website?
+    if !uid.starts_with(user.me.as_str()) || mf2["properties"]["channel"]
+        .as_array()
+        .unwrap_or(&vec![])
+        .iter()
+        .any(|url| !url.as_str().unwrap().starts_with(user.me.as_str()))
+    {
+        return Err(MicropubError {
+            error: ErrorType::Forbidden,
+            error_description: "You're posting to a website that's not yours.".to_owned()
+        });
+    }
+
+    // Security check #3! Are we overwriting an existing document?
+    if db.post_exists(&uid).await? {
+        return Err(MicropubError {
+            error: ErrorType::AlreadyExists,
+            error_description: "UID clash was detected, operation aborted.".to_owned()
+        });
+    }
+
+    // Save the post
+    db.put_post(&mf2, user.me.as_str()).await?;
+
+    let mut channels = mf2["properties"]["channel"]
+        .as_array()
+        .unwrap()
+        .iter()
+        .map(|i| i.as_str().unwrap_or(""))
+        .filter(|i| !i.is_empty());
+
+    let default_channel = user.me.join(post::DEFAULT_CHANNEL_PATH).unwrap().to_string();
+    let vcards_channel = user.me.join(post::CONTACTS_CHANNEL_PATH).unwrap().to_string();
+    let food_channel = user.me.join(post::FOOD_CHANNEL_PATH).unwrap().to_string();
+    let default_channels = vec![default_channel, vcards_channel, food_channel];
+
+    for chan in &mut channels {
+        if db.post_exists(chan).await? {
+            db.update_post(chan, json!({"add": {"children": [uid]}})).await?;
+        } else if default_channels.iter().any(|i| chan == i) {
+            post::create_feed(&db, &uid, chan, &user).await?;
+        } else {
+            warn!("Ignoring non-existent channel: {}", chan);
+        }
+    }
+
+    let reply = warp::reply::with_status(
+        warp::reply::with_header(
+            warp::reply::json(&json!({"location": &uid})),
+            "Location", &uid
+        ),
+        StatusCode::ACCEPTED
+    );
+    
+    // TODO: Post-processing the post (aka second write pass)
+    // - [x] Download rich reply contexts
+    // - [ ] Syndicate the post if requested, add links to the syndicated copies
+    // - [ ] Send WebSub notifications to the hub (if we happen to have one)
+    // - [x] Send webmentions
+    tokio::task::spawn(async move {
+        use futures_util::StreamExt;
+
+        let uid: &str = mf2["properties"]["uid"][0].as_str().unwrap();
+
+        let context_props = ["in-reply-to", "like-of", "repost-of", "bookmark-of"];
+        let mut context_urls: Vec<url::Url> = vec![];
+        for prop in &context_props {
+            if let Some(array) = mf2["properties"][prop].as_array() {
+                context_urls.extend(
+                    array
+                        .iter()
+                        .filter_map(|v| v.as_str())
+                        .filter_map(|v| v.parse::<url::Url>().ok()),
+                );
+            }
+        }
+        // TODO parse HTML in e-content and add links found here
+        context_urls.sort_unstable_by_key(|u| u.to_string());
+        context_urls.dedup();
+
+        // TODO: Make a stream to fetch all these posts and convert them to MF2
+        let post_contexts = {
+            let http = &http;
+            tokio_stream::iter(context_urls.into_iter())
+                .then(move |url: url::Url| http.get(url).send())
+                .filter_map(|response| futures::future::ready(response.ok()))
+                .filter(|response| futures::future::ready(response.status() == 200))
+                .filter_map(|response: reqwest::Response| async move {
+                    // 1. We need to preserve the URL
+                    // 2. We need to get the HTML for MF2 processing
+                    // 3. We need to get the webmention endpoint address
+                    // All of that can be done in one go.
+                    let url = response.url().clone();
+                    // TODO parse link headers
+                    let links = response
+                        .headers()
+                        .get_all(hyper::http::header::LINK)
+                        .iter()
+                        .cloned()
+                        .collect::<Vec<hyper::http::HeaderValue>>();
+                    let html = response.text().await;
+                    if html.is_err() {
+                        return None;
+                    }
+                    let html = html.unwrap();
+                    let mf2 = microformats::from_html(&html, url.clone()).unwrap();
+                    // TODO use first Link: header if available
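+                    // (For reference, such a header looks like
+                    //  `Link: <https://example.com/webmention>; rel="webmention"`;
+                    //  example.com is illustrative. Once parsed, it should take
+                    //  precedence over the HTML `rel` value below.)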
+                    let webmention: Option<url::Url> = mf2.rels.by_rels().get("webmention")
+                        .and_then(|i| i.first().cloned());
+
+                    dbg!(Some(FetchedPostContext {
+                        url, mf2: serde_json::to_value(mf2).unwrap(), webmention
+                    }))
+                })
+                .collect::<Vec<FetchedPostContext>>()
+                .await
+        };
+
+        let mut update = json!({ "replace": {} });
+        for prop in &context_props {
+            if let Some(json) = populate_reply_context(&mf2, prop, &post_contexts) {
+                update["replace"][prop] = json;
+            }
+        }
+        if !update["replace"].as_object().unwrap().is_empty() {
+            if let Err(err) = db.update_post(uid, update).await {
+                error!("Failed to update post with rich reply contexts: {}", err);
+            }
+        }
+
+        // At this point we can start syndicating the post.
+        // Currently we don't really support any syndication endpoints, but still!
+        /*if let Some(syndicate_to) = mf2["properties"]["mp-syndicate-to"].as_array() {
+            let http = &http;
+            tokio_stream::iter(syndicate_to)
+                .filter_map(|i| futures::future::ready(i.as_str()))
+                .for_each_concurrent(3, |s: &str| async move {
+                    #[allow(clippy::match_single_binding)]
+                    match s {
+                        _ => {
+                            todo!("Syndicate to generic webmention-aware service {}", s);
+                        }
+                        // TODO special handling for non-webmention-aware services like the birdsite
+                    }
+                })
+                .await;
+        }*/
+
+        {
+            let http = &http;
+            tokio_stream::iter(
+                post_contexts.into_iter()
+                    .filter(|ctx| ctx.webmention.is_some()))
+                .for_each_concurrent(2, |ctx| async move {
+                    let mut map = std::collections::HashMap::new();
+                    map.insert("source", uid);
+                    map.insert("target", ctx.url.as_str());
+
+                    match http.post(ctx.webmention.unwrap().clone())
+                        .form(&map)
+                        .send()
+                        .await
+                    {
+                        Ok(res) => {
+                            if !res.status().is_success() {
+                                warn!(
+                                    "Failed to send a webmention for {}: got HTTP {}",
+                                    ctx.url, res.status()
+                                );
+                            } else {
+                                info!("Sent a webmention to {}, got HTTP {}", ctx.url, res.status())
+                            }
+                        },
+                        Err(err) => warn!("Failed to send a webmention for {}: {}", ctx.url, err)
+                    }
+                })
+                .await;
+        }
+    });
+    
+    Ok(reply)
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(rename_all = "snake_case")]
+enum ActionType {
+    Delete,
+    Update
+}
+
+#[derive(Serialize, Deserialize)]
+struct MicropubFormAction {
+    action: ActionType,
+    url: String
+}
+
+#[derive(Serialize, Deserialize)]
+struct MicropubAction {
+    action: ActionType,
+    url: String,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    replace: Option<serde_json::Value>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    add: Option<serde_json::Value>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    delete: Option<serde_json::Value>
+}
+
+impl From<MicropubFormAction> for MicropubAction {
+    fn from(a: MicropubFormAction) -> Self {
+        Self {
+            action: a.action,
+            url: a.url,
+            replace: None, add: None, delete: None
+        }
+    }
+}
+
+// TODO perform the requested actions synchronously
+async fn post_action<D: Storage>(
+    action: MicropubAction,
+    db: D,
+    user: User    
+) -> Result<impl warp::Reply, MicropubError> {
+
+    let uri = if let Ok(uri) = action.url.parse::<hyper::Uri>() {
+        uri
+    } else {
+        return Err(MicropubError {
+            error: ErrorType::InvalidRequest,
+            error_description: "Your URL doesn't parse properly.".to_owned()
+        });
+    };
+
+    if uri.authority().unwrap() != user.me.as_str().parse::<hyper::Uri>().unwrap().authority().unwrap() {
+        return Err(MicropubError {
+            error: ErrorType::Forbidden,
+            error_description: "Don't tamper with others' posts!".to_owned()
+        });
+    }
+
+    match action.action {
+        ActionType::Delete => {
+            if !user.check_scope("delete") {
+                return Err(MicropubError {
+                    error: ErrorType::InvalidScope,
+                    error_description: "You need a \"delete\" scope for this.".to_owned()
+                });
+            }
+
+            db.delete_post(&action.url).await?
+        },
+        ActionType::Update => {
+            if !user.check_scope("update") {
+                return Err(MicropubError {
+                    error: ErrorType::InvalidScope,
+                    error_description: "You need an \"update\" scope for this.".to_owned()
+                });
+            }
+
+            db.update_post(
+                &action.url,
+                // Here, unwrapping is safe, because this value
+                // was recently deserialized from JSON already.
+                serde_json::to_value(&action).unwrap()
+            ).await?
+        },
+    }
+
+    Ok(warp::reply::reply())
+}
+
+async fn check_auth(host: warp::host::Authority, user: User) -> Result<User, warp::Rejection> {
+    let user_authority = warp::http::Uri::try_from(user.me.as_str())
+        .unwrap()
+        .authority()
+        .unwrap()
+        .clone();
+    // TODO compare with potential list of allowed websites
+    // to allow one user to edit several websites with one token
+    if host != user_authority {
+        Err(warp::reject::custom(MicropubError::new(
+            ErrorType::NotAuthorized,
+            "This user is not authorized to use Micropub on this website."
+        )))
+    } else {
+        Ok(user)
+    }
+}
+
+#[cfg(any(not(debug_assertions), test))]
+fn ensure_same_user_as_host(
+    token_endpoint: String,
+    http: reqwest::Client
+) -> impl Filter<Extract = (User,), Error = warp::Rejection> + Clone {
+    crate::util::require_host()
+        .and(crate::indieauth::require_token(token_endpoint, http))
+        .and_then(check_auth)
+}
+
+async fn dispatch_post_body(
+    mut body: impl bytes::Buf,
+    mimetype: http_types::Mime
+) -> Result<Either<MicropubAction, serde_json::Value>, warp::Rejection> {
+    // Since hyper::common::buf::BufList doesn't implement Clone, we can't clone the body here.
+    // We have to copy the body. Ugh!!!
+    // So much for zero-copy buffers...
+    let body = {
+        let mut _body: Vec<u8> = Vec::default();
+        while body.has_remaining() {
+            _body.extend(body.chunk());
+            body.advance(body.chunk().len());
+        }
+        _body
+    };
+    match mimetype.essence() {
+        "application/json" => {
+            if let Ok(body) = serde_json::from_slice::<MicropubAction>(&body) {
+                Ok(Either::Left(body))
+            } else if let Ok(body) = serde_json::from_slice::<serde_json::Value>(&body) {
+                // quick sanity check
+                if !body.is_object() || !body["type"].is_array() {
+                    return Err(MicropubError {
+                        error: ErrorType::InvalidRequest,
+                        error_description: "Invalid MF2-JSON detected: `.` should be an object, `.type` should be an array of MF2 types".to_owned()
+                    }.into())
+                }
+                Ok(Either::Right(body))
+            } else {
+                Err(MicropubError {
+                    error: ErrorType::InvalidRequest,
+                    error_description: "Invalid JSON object passed.".to_owned()
+                }.into())
+            }
+        },
+        "application/x-www-form-urlencoded" => {
+            if let Ok(body) = serde_urlencoded::from_bytes::<MicropubFormAction>(&body) {
+                Ok(Either::Left(body.into()))
+            } else if let Ok(body) = serde_urlencoded::from_bytes::<Vec<(String, String)>>(&body) {
+                Ok(Either::Right(form_to_mf2_json(body)))
+            } else {
+                Err(MicropubError {
+                    error: ErrorType::InvalidRequest,
+                    error_description: "Invalid form-encoded data. Try h=entry&content=Hello!".to_owned()
+                }.into())
+            }
+        },
+        other => Err(MicropubError {
+            error: ErrorType::UnsupportedMediaType,
+            error_description: format!("Unsupported media type: {}. Try application/json?", other)
+        }.into())
+    }
+}
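+
+// For reference (values are illustrative): a JSON body is either a Micropub
+// action, e.g. `{"action": "delete", "url": "https://example.com/posts/1"}`,
+// or an MF2-JSON object, e.g. `{"type": ["h-entry"], "properties": {"content": ["Hello!"]}}`.
+// A form-encoded body is either `action=delete&url=https://example.com/posts/1`
+// or something like `h=entry&content=Hello!`, which form_to_mf2_json converts
+// into the same MF2-JSON shape.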
+
+#[cfg_attr(all(debug_assertions, not(test)), allow(unused_variables))]
+pub fn post<D: 'static + Storage>(
+    db: D,
+    token_endpoint: String,
+    http: reqwest::Client
+) -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
+    let inject_db = warp::any().map(move || db.clone());
+    #[cfg(all(debug_assertions, not(test)))]
+    let ensure_same_user = warp::any().map(|| crate::indieauth::User::new(
+        "http://localhost:8080/",
+        "https://quill.p3k.io/",
+        "create update delete media"
+    ));
+    #[cfg(any(not(debug_assertions), test))]
+    let ensure_same_user = ensure_same_user_as_host(token_endpoint, http.clone());
+
+    warp::post()
+        .and(warp::body::content_length_limit(1024 * 512)
+             .and(warp::body::aggregate())
+             .and(warp::header::<http_types::Mime>("Content-Type"))
+             .and_then(dispatch_post_body))
+        .and(inject_db)
+        .and(warp::any().map(move || http.clone()))
+        .and(ensure_same_user)
+        .and_then(|body: Either<MicropubAction, serde_json::Value>, db: D, http: reqwest::Client, user: crate::indieauth::User| async move {
+            (match body {
+                Either::Left(action) => {
+                    post_action(action, db, user).await.map(|p| Box::new(p) as Box<dyn warp::Reply>)
+                },
+                Either::Right(post) => {
+                    let (uid, mf2) = post::normalize_mf2(post, &user);
+                    _post(user, uid, mf2, db, http).await.map(|p| Box::new(p) as Box<dyn warp::Reply>)
+                }
+            }).map_err(warp::reject::custom)
+        })
+}
+
+pub fn options() -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
+    warp::options()
+        // TODO make it reply with a basic description of Micropub spec
+        .map(|| warp::reply::json::<Option<()>>(&None))
+        .with(warp::reply::with::header("Allow", "GET, POST"))
+}
+
+async fn _query<D: Storage>(
+    db: D,
+    query: MicropubQuery,
+    user: crate::indieauth::User
+) -> Box<dyn warp::Reply> {
+    let user_authority = warp::http::Uri::try_from(user.me.as_str())
+        .unwrap()
+        .authority()
+        .unwrap()
+        .clone();
+
+    match query.q {
+        QueryType::Config => {
+            let channels: Vec<MicropubChannel> = match db.get_channels(user_authority.as_str()).await {
+                Ok(chans) => chans,
+                Err(err) => return Box::new(warp::reply::with_status(
+                    warp::reply::json(&MicropubError::new(
+                        ErrorType::InternalServerError,
+                        &format!("Error fetching channels: {}", err)
+                    )),
+                    StatusCode::INTERNAL_SERVER_ERROR
+                ))
+            };
+
+            Box::new(warp::reply::json(json!({
+                "q": [
+                    QueryType::Source,
+                    QueryType::Config,
+                    QueryType::Channel,
+                    QueryType::SyndicateTo
+                ],
+                "channels": channels,
+                "_kittybox_authority": user_authority.as_str(),
+                "syndicate-to": []
+            }).as_object().unwrap()))
+        },
+        QueryType::Source => {
+            match query.url {
+                Some(url) => {
+                    if warp::http::Uri::try_from(&url).unwrap().authority().unwrap() != &user_authority {
+                        return Box::new(warp::reply::with_status(
+                            warp::reply::json(&MicropubError::new(
+                                ErrorType::NotAuthorized,
+                                "You are requesting a post from a website that doesn't belong to you."
+                            )),
+                            StatusCode::UNAUTHORIZED
+                        ))
+                    }
+                    match db.get_post(&url).await {
+                        Ok(some) => match some {
+                            Some(post) => Box::new(warp::reply::json(&post)),
+                            None => Box::new(warp::reply::with_status(
+                                warp::reply::json(&MicropubError::new(
+                                    ErrorType::NotFound,
+                                    "The specified MF2 object was not found in database."
+                                )),
+                                StatusCode::NOT_FOUND
+                            ))
+                        },
+                        Err(err) => {
+                            Box::new(warp::reply::json(&MicropubError::new(
+                                ErrorType::InternalServerError,
+                                &format!("Backend error: {}", err)
+                            )))
+                        }
+                    }
+                },
+                None => {
+                    // Here, one should probably attempt to query at least the main feed and collect posts.
+                    // A pre-made query function can't be used because it does unneeded filtering.
+                    // Not implemented for now; this part is optional.
+                    Box::new(warp::reply::with_status(
+                        warp::reply::json(&MicropubError::new(
+                            ErrorType::InvalidRequest,
+                            "Querying for post list is not implemented yet."
+                        )),
+                        StatusCode::BAD_REQUEST
+                    ))
+                }
+            }
+        },
+        QueryType::Channel => {
+            let channels: Vec<MicropubChannel> = match db.get_channels(user_authority.as_str()).await {
+                Ok(chans) => chans,
+                Err(err) => return Box::new(warp::reply::with_status(
+                    warp::reply::json(&MicropubError::new(
+                        ErrorType::InternalServerError,
+                        &format!("Error fetching channels: {}", err)
+                    )),
+                    StatusCode::INTERNAL_SERVER_ERROR
+                ))
+            };
+
+            Box::new(warp::reply::json(&json!({ "channels": channels })))
+        },
+        QueryType::SyndicateTo => {
+            Box::new(warp::reply::json(&json!({ "syndicate-to": [] })))
+        }
+    }
+}
+
+pub fn query<D: Storage>(
+    db: D,
+    token_endpoint: String,
+    http: reqwest::Client
+) -> impl Filter<Extract = (impl warp::Reply,), Error = warp::Rejection> + Clone {
+    warp::get()
+        .map(move || db.clone())
+        .and(warp::query::<MicropubQuery>())
+        .and(crate::util::require_host()
+             .and(crate::indieauth::require_token(token_endpoint, http))
+             .and_then(check_auth))
+        .then(_query)
+        .recover(|e: warp::Rejection| async move {
+            if let Some(err) = e.find::<MicropubError>() {
+                Ok(warp::reply::json(err))
+            } else {
+                Err(e)
+            }
+        })
+}
+
+pub async fn recover(err: Rejection) -> Result<impl warp::Reply, Infallible> {
+    if let Some(error) = err.find::<MicropubError>() {
+        return Ok(warp::reply::with_status(warp::reply::json(&error), error.into()))
+    }
+    let error = if err.find::<InvalidQuery>().is_some() {
+        MicropubError::new(
+            ErrorType::InvalidRequest,
+            "Invalid query parameters sent. Try ?q=config to see what you can do."
+        )
+    } else {
+        log::error!("Unhandled rejection: {:?}", err);
+        MicropubError::new(
+            ErrorType::InternalServerError,
+            &format!("Unknown error: {:?}", err)
+        )
+    };
+
+    Ok(warp::reply::with_status(warp::reply::json(&error), error.into()))
+}
+
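+/// The combined Micropub endpoint: `q=` queries over GET, posts and actions
+/// over POST, plus an OPTIONS handler, with `MicropubError` rejections
+/// rendered as JSON error responses by `recover`.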
+pub fn micropub<D: 'static + Storage>(
+    db: D,
+    token_endpoint: String,
+    http: reqwest::Client
+) -> impl Filter<Extract = (impl warp::Reply,), Error = Infallible> + Clone {
+    query(db.clone(), token_endpoint.clone(), http.clone())
+        .or(post(db, token_endpoint, http))
+        .or(options())
+        .recover(recover)
+}
+#[cfg(test)]
+#[allow(dead_code)]
+impl MicropubQuery {
+    fn config() -> Self {
+        Self {
+            q: QueryType::Config,
+            url: None
+        }
+    }
+
+    fn source(url: &str) -> Self {
+        Self {
+            q: QueryType::Source,
+            url: Some(url.to_owned())
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use hyper::body::HttpBody;
+    use crate::{database::Storage, micropub::MicropubError};
+    use warp::{Filter, Reply};
+    use serde_json::json;
+
+    use super::FetchedPostContext;
+
+    #[test]
+    fn test_populate_reply_context() {
+        let already_expanded_reply_ctx = json!({
+            "type": ["h-entry"],
+            "properties": {
+                "content": ["Hello world!"]
+            }
+        });
+        let mf2 = json!({
+            "type": ["h-entry"],
+            "properties": {
+                "like-of": [
+                    "https://fireburn.ru/posts/example",
+                    already_expanded_reply_ctx,
+                    "https://fireburn.ru/posts/non-existent"
+                ]
+            }
+        });
+        let test_ctx = json!({
+            "type": ["h-entry"],
+            "properties": {
+                "content": ["This is a post which was reacted to."]
+            }
+        });
+        let reply_contexts = vec![
+            FetchedPostContext {
+                url: "https://fireburn.ru/posts/example".parse().unwrap(),
+                mf2: json!({
+                    "items": [
+                        test_ctx
+                    ]
+                }),
+                webmention: None
+            }
+        ];
+
+        let like_of = super::populate_reply_context(&mf2, "like-of", &reply_contexts).unwrap();
+
+        assert_eq!(like_of[0], test_ctx);
+        assert_eq!(like_of[1], already_expanded_reply_ctx);
+        assert_eq!(like_of[2], "https://fireburn.ru/posts/non-existent");
+    }
+
+    #[tokio::test]
+    async fn check_post_reject_scope() {
+        let inject_db = {
+            let db = crate::database::MemoryStorage::new();
+
+            move || db.clone()
+        };
+        let db = inject_db();
+
+        let res = warp::test::request()
+            .filter(&warp::any()
+                    .map(inject_db)
+                    .and_then(move |db| async move {
+                        let post = json!({
+                            "type": ["h-entry"],
+                            "properties": {
+                                "content": ["Hello world!"]
+                            }
+                        });
+                        let user = crate::indieauth::User::new(
+                            "https://localhost:8080/",
+                            "https://kittybox.fireburn.ru/",
+                            "profile"
+                        );
+                        let (uid, mf2) = super::post::normalize_mf2(post, &user);
+
+                        super::_post(
+                            user, uid, mf2, db, reqwest::Client::new()
+                        ).await.map_err(warp::reject::custom)
+                    })
+            )
+            .await
+            .map(|_| panic!("Tried to do something with a reply!"))
+            .unwrap_err();
+
+        if let Some(err) = res.find::<MicropubError>() {
+            assert_eq!(err.error, super::ErrorType::InvalidScope);
+        } else {
+            panic!("Did not return MicropubError");
+        }
+
+        let hashmap = db.mapping.read().await;
+        assert!(hashmap.is_empty());
+    }
+    
+    #[tokio::test]
+    async fn check_post_mf2() {
+        let inject_db = {
+            let db = crate::database::MemoryStorage::new();
+
+            move || db.clone()
+        };
+        let db = inject_db();
+        
+        let res = warp::test::request()
+            .filter(&warp::any()
+                    .map(inject_db)
+                    .and_then(move |db| async move {
+                        let post = json!({
+                            "type": ["h-entry"],
+                            "properties": {
+                                "content": ["Hello world!"]
+                            }
+                        });
+                        let user = crate::indieauth::User::new(
+                            "https://localhost:8080/",
+                            "https://kittybox.fireburn.ru/",
+                            "create"
+                        );
+                        let (uid, mf2) = super::post::normalize_mf2(post, &user);
+
+                        super::_post(
+                            user, uid, mf2, db, reqwest::Client::new()
+                        ).await.map_err(warp::reject::custom)
+                    })
+            )
+            .await
+            .unwrap()
+            .into_response();
+
+        assert!(res.headers().contains_key("Location"));
+        let location = res.headers().get("Location").unwrap();
+        assert!(db.post_exists(location.to_str().unwrap()).await.unwrap());
+        assert!(db.post_exists("https://localhost:8080/feeds/main").await.unwrap());
+    }
+
+    #[tokio::test]
+    async fn test_check_auth() {
+        let err = warp::test::request()
+            .filter(&warp::any()
+                    .map(|| (
+                        warp::host::Authority::from_static("aaronparecki.com"),
+                        crate::indieauth::User::new(
+                            "https://fireburn.ru/",
+                            "https://quill.p3k.io/",
+                            "create update media"
+                        )))
+                    .untuple_one()
+                    .and_then(super::check_auth))
+            .await
+            .unwrap_err();
+
+        let json: &MicropubError = err.find::<MicropubError>().unwrap();
+        assert_eq!(json.error, super::ErrorType::NotAuthorized);
+    }
+
+    #[tokio::test]
+    async fn test_query_foreign_url() {
+        let mut res = warp::test::request()
+            .filter(&warp::any().then(|| super::_query(
+                crate::database::MemoryStorage::new(),
+                super::MicropubQuery::source("https://aaronparecki.com/feeds/main"),
+                crate::indieauth::User::new(
+                    "https://fireburn.ru/",
+                    "https://quill.p3k.io/",
+                    "create update media"
+                )
+            )))
+            .await
+            .unwrap()
+            .into_response();
+
+        assert_eq!(res.status(), 401);
+        let body = res.body_mut().data().await.unwrap().unwrap();
+        let json: MicropubError = serde_json::from_slice(&body as &[u8]).unwrap();
+        assert_eq!(json.error, super::ErrorType::NotAuthorized);
+    }
+}
+
diff --git a/kittybox-rs/src/micropub/post.rs b/kittybox-rs/src/micropub/post.rs
new file mode 100644
index 0000000..cf9f3d9
--- /dev/null
+++ b/kittybox-rs/src/micropub/post.rs
@@ -0,0 +1,944 @@
+use crate::database::Storage;
+use crate::indieauth::User;
+use chrono::prelude::*;
+use core::iter::Iterator;
+use newbase60::num_to_sxg;
+use std::convert::TryInto;
+use serde_json::json;
+
+pub(crate) static DEFAULT_CHANNEL_PATH: &str = "/feeds/main";
+static DEFAULT_CHANNEL_NAME: &str = "Main feed";
+pub(crate) static CONTACTS_CHANNEL_PATH: &str = "/feeds/vcards";
+static CONTACTS_CHANNEL_NAME: &str = "My address book";
+pub(crate) static FOOD_CHANNEL_PATH: &str = "/feeds/food";
+static FOOD_CHANNEL_NAME: &str = "My recipe book";
+
+fn get_folder_from_type(post_type: &str) -> String {
+    (match post_type {
+        "h-feed" => "feeds/",
+        "h-card" => "vcards/",
+        "h-event" => "events/",
+        "h-food" => "food/",
+        _ => "posts/",
+    })
+    .to_string()
+}
+
+/// Reset the datetime to a proper datetime.
+/// Do not attempt to recover the information.
+/// Do not pass GO. Do not collect $200.
+fn reset_dt(post: &mut serde_json::Value) -> DateTime<FixedOffset> {
+    let curtime: DateTime<Local> = Local::now();
+    post["properties"]["published"] = json!([curtime.to_rfc3339()]);
+    chrono::DateTime::from(curtime)
+}
+
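+/// Normalize an incoming MF2-JSON document and return its UID together with
+/// the normalized document. Roughly speaking (the tests below are
+/// authoritative), a minimal `{"type": ["h-entry"], "properties":
+/// {"content": ["Hello!"]}}` comes out with `published`, `uid` and `url`
+/// filled in, Markdown content rendered to HTML, the default channel attached
+/// and the author set to the user's `me` URL.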
+pub fn normalize_mf2(mut body: serde_json::Value, user: &User) -> (String, serde_json::Value) {
+    // Normalize the MF2 object here.
+    let me = &user.me;
+    let folder = get_folder_from_type(body["type"][0].as_str().unwrap());
+    let published: DateTime<FixedOffset> = if let Some(dt) = body["properties"]["published"][0].as_str() {
+        // Check if the datetime is parsable.
+        match DateTime::parse_from_rfc3339(dt) {
+            Ok(dt) => dt,
+            Err(_) => reset_dt(&mut body)
+        }
+    } else {
+        // Set the datetime.
+        // Note: this code block duplicates the functionality of the failsafe above.
+        // Consider refactoring it into a helper function?
+        reset_dt(&mut body)
+    };
+    match body["properties"]["uid"][0].as_str() {
+        None => {
+            let uid = serde_json::Value::String(
+                me.join(
+                    &(folder.clone()
+                        + &num_to_sxg(published.timestamp_millis().try_into().unwrap())),
+                )
+                .unwrap()
+                .to_string(),
+            );
+            body["properties"]["uid"] = serde_json::Value::Array(vec![uid.clone()]);
+            match body["properties"]["url"].as_array_mut() {
+                Some(array) => array.push(uid),
+                None => body["properties"]["url"] = body["properties"]["uid"].clone(),
+            }
+        }
+        Some(uid_str) => {
+            let uid = uid_str.to_string();
+            match body["properties"]["url"].as_array_mut() {
+                Some(array) => {
+                    if !array.iter().any(|i| i.as_str().unwrap_or("") == uid) {
+                        array.push(serde_json::Value::String(uid))
+                    }
+                }
+                None => body["properties"]["url"] = body["properties"]["uid"].clone(),
+            }
+        }
+    }
+    if let Some(slugs) = body["properties"]["mp-slug"].as_array() {
+        let new_urls = slugs
+            .iter()
+            .map(|i| i.as_str().unwrap_or(""))
+            .filter(|i| i != &"")
+            .map(|i| me.join(&((&folder).clone() + i)).unwrap().to_string())
+            .collect::<Vec<String>>();
+        let urls = body["properties"]["url"].as_array_mut().unwrap();
+        new_urls.iter().for_each(|i| urls.push(json!(i)));
+    }
+    let props = body["properties"].as_object_mut().unwrap();
+    props.remove("mp-slug");
+
+    if body["properties"]["content"][0].is_string() {
+        // Convert the content to HTML using the `markdown` crate
+        body["properties"]["content"] = json!([{
+            "html": markdown::to_html(body["properties"]["content"][0].as_str().unwrap()),
+            "value": body["properties"]["content"][0]
+        }])
+    }
+    // TODO: apply this normalization to editing too
+    if body["properties"]["mp-channel"].is_array() {
+        let mut additional_channels = body["properties"]["mp-channel"].as_array().unwrap().clone();
+        if let Some(array) = body["properties"]["channel"].as_array_mut() {
+            array.append(&mut additional_channels);
+        } else {
+            body["properties"]["channel"] = json!(additional_channels)
+        }
+        body["properties"].as_object_mut().unwrap().remove("mp-channel");
+    } else if body["properties"]["mp-channel"].is_string() {
+        let chan = body["properties"]["mp-channel"].as_str().unwrap().to_owned();
+        if let Some(array) = body["properties"]["channel"].as_array_mut() {
+            array.push(json!(chan))
+        } else {
+            body["properties"]["channel"] = json!([chan]);
+        }
+        body["properties"].as_object_mut().unwrap().remove("mp-channel");
+    }
+    if body["properties"]["channel"][0].as_str().is_none() {
+        match body["type"][0].as_str() {
+            Some("h-entry") => {
+                // Set the channel to the main channel...
+                let default_channel = me.join(DEFAULT_CHANNEL_PATH).unwrap().to_string();
+
+                body["properties"]["channel"] = json!([default_channel]);
+            }
+            Some("h-card") => {
+                let default_channel = me.join(CONTACTS_CHANNEL_PATH).unwrap().to_string();
+
+                body["properties"]["channel"] = json!([default_channel]);
+            }
+            Some("h-food") => {
+                let default_channel = me.join(FOOD_CHANNEL_PATH).unwrap().to_string();
+
+                body["properties"]["channel"] = json!([default_channel]);
+            }
+            // TODO h-event
+            /*"h-event" => {
+                let default_channel
+            },*/
+            _ => {
+                body["properties"]["channel"] = json!([]);
+            }
+        }
+    }
+    body["properties"]["posted-with"] = json!([user.client_id]);
+    if body["properties"]["author"][0].as_str().is_none() {
+        body["properties"]["author"] = json!([me.as_str()])
+    }
+    // TODO: maybe highlight #hashtags?
+    // Find other processing to do and insert it here
+    return (
+        body["properties"]["uid"][0].as_str().unwrap().to_string(),
+        body,
+    );
+}
+
+/*pub async fn new_post<S: Storage>(
+    req: Request<ApplicationState<S>>,
+    body: serde_json::Value,
+) -> Result {
+    // First, check for rights.
+    let user = req.ext::<User>().unwrap();
+    let storage = &req.state().storage;
+    if !user.check_scope("create") {
+        return error_json!(
+            401,
+            "invalid_scope",
+            "Not enough privileges to post. Try a token with a \"create\" scope instead."
+        );
+    }
+    let (uid, post) = normalize_mf2(body, user);
+
+    // Security check!
+    // This software might also be used in a multi-user setting
+    // where several users or identities share one Micropub server
+    // (maybe a family website or a shitpost sideblog?)
+    if !post["properties"]["uid"][0]
+        .as_str()
+        .unwrap()
+        .starts_with(user.me.as_str())
+        || post["properties"]["channel"]
+            .as_array()
+            .unwrap()
+            .iter()
+            .any(|url| !url.as_str().unwrap().starts_with(user.me.as_str()))
+    {
+        return error_json!(
+            403,
+            "forbidden",
+            "You're trying to post to someone else's website..."
+        );
+    }
+
+    match storage.post_exists(&uid).await {
+        Ok(exists) => {
+            if exists {
+                return error_json!(
+                    409,
+                    "already_exists",
+                    format!(
+                        "A post with the exact same UID already exists in the database: {}",
+                        uid
+                    )
+                );
+            }
+        }
+        Err(err) => return Ok(err.into()),
+    }
+
+    if let Err(err) = storage.put_post(&post, user.me.as_str()).await {
+        return error_json!(500, "database_error", format!("{}", err));
+    }
+
+    // It makes sense to use a loop here, because you wouldn't post to a hundred channels at once.
+    // Mostly one or two, and even those will usually be picked for you by software.
+    for channel in post["properties"]["channel"]
+        .as_array()
+        .unwrap()
+        .iter()
+        .map(|i| i.as_str().unwrap_or("").to_string())
+        .filter(|i| !i.is_empty())
+        .collect::<Vec<_>>()
+    {
+        let default_channel = user.me.join(DEFAULT_CHANNEL_PATH).unwrap().to_string();
+        let vcards_channel = user.me.join(CONTACTS_CHANNEL_PATH).unwrap().to_string();
+        let food_channel = user.me.join(FOOD_CHANNEL_PATH).unwrap().to_string();
+        match storage.post_exists(&channel).await {
+            Ok(exists) => {
+                if exists {
+                    if let Err(err) = storage
+                        .update_post(&channel, json!({"add": {"children": [uid]}}))
+                        .await
+                    {
+                        return error_json!(
+                            500,
+                            "database_error",
+                            format!(
+                                "Couldn't insert post into the channel due to a database error: {}",
+                                err
+                            )
+                        );
+                    }
+                } else if channel == default_channel
+                    || channel == vcards_channel
+                    || channel == food_channel
+                {
+                    if let Err(err) = create_feed(storage, &uid, &channel, user).await {
+                        return error_json!(
+                            500,
+                            "database_error",
+                            format!("Couldn't save feed: {}", err)
+                        );
+                    }
+                } else {
+                    warn!(
+                        "Ignoring request to post to a non-existent feed: {}",
+                        channel
+                    );
+                }
+            }
+            Err(err) => return error_json!(500, "database_error", err),
+        }
+    }
+    // END WRITE BOUNDARY
+
+    // do background processing on the post
+    async_std::task::spawn(post_process_new_post(req, post));
+
+    Ok(Response::builder(202)
+        .header("Location", &uid)
+        .body(json!({"status": "accepted", "location": &uid}))
+        .build())
+}*/
+
+pub(crate) async fn create_feed(
+    storage: &impl Storage,
+    uid: &str,
+    channel: &str,
+    user: &User,
+) -> crate::database::Result<()> {
+    let path = url::Url::parse(channel).unwrap().path().to_string();
+
+    // Note to Future Vika: DO NOT CONVERT THIS TO A MATCH BLOCK
+    // It will get treated as a binding instead of a const
+    // See `rustc --explain E0530` for more info
+    let name = if path == DEFAULT_CHANNEL_PATH {
+        DEFAULT_CHANNEL_NAME
+    } else if path == CONTACTS_CHANNEL_PATH {
+        CONTACTS_CHANNEL_NAME
+    } else if path == FOOD_CHANNEL_PATH {
+        FOOD_CHANNEL_NAME
+    } else {
+        panic!("Tried to create an unknown default feed!")
+    };
+
+    let (_, feed) = normalize_mf2(
+        json!({
+            "type": ["h-feed"],
+            "properties": {
+                "name": [name],
+                "uid": [channel]
+            },
+            "children": [uid]
+        }),
+        user,
+    );
+    storage.put_post(&feed, user.me.as_str()).await
+}
+
+/*async fn post_process_new_post<S: Storage>(
+    req: Request<ApplicationState<S>>,
+    post: serde_json::Value,
+) {
+    // TODO: Post-processing the post (aka second write pass)
+    // - [-] Download rich reply contexts
+    // - [-] Syndicate the post if requested, add links to the syndicated copies
+    // - [ ] Send WebSub notifications to the hub (if we happen to have one)
+    // - [x] Send webmentions
+    let http = &req.state().http_client;
+    let uid = post["properties"]["uid"][0].as_str().unwrap().to_string();
+    // 1. Download rich reply contexts
+    //    This needs to be done first, because at this step we can also determine webmention endpoints
+    //    and save them for later use. Additionally, the richer our content is, the better.
+    //    This needs to be done asynchronously, so the posting experience for the author will be as fast
+    //    as possible without making them wait for potentially slow downstream websites to load
+    // 1.1. Collect the list of contextually-significant posts to load context from.
+    //      This will include reply-tos, liked, reposted and bookmarked content
+    //
+    //      TODO: Fetch links mentioned in a post, since we need to send webmentions to those as mentions
+    let mut contextually_significant_posts: Vec<surf::Url> = vec![];
+    for prop in &["in-reply-to", "like-of", "repost-of", "bookmark-of"] {
+        if let Some(array) = post["properties"][prop].as_array() {
+            contextually_significant_posts.extend(
+                array
+                    .iter()
+                    .filter_map(|v| v.as_str().and_then(|v| surf::Url::parse(v).ok())),
+            );
+        }
+    }
+    // 1.2. Deduplicate the list
+    contextually_significant_posts.sort_unstable();
+    contextually_significant_posts.dedup();
+
+    // 1.3. Fetch the posts with their bodies and save them in a new Vec<(surf::Url, String)>
+    let posts_with_bodies: Vec<(surf::Url, surf::Response, String)> =
+        stream::iter(contextually_significant_posts.into_iter())
+            .filter_map(|v: surf::Url| async move {
+                if let Ok(res) = http.get(&v).send().await {
+                    if res.status() != 200 {
+                        None
+                    } else {
+                        Some((v, res))
+                    }
+                } else {
+                    None
+                }
+            })
+            .filter_map(|(v, mut res): (surf::Url, surf::Response)| async move {
+                if let Ok(body) = res.body_string().await {
+                    Some((v, res, body))
+                } else {
+                    None
+                }
+            })
+            .collect()
+            .await;
+    // 1.4. Parse the bodies and include them in relevant places on the MF2 struct
+    //      This requires an MF2 parser, and there are none for Rust at the moment.
+    //
+    // TODO: integrate https://gitlab.com/vikanezrimaya/mf2-parser when it's ready
+
+    // 2. Syndicate the post
+    let syndicated_copies: Vec<serde_json::Value>;
+    if let Some(syndication_targets) = post["properties"]["syndicate-to"].as_array() {
+        syndicated_copies = stream::iter(
+            syndication_targets
+                .iter()
+                .filter_map(|v| v.as_str())
+                .filter_map(|t| surf::Url::parse(t).ok())
+                .collect::<Vec<_>>()
+                .into_iter()
+                .map(|_t: surf::Url| async move {
+                    // TODO: Define supported syndication methods
+                    // and syndicate the endpoint there
+                    // Possible ideas:
+                    //  - indieweb.xyz (might need a lot of space for the buttons though; investigate proposing a way to group syndication targets)
+                    //  - news.indieweb.org (IndieNews - needs a category linking to #indienews)
+                    //  - Twitter via brid.gy (do I really need Twitter syndication tho?)
+                    if false {
+                        Some("")
+                    } else {
+                        None
+                    }
+                }),
+        )
+        .buffer_unordered(3)
+        .filter_map(|v| async move { v })
+        .map(|v| serde_json::Value::String(v.to_string()))
+        .collect::<Vec<_>>()
+        .await;
+    } else {
+        syndicated_copies = vec![]
+    }
+    // Save the post a second time here after syndication
+    // We use update_post here to prevent race conditions, since it's required to be atomic.
+    let mut update = json!({
+        "action": "update",
+        "url": &uid
+    });
+    if !syndicated_copies.is_empty() {
+        update["add"] = json!({});
+        update["add"]["syndication"] = serde_json::Value::Array(syndicated_copies);
+    }
+    if !posts_with_bodies.is_empty() {
+        error!("Replacing context links with parsed MF2-JSON data is not yet implemented (but it's ok! it'll just be less pretty)")
+        /* TODO: Replace context links with parsed MF2-JSON data * /
+        update["replace"] = {}
+        update["replace"]["like-of"] = []
+        update["replace"]["in-reply-to"] = []
+        update["replace"]["bookmark-of"] = []
+        update["replace"]["repost-of"] = []
+        // */
+    }
+    // We don't need the original copy of the post anymore... I hope!
+    // This will act as a safeguard so I can't read stale data by accident anymore...
+    drop(post);
+    if let Err(err) = req.state().storage.update_post(&uid, update).await {
+        error!("Encountered error while post-processing a post: {}", err)
+        // At this point, we can still continue, we just won't have rich data for the post
+    // I wonder why it could even happen, except in the case of a database disconnection?
+    }
+    // 3. Send WebSub notifications
+    // TODO WebSub support
+
+    // 4. Send webmentions
+    //    We'll need the bodies here to get their endpoints
+    let source = &uid;
+    stream::iter(posts_with_bodies.into_iter())
+        .filter_map(
+            |(url, response, body): (surf::Url, surf::Response, String)| async move {
+                // Check Link headers first
+                // the first webmention endpoint will be returned
+                if let Some(values) = response.header("Link") {
+                    let iter = values.iter().flat_map(|i| i.as_str().split(','));
+
+                    // Honestly, I don't like this parser. It's very crude.
+                    // But it should do the job, even if I don't like it.
+                    for link in iter {
+                        let mut split = link.split(';');
+
+                        match split.next() {
+                            Some(uri) => {
+                                if let Some(uri) = uri.strip_prefix('<') {
+                                    if let Some(uri) = uri.strip_suffix('>') {
+                                        for prop in split {
+                                            let lowercased = prop.to_ascii_lowercase();
+                                            if &lowercased == "rel=\"webmention\""
+                                                || &lowercased == "rel=webmention"
+                                            {
+                                                if let Ok(endpoint) = url.join(uri) {
+                                                    return Some((url, endpoint));
+                                                }
+                                            }
+                                        }
+                                    }
+                                }
+                            }
+                            None => continue,
+                        }
+                    }
+                }
+                // TODO: Replace this function once the MF2 parser is ready
+                // A compliant parser's output format includes rels,
+                // we could just find a Webmention one in there
+                let pattern =
+                    easy_scraper::Pattern::new(r#"<link href="{{url}}" rel="webmention">"#)
+                        .expect("Pattern for webmentions couldn't be parsed");
+                let matches = pattern.matches(&body);
+                if matches.is_empty() {
+                    return None;
+                }
+                let endpoint = &matches[0]["url"];
+                if let Ok(endpoint) = url.join(endpoint) {
+                    Some((url, endpoint))
+                } else {
+                    None
+                }
+            },
+        )
+        .map(|(target, endpoint)| async move {
+            info!(
+                "Sending webmention to {} about {}",
+                source,
+                &target.to_string()
+            );
+            let response = http
+                .post(&endpoint)
+                .content_type("application/x-www-form-urlencoded")
+                .body(
+                    serde_urlencoded::to_string(vec![
+                        ("source", source),
+                        ("target", &target.to_string()),
+                    ])
+                    .expect("Couldn't construct webmention form"),
+                )
+                .send()
+                .await;
+            match response {
+                Ok(response) => {
+                    if response.status() == 200
+                        || response.status() == 201
+                        || response.status() == 202
+                    {
+                        info!("Sent webmention for {} to {}", target, endpoint);
+                        Ok(())
+                    } else {
+                        error!(
+                            "Sending webmention for {} to {} failed: Endpoint replied with HTTP {}",
+                            target,
+                            endpoint,
+                            response.status()
+                        );
+                        Err(())
+                    }
+                }
+                Err(err) => {
+                    error!(
+                        "Sending webmention for {} to {} failed: {}",
+                        target, endpoint, err
+                    );
+                    Err(())
+                }
+            }
+        })
+        .buffer_unordered(3)
+        .collect::<Vec<_>>()
+        .await;
+}*/
+
+/*async fn process_json<S: Storage>(
+    req: Request<ApplicationState<S>>,
+    body: serde_json::Value,
+) -> Result {
+    let is_action = body["action"].is_string() && body["url"].is_string();
+    if is_action {
+        // This could be an update, a deletion or an undeletion request.
+        // Process it separately.
+        let action = body["action"].as_str().unwrap();
+        let url = body["url"].as_str().unwrap();
+        let user = req.ext::<User>().unwrap();
+        match action {
+            "delete" => {
+                if !user.check_scope("delete") {
+                    return error_json!(
+                        401,
+                        "insufficient_scope",
+                        "You need a `delete` scope to delete posts."
+                    );
+                }
+                // This special scope is not available through a token endpoint, since the
+                // authorization endpoint is supposed to reject any auth request trying to get this
+                // scope. It is intended for TRUSTED external services that need to modify the
+                // database while ignoring any access controls
+                if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str()
+                    && !user.check_scope("kittybox_internal:do_what_thou_wilt")
+                {
+                    return error_json!(
+                        403,
+                        "forbidden",
+                        "You're not allowed to delete someone else's posts."
+                    );
+                }
+                if let Err(error) = req.state().storage.delete_post(url).await {
+                    return Ok(error.into());
+                }
+                Ok(Response::builder(200).build())
+            }
+            "update" => {
+                if !user.check_scope("update") {
+                    return error_json!(
+                        401,
+                        "insufficient_scope",
+                        "You need an `update` scope to update posts."
+                    );
+                }
+                if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str()
+                    && !user.check_scope("kittybox_internal:do_what_thou_wilt")
+                {
+                    return error_json!(
+                        403,
+                        "forbidden",
+                        "You're not allowed to delete someone else's posts."
+                    );
+                }
+                if let Err(error) = req.state().storage.update_post(url, body.clone()).await {
+                    Ok(error.into())
+                } else {
+                    Ok(Response::builder(204).build())
+                }
+            }
+            _ => return error_json!(400, "invalid_request", "This action is not supported."),
+        }
+    } else if body["type"][0].is_string() {
+        // This is definitely an h-entry or something similar. Check if it has properties?
+        if body["properties"].is_object() {
+            // Ok, this is definitely a new h-entry. Let's save it.
+            return new_post(req, body).await;
+        } else {
+            return error_json!(
+                400,
+                "invalid_request",
+                "This MF2-JSON object has a type, but not properties. This makes no sense to post."
+            );
+        }
+    } else {
+        return error_json!(
+            400,
+            "invalid_request",
+            "Try sending MF2-structured data or an object with an \"action\" and \"url\" keys."
+        );
+    }
+}*/
+
+/*async fn process_form<S: Storage>(
+    req: Request<ApplicationState<S>>,
+    form: Vec<(String, String)>,
+) -> Result {
+    if let Some((_, v)) = form.iter().find(|(k, _)| k == "action") {
+        if v == "delete" {
+            let user = req.ext::<User>().unwrap();
+            if !user.check_scope("delete") {
+                return error_json!(
+                    401,
+                    "insufficient_scope",
+                    "You cannot delete posts without a `delete` scope."
+                );
+            }
+            match form.iter().find(|(k, _)| k == "url") {
+                Some((_, url)) => {
+                    if (url::Url::parse(url)?.origin().ascii_serialization() + "/")
+                        != user.me.as_str()
+                        && !user.check_scope("kittybox_internal:do_what_thou_wilt")
+                    {
+                        return error_json!(
+                            403,
+                            "forbidden",
+                            "You're not allowed to delete someone else's posts."
+                        );
+                    }
+                    if let Err(error) = req.state().storage.delete_post(url).await {
+                        return error_json!(500, "database_error", error);
+                    }
+                    return Ok(Response::builder(200).build());
+                }
+                None => {
+                    return error_json!(
+                        400,
+                        "invalid_request",
+                        "Please provide an `url` to delete."
+                    )
+                }
+            }
+        } else {
+            return error_json!(400, "invalid_request", "This action is not supported in form-encoded mode. (JSON requests support more actions, use JSON!)");
+        }
+    }
+
+    let mf2 = convert_form_to_mf2_json(form);
+
+    if mf2["properties"].as_object().unwrap().keys().len() > 0 {
+        return new_post(req, mf2).await;
+    }
+    return error_json!(
+        400,
+        "invalid_request",
+        "Try sending h=entry&content=something%20interesting"
+    );
+}*/
+
+/*pub async fn post_handler<S: Storage>(mut req: Request<ApplicationState<S>>) -> Result {
+    match req.content_type() {
+        Some(value) => {
+            if value == Mime::from_str("application/json").unwrap() {
+                match req.body_json::<serde_json::Value>().await {
+                    Ok(parsed) => return process_json(req, parsed).await,
+                    Err(err) => {
+                        return error_json!(
+                            400,
+                            "invalid_request",
+                            format!("Parsing JSON failed: {:?}", err)
+                        )
+                    }
+                }
+            } else if value == Mime::from_str("application/x-www-form-urlencoded").unwrap() {
+                match req.body_form::<Vec<(String, String)>>().await {
+                    Ok(parsed) => return process_form(req, parsed).await,
+                    Err(err) => {
+                        return error_json!(
+                            400,
+                            "invalid_request",
+                            format!("Parsing form failed: {:?}", err)
+                        )
+                    }
+                }
+            } else {
+                return error_json!(
+                    415, "unsupported_media_type",
+                    "What's this? Try sending JSON instead. (urlencoded form also works but is less cute)"
+                );
+            }
+        }
+        _ => {
+            return error_json!(
+                415, "unsupported_media_type",
+                "You didn't send a Content-Type header, so we don't know how to parse your request."
+            );
+        }
+    }
+}*/
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_no_replace_uid() {
+        let mf2 = json!({
+            "type": ["h-card"],
+            "properties": {
+                "uid": ["https://fireburn.ru/"],
+                "name": ["Vika Nezrimaya"],
+                "note": ["A crazy programmer girl who wants some hugs"]
+            }
+        });
+
+        let (uid, normalized) = normalize_mf2(
+            mf2.clone(),
+            &User::new(
+                "https://fireburn.ru/",
+                "https://quill.p3k.io/",
+                "create update media",
+            ),
+        );
+        assert_eq!(
+            normalized["properties"]["uid"][0], mf2["properties"]["uid"][0],
+            "UID was replaced"
+        );
+        assert_eq!(
+            normalized["properties"]["uid"][0], uid,
+            "Returned post location doesn't match UID"
+        );
+    }
+
+    #[test]
+    fn test_mp_channel() {
+        let mf2 = json!({
+            "type": ["h-entry"],
+            "properties": {
+                "uid": ["https://fireburn.ru/posts/test"],
+                "content": [{"html": "<p>Hello world!</p>"}],
+                "mp-channel": ["https://fireburn.ru/feeds/test"]
+            }
+        });
+
+        let (_, normalized) = normalize_mf2(
+            mf2.clone(),
+            &User::new(
+                "https://fireburn.ru/",
+                "https://quill.p3k.io/",
+                "create update media",
+            )
+        );
+
+        assert_eq!(
+            normalized["properties"]["channel"],
+            mf2["properties"]["mp-channel"]
+        );
+    }
+
+    #[test]
+    fn test_mp_channel_as_string() {
+        let mf2 = json!({
+            "type": ["h-entry"],
+            "properties": {
+                "uid": ["https://fireburn.ru/posts/test"],
+                "content": [{"html": "<p>Hello world!</p>"}],
+                "mp-channel": "https://fireburn.ru/feeds/test"
+            }
+        });
+
+        let (_, normalized) = normalize_mf2(
+            mf2.clone(),
+            &User::new(
+                "https://fireburn.ru/",
+                "https://quill.p3k.io/",
+                "create update media",
+            )
+        );
+
+        assert_eq!(
+            normalized["properties"]["channel"][0],
+            mf2["properties"]["mp-channel"]
+        );
+    }
+
+    #[test]
+    fn test_normalize_mf2() {
+        let mf2 = json!({
+            "type": ["h-entry"],
+            "properties": {
+                "content": ["This is content!"]
+            }
+        });
+
+        let (uid, post) = normalize_mf2(
+            mf2,
+            &User::new(
+                "https://fireburn.ru/",
+                "https://quill.p3k.io/",
+                "create update media",
+            ),
+        );
+        assert_eq!(
+            post["properties"]["published"]
+                .as_array()
+                .expect("post['published'] is undefined")
+                .len(),
+            1,
+            "Post doesn't have a published time"
+        );
+        DateTime::parse_from_rfc3339(post["properties"]["published"][0].as_str().unwrap())
+            .expect("Couldn't parse date from rfc3339");
+        assert!(
+            !post["properties"]["url"]
+                .as_array()
+                .expect("post['url'] is undefined")
+                .is_empty(),
+            "Post doesn't have any URLs"
+        );
+        assert_eq!(
+            post["properties"]["uid"]
+                .as_array()
+                .expect("post['uid'] is undefined")
+                .len(),
+            1,
+            "Post doesn't have a single UID"
+        );
+        assert_eq!(
+            post["properties"]["uid"][0], uid,
+            "UID of a post and its supposed location don't match"
+        );
+        assert!(
+            uid.starts_with("https://fireburn.ru/posts/"),
+            "The post namespace is incorrect"
+        );
+        assert_eq!(
+            post["properties"]["content"][0]["html"]
+                .as_str()
+                .expect("Post doesn't have a rich content object")
+                .trim(),
+            "<p>This is content!</p>",
+            "Parsed Markdown content doesn't match expected HTML"
+        );
+        assert_eq!(
+            post["properties"]["channel"][0], "https://fireburn.ru/feeds/main",
+            "Post isn't posted to the main channel"
+        );
+        assert_eq!(
+            post["properties"]["author"][0], "https://fireburn.ru/",
+            "Post author is unknown"
+        );
+    }
+
+    #[test]
+    fn test_mp_slug() {
+        let mf2 = json!({
+            "type": ["h-entry"],
+            "properties": {
+                "content": ["This is content!"],
+                "mp-slug": ["hello-post"]
+            },
+        });
+
+        let (_, post) = normalize_mf2(
+            mf2,
+            &User::new(
+                "https://fireburn.ru/",
+                "https://quill.p3k.io/",
+                "create update media",
+            ),
+        );
+        assert!(
+            post["properties"]["url"]
+                .as_array()
+                .unwrap()
+                .iter()
+                .map(|i| i.as_str().unwrap())
+                .any(|i| i == "https://fireburn.ru/posts/hello-post"),
+            "Didn't found an URL pointing to the location expected by the mp-slug semantics"
+        );
+        assert!(
+            post["properties"]["mp-slug"].as_array().is_none(),
+            "mp-slug wasn't deleted from the array!"
+        )
+    }
+
+    #[test]
+    fn test_normalize_feed() {
+        let mf2 = json!({
+            "type": ["h-feed"],
+            "properties": {
+                "name": "Main feed",
+                "mp-slug": ["main"]
+            }
+        });
+
+        let (uid, post) = normalize_mf2(
+            mf2,
+            &User::new(
+                "https://fireburn.ru/",
+                "https://quill.p3k.io/",
+                "create update media",
+            ),
+        );
+        assert_eq!(
+            post["properties"]["uid"][0], uid,
+            "UID of a post and its supposed location don't match"
+        );
+        assert_eq!(post["properties"]["author"][0], "https://fireburn.ru/");
+        assert!(
+            post["properties"]["url"]
+                .as_array()
+                .unwrap()
+                .iter()
+                .map(|i| i.as_str().unwrap())
+                .any(|i| i == "https://fireburn.ru/feeds/main"),
+            "Didn't found an URL pointing to the location expected by the mp-slug semantics"
+        );
+        assert!(
+            post["properties"]["mp-slug"].as_array().is_none(),
+            "mp-slug wasn't deleted from the array!"
+        )
+    }
+}
diff --git a/kittybox-rs/templates/Cargo.toml b/kittybox-rs/templates/Cargo.toml
new file mode 100644
index 0000000..fe8ac19
--- /dev/null
+++ b/kittybox-rs/templates/Cargo.toml
@@ -0,0 +1,26 @@
+[package]
+name = "kittybox-templates"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dev-dependencies]
+faker_rand = "^0.1.1"
+rand = "^0.8.5"
+test-logger = "^0.1.0"
+[dev-dependencies.microformats]
+version="^0.2.0"
+
+[dependencies]
+ellipse = "^0.2.0"           # Truncate and ellipsize strings in a human-friendly way
+http = "^0.2.7"              # Hyper's strong HTTP types
+log = "^0.4.14"              # A lightweight logging facade for Rust
+markup = "^0.12.0"           # HTML templating engine
+serde_json = "^1.0.64"       # A JSON serialization file format
+[dependencies.chrono]        # Date and time library for Rust
+version = "^0.4.19"
+features = ["serde"]
+[dependencies.kittybox-util]
+version = "0.1.0"
+path = "../util"
\ No newline at end of file
diff --git a/kittybox-rs/templates/src/lib.rs b/kittybox-rs/templates/src/lib.rs
new file mode 100644
index 0000000..39f1075
--- /dev/null
+++ b/kittybox-rs/templates/src/lib.rs
@@ -0,0 +1,347 @@
+mod templates;
+pub use templates::{ErrorPage, MainPage, Template, POSTS_PER_PAGE, Entry, VCard, Feed};
+mod onboarding;
+pub use onboarding::OnboardingPage;
+mod login;
+pub use login::LoginPage;
+
+#[cfg(test)]
+mod tests {
+    use faker_rand::lorem::Word;
+    use serde_json::json;
+    use microformats::types::{Document, Item, PropertyValue, Url};
+    use std::cell::RefCell;
+    use std::rc::Rc;
+    use faker_rand::en_us::internet::Domain;
+
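+    /// Kinds of test posts that `gen_random_post` can produce. The
+    /// `ReplyTo`/`LikeOf` variants embed a cited post as an object, while
+    /// the `*Link` variants reference the target by URL only.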
+    enum PostType {
+        Note,
+        Article,
+        ReplyTo(serde_json::Value),
+        ReplyToLink(String),
+        LikeOf(serde_json::Value),
+        LikeOfLink(String)
+    }
+
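+    /// Generate a random h-card (an MF2 profile) for the given domain,
+    /// used as the author object of generated test posts.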
+    fn gen_hcard(domain: &str) -> serde_json::Value {
+        use faker_rand::en_us::names::FirstName;
+
+        json!({
+            "type": ["h-card"],
+            "properties": {
+                "name": [rand::random::<FirstName>().to_string()],
+                "photo": [format!("https://{domain}/media/me.png")],
+                "uid": [format!("https://{domain}/")],
+                "url": [format!("https://{domain}/")]
+            }
+        })
+    }
+    
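+    /// Generate a random post of the requested kind on the given domain,
+    /// in MF2-JSON form, complete with `uid`, `url`, `published` and
+    /// `author` properties.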
+    fn gen_random_post(domain: &str, kind: PostType) -> serde_json::Value {
+        use faker_rand::lorem::{Paragraph, Sentence};
+
+        fn html(content: Paragraph) -> serde_json::Value {
+            json!({
+                "html": format!("<p>{}</p>", content),
+                "value": content.to_string()
+            })
+        }
+            
+        let uid = format!(
+            "https://{domain}/posts/{}-{}-{}",
+            rand::random::<Word>(), rand::random::<Word>(), rand::random::<Word>()
+        );
+        let dt = chrono::offset::Local::now()
+            .to_rfc3339_opts(chrono::SecondsFormat::Secs, true);
+
+        match kind {
+            PostType::Note => {
+                let content = rand::random::<Paragraph>();
+
+                json!({
+                    "type": ["h-entry"],
+                    "properties": {
+                        "content": [html(content)],
+                        "published": [dt],
+                        "uid": [&uid], "url": [&uid],
+                        "author": [gen_hcard(domain)]
+                    }
+                })
+            }
+            PostType::Article => {
+                let content = rand::random::<Paragraph>();
+                let name = rand::random::<Sentence>();
+
+                json!({
+                    "type": ["h-entry"],
+                    "properties": {
+                        "content": [html(content)],
+                        "published": [dt],
+                        "uid": [&uid], "url": [&uid],
+                        "author": [gen_hcard(domain)],
+                        "name": [name.to_string()]
+                    }
+                })
+            }
+            PostType::ReplyTo(ctx) => {
+                let content = rand::random::<Paragraph>();
+
+                json!({
+                    "type": ["h-entry"],
+                    "properties": {
+                        "content": [html(content)],
+                        "published": [dt],
+                        "uid": [&uid], "url": [&uid],
+                        "author": [gen_hcard(domain)],
+                        "in-reply-to": [{
+                            "type": ["h-cite"],
+                            "properties": ctx["properties"]
+                        }]
+                    }
+                })
+            },
+            PostType::ReplyToLink(link) => {
+                let content = rand::random::<Paragraph>();
+
+                json!({
+                    "type": ["h-entry"],
+                    "properties": {
+                        "content": [html(content)],
+                        "published": [dt],
+                        "uid": [&uid], "url": [&uid],
+                        "author": [gen_hcard(domain)],
+                        "in-reply-to": [link]
+                    }
+                })
+            },
+            PostType::LikeOf(ctx) => {
+                json!({
+                    "type": ["h-entry"],
+                    "properties": {
+                        "published": [dt],
+                        "author": [gen_hcard(domain)],
+                        "uid": [&uid], "url": [&uid],
+                        "like-of": [{
+                            "type": ["h-cite"],
+                            "properties": ctx["properties"]
+                        }]
+                    }
+                })
+            },
+            PostType::LikeOfLink(link) => {
+                json!({
+                    "type": ["h-entry"],
+                    "properties": {
+                        "published": [dt],
+                        "author": [gen_hcard(domain)],
+                        "uid": [&uid], "url": [&uid],
+                        "like-of": [link]
+                    }
+                })
+            }
+        }
+    }
+
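+    /// Assert that the `dt-published` value parsed from the rendered HTML
+    /// matches the `published` property of the source MF2-JSON.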
+    fn check_dt_published(
+        mf2: &serde_json::Value,
+        item: &Rc<RefCell<Item>>
+    ) {
+        use microformats::types::temporal::Value as TemporalValue;
+
+        let _item = item.borrow();
+        let props = _item.properties.borrow();
+        assert!(props.contains_key("published"));
+
+        if let Some(PropertyValue::Temporal(
+            TemporalValue::Timestamp(item)
+        )) = props.get("published")
+            .and_then(|v| v.first())
+        {
+            use chrono::{DateTime, FixedOffset, NaiveDateTime};
+
+            // Faithfully reconstruct the original datetime
+            // I wonder why not just have an Enum that would
+            // get you either date, time or a datetime,
+            // potentially with an offset?
+            let offset = item.as_offset().unwrap().data;
+            let ndt: NaiveDateTime = item.as_date().unwrap().data
+                .and_time(item.as_time().unwrap().data)
+            // subtract the offset here, since we will add it back
+                - offset;
+            let dt = DateTime::<FixedOffset>::from_utc(ndt, offset);
+
+            let expected: DateTime<FixedOffset> = chrono::DateTime::parse_from_rfc3339(
+                mf2["properties"]["published"][0].as_str().unwrap()
+            ).unwrap();
+
+            assert_eq!(dt, expected);
+        } else {
+            unreachable!()
+        }
+    }
+
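+    /// Assert that the `e-content` HTML parsed from the rendered page
+    /// matches the source post's `content.html`.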
+    fn check_e_content(
+        mf2: &serde_json::Value,
+        item: &Rc<RefCell<Item>>
+    ) {
+        let _item = item.borrow();
+        let props = _item.properties.borrow();
+        assert!(props.contains_key("content"));
+
+        if let Some(PropertyValue::Fragment(content)) =
+            props.get("content")
+            .and_then(|v| v.first())
+        {
+            assert_eq!(
+                content.html,
+                mf2["properties"]["content"][0]["html"].as_str().unwrap()
+            );
+        } else {
+            unreachable!()
+        }
+
+    }
+
+    #[test]
+    #[ignore = "see https://gitlab.com/maxburon/microformats-parser/-/issues/7"]
+    fn test_note() {
+        test_logger::ensure_env_logger_initialized();
+
+        let mf2 = gen_random_post(
+            &rand::random::<Domain>().to_string(),
+            PostType::Note
+        );
+
+        let html = crate::templates::Entry {
+            post: &mf2
+        }.to_string();
+
+        let url: Url = mf2.pointer("/properties/uid/0")
+            .and_then(|i| i.as_str())
+            .and_then(|u| u.parse().ok())
+            .unwrap();
+        let parsed: Document = microformats::from_html(&html, url.clone()).unwrap();
+
+        if let Some(PropertyValue::Item(item)) = parsed.get_item_by_url(&url) {
+            let _item = item.borrow();
+            let props = _item.properties.borrow();
+
+            check_e_content(&mf2, &item);
+            check_dt_published(&mf2, &item);
+            assert!(props.contains_key("uid"));
+            assert!(props.contains_key("url"));
+            assert!(props.get("url")
+                    .unwrap()
+                    .iter()
+                    .any(|i| i == props.get("uid").and_then(|v| v.first()).unwrap()));
+            // XXX: fails because of https://gitlab.com/maxburon/microformats-parser/-/issues/7
+            assert!(!props.contains_key("name"));
+
+        } else {
+            unreachable!()
+        }
+    }
+
+    #[test]
+    fn test_article() {
+        test_logger::ensure_env_logger_initialized();
+
+        let mf2 = gen_random_post(
+            &rand::random::<Domain>().to_string(),
+            PostType::Article
+        );
+        let html = crate::templates::Entry {
+            post: &mf2
+        }.to_string();
+        let url: Url = mf2.pointer("/properties/uid/0")
+            .and_then(|i| i.as_str())
+            .and_then(|u| u.parse().ok())
+            .unwrap();
+        let parsed: Document = microformats::from_html(&html, url.clone()).unwrap();
+
+        if let Some(PropertyValue::Item(item)) = parsed.get_item_by_url(&url) {
+            let _item = item.borrow();
+            let props = _item.properties.borrow();
+
+            check_e_content(&mf2, &item);
+            check_dt_published(&mf2, &item);
+            assert!(props.contains_key("uid"));
+            assert!(props.contains_key("url"));
+            assert!(props.get("url")
+                    .unwrap()
+                    .iter()
+                    .any(|i| i == props.get("uid").and_then(|v| v.first()).unwrap()));
+            assert!(props.contains_key("name"));
+            if let Some(PropertyValue::Plain(name)) = props.get("name").and_then(|v| v.first()) {
+                assert_eq!(
+                    name,
+                    mf2.pointer("/properties/name/0")
+                        .and_then(|v| v.as_str())
+                        .unwrap()
+                );
+            } else {
+                panic!("Name wasn't a plain property!");
+            }
+        } else {
+            unreachable!()
+        }
+    }
+
+    #[test]
+    fn test_like_of() {
+        test_logger::ensure_env_logger_initialized();
+
+        for likeof in [
+            PostType::LikeOf(gen_random_post(
+                &rand::random::<Domain>().to_string(),
+                PostType::Note
+            )),
+            PostType::LikeOfLink(format!(
+                "https://{}/posts/{}-{}-{}",
+                &rand::random::<Domain>(),
+                &rand::random::<Word>(),
+                &rand::random::<Word>(),
+                &rand::random::<Word>(),
+            ))
+        ] {
+            let mf2 = gen_random_post(
+                &rand::random::<Domain>().to_string(),
+                likeof
+            );
+            let url: Url = mf2.pointer("/properties/uid/0")
+                .and_then(|i| i.as_str())
+                .and_then(|u| u.parse().ok())
+                .unwrap();
+            let html = crate::templates::Entry {
+                post: &mf2
+            }.to_string();
+            let parsed: Document = microformats::from_html(&html, url.clone()).unwrap();
+
+            if let Some(item) = parsed.items.get(0) {
+                let _item = item.borrow();
+                let props = _item.properties.borrow();
+
+                check_dt_published(&mf2, item);
+                assert!(props.contains_key("like-of"));
+                match props.get("like-of").and_then(|v| v.first()) {
+                    Some(PropertyValue::Url(url)) => {
+                        assert_eq!(
+                            url,
+                            &mf2.pointer("/properties/like-of/0")
+                                .and_then(|i| i.as_str())
+                                .or_else(|| mf2.pointer("/properties/like-of/0/properties/uid/0").and_then(|i| i.as_str()))
+                                .and_then(|u| u.parse::<Url>().ok())
+                                .unwrap()
+                        );
+                    }
+                    Some(PropertyValue::Item(_cite)) => {
+                        todo!()
+                    }
+                    other => panic!("Unexpected value in like-of: {:?}", other)
+                }
+            } else {
+                unreachable!()
+            }
+        }
+    }
+}
diff --git a/kittybox-rs/templates/src/login.rs b/kittybox-rs/templates/src/login.rs
new file mode 100644
index 0000000..042c308
--- /dev/null
+++ b/kittybox-rs/templates/src/login.rs
@@ -0,0 +1,17 @@
+markup::define! {
+    LoginPage {
+        form[method="POST"] {
+            h1 { "Sign in with your website" }
+            p {
+                "Signing in to Kittybox might allow you to view private content "
+                    "intended for your eyes only."
+            }
+
+            section {
+                label[for="url"] { "Your website URL" }
+                input[id="url", name="url", placeholder="https://example.com/"];
+                input[type="submit"];
+            }
+        }
+    }
+}
diff --git a/kittybox-rs/templates/src/onboarding.rs b/kittybox-rs/templates/src/onboarding.rs
new file mode 100644
index 0000000..9d0f2e1
--- /dev/null
+++ b/kittybox-rs/templates/src/onboarding.rs
@@ -0,0 +1,192 @@
+markup::define! {
+    OnboardingPage {
+        h1[style="text-align: center"] {
+            "Welcome to Kittybox"
+        }
+        script[type="module", src="/static/onboarding.js"] {}
+        link[rel="stylesheet", href="/static/onboarding.css"];
+        form.onboarding[action="", method="POST"] {
+            noscript {
+                p {
+                    "Ok, let's be honest. Most of this software doesn't require JS to be enabled "
+                    "to view pages (and in some cases, even edit them if logged in)."
+                }
+                p { "This page is a little bit different. It uses JavaScript to provide interactive features, such as:" }
+                ul {
+                    li { "Multiple-input questions" }
+                    li { "Answers spanning multiple fields" }
+                    li { "Preview of files being uploaded" }
+                    li { "Pretty pagination so you won't feel overwhelmed" }
+                }
+                p {
+                    "Sadly, it's very hard or even impossible to recreate this without any JavaScript. "
+                    "Good news though - the code is " b { "open-source AND free software" }
+                    " (under GNU AGPLv3) "
+                    "and I promise to not obfuscate it or minify it. "
+                    a[href="/static/onboarding.js"] { "Here" }
+                    "'s the link - you can try reading it so you'll be 200% sure "
+                    "it won't steal your cookies or turn your kitty into a soulless monster."
+                    @markup::raw("<!-- do cats even have souls? I'm not sure. But this code won't steal their souls anyway. -->")
+                }
+                hr;
+                p { "In other words: " b { "please enable JavaScript for this page to work properly." } small { "sorry T__T" } }
+            }
+            ul #progressbar[style="display: none"] {
+                li #intro { "Introduction" }
+                li #hcard { "Your profile" }
+                li #settings { "Your website" }
+                li #firstpost { "Your first post" }
+            }
+            fieldset #intro[style="display: none"] {
+                legend { "Introduction" }
+                p {
+                    "Kittybox is a CMS that can act as a member of the IndieWeb. "
+                    "IndieWeb is a global distributed social network built on top of open Web standards "
+                    "and composed of blogs around the Internet supporting these standards."
+                }
+                p { "There is no registration or centralized database of any sort - everyone owns their data and is responsible for it." }
+                p { "If you're seeing this page, it looks like your configuration is correct and we can proceed with the setup." }
+
+                div.switch_card_buttons {
+                    button.switch_card.next_card[type="button", "data-card"="hcard"] { "Next" }
+                }
+            }
+
+            fieldset #hcard[style="display: none"] {
+                legend { "Your profile" }
+                p { "An h-card is an IndieWeb social profile, and we're gonna make you one!" }
+                p { "Thanks to some clever markup, it will be readable by both humans and machines looking at your homepage."}
+                p {
+                    "If you make a mistake, don't worry, you're gonna be able to edit this later."
+                    "The only mandatory field is your name."
+                }
+
+                div.form_group {
+                    label[for="hcard_name"] { "Your name" }
+                    input #hcard_name[name="hcard_name", placeholder="Your name"];
+                    small {
+                        "No need to write the name as in your passport, this is not a legal document "
+                        "- just write how you want to be called on the network. This name will be also "
+                        "shown whenever you leave a comment on someone else's post using your website."
+                    }
+                }
+
+                div.form_group {
+                    label[for="pronouns"] { "Your pronouns" }
+                    div.multi_input #pronouns {
+                        template {
+                            input #hcard_pronouns[name="hcard_pronouns", placeholder="they/them?"];
+                        }
+                        button.add_more[type="button", "aria-label"="Add more"] { "[+] Add more" }
+                    }
+                    small {
+                        "Write which pronouns you use for yourself. It's a free-form field "
+                        "so don't feel constrained - but keep it compact, as it'll be shown in a lot of places."
+                    }
+                }
+
+                div.form_group {
+                    label[for="urls"] { "Links to other pages of you" }
+                    div.multi_input #urls {
+                        template {
+                            input #hcard_url[name="hcard_url", placeholder="https://example.com/"];
+                        }
+                        button.add_more[type="button", "aria-label"="Add more"] { "[+] Add more" }
+                    }
+                    small {
+                        "These URLs will help your readers find you elsewhere and will help you that whoever owns these pages owns your website too"
+                        " in case the links are mutual. So make sure to put a link to your site in your other social profiles!"
+                    }
+                }
+
+                div.form_group {
+                    label[for="hcard_note"] { "A little about yourself" }
+                    textarea #hcard_note[name="hcard_note", placeholder="Loves cooking, plants, cats, dogs and raccoons."] {}
+                    small { "A little bit of an introduction. Just one paragraph; note that you can't use HTML here (yet)." }
+                    // TODO: HTML e-note instead of p-note
+                }
+
+                // TODO: u-photo upload - needs media endpoint cooperation
+
+                div.switch_card_buttons {
+                    button.switch_card.prev_card[type="button", "data-card"="intro"] { "Previous" }
+                    button.switch_card.next_card[type="button", "data-card"="settings"] { "Next" }
+                }
+            }
+
+            fieldset #settings[style="display: none"] {
+                legend { "Your website" }
+                p { "Ok, it's nice to know you more. Tell me about what you'll be writing and how you want to name your blog." }
+                // TODO: site-name, saved to settings
+
+                div.form_group {
+                    label[for="blog_name"] { "Your website's name"}
+                    input #blog_name[name="blog_name", placeholder="Kitty Box!"];
+                    small { "It'll get shown in the title of your blog, in the upper left corner!" }
+                }
+
+                div.form_group {
+                    label[for="custom_feeds"] { "Custom feeds" }
+                    small {
+                        p {
+                            "You can set up custom feeds to post your stuff to. "
+                            "This is a nice way to organize stuff into huge folders, like all your trips or your quantified-self data."
+                        }
+                        p {
+                            "Feeds can be followed individually, which makes it easy for users who are interested in certain types "
+                            "of content you produce to follow your adventures in certain areas of your life without cluttering their "
+                            "readers."
+                        }
+                        p {
+                            "We will automatically create some feeds for you aside from these so you won't have to - including a main feed, "
+                            "address book (for venues you go to and people you talk about), a cookbook for your recipes and some more."
+                            // TODO: Put a link to documentation explaining feeds in more detail.
+                        }
+                    }
+                    div.multi_input #custom_feeds {
+                        template {
+                            fieldset.feed {
+                                div.form_group {
+                                    label[for="feed_name"] { "Name" }
+                                    input #feed_name[name="feed_name", placeholder="My cool feed"];
+                                    small { "This is a name that will identify this feed to the user. Make it short and descriptive!" }
+                                }
+                                div.form_group {
+                                    label[for="feed_slug"] { "Slug" }
+                                    input #feed_slug[name="feed_slug", placeholder="my-cool-feed"];
+                                    small { "This will form a pretty URL for the feed. For example: https://example.com/feeds/my-cool-feed" }
+                                }
+                            }
+                        }
+                        button.add_more[type="button", "aria-label"="Add more"] { "[+] Add More" }
+                    }
+                }
+
+                div.switch_card_buttons {
+                    button.switch_card.prev_card[type="button", "data-card"="hcard"] { "Previous" }
+                    button.switch_card.next_card[type="button", "data-card"="firstpost"] { "Next" }
+                }
+            }
+
+            fieldset #firstpost[style="display: none"] {
+                legend { "Your first post" }
+                p { "Maybe you should start writing your first posts now. How about a short note?" }
+                p { "A note is a short-form post (not unlike a tweet - but without the actual character limit) that doesn't bear a title." }
+                p {
+                    "Consider telling more about yourself, your skills and interests in this note "
+                    @markup::raw("&mdash;")
+                    " though you're free to write anything you want. (By the way, you can use "
+                    a[href="https://daringfireball.net/projects/markdown/syntax"] { "Markdown" }
+                    " here to spice up your note!)"
+                }
+
+                textarea #first_post_content[style="width: 100%; height: 8em", placeholder="Hello! I am really excited about #IndieWeb"] {}
+
+                div.switch_card_buttons {
+                    button.switch_card.prev_card[type="button", "data-card"="settings"] { "Previous" }
+                    button[type="submit"] { "Finish" }
+                }
+            }
+        }
+    }
+}
diff --git a/kittybox-rs/templates/src/templates.rs b/kittybox-rs/templates/src/templates.rs
new file mode 100644
index 0000000..0054c91
--- /dev/null
+++ b/kittybox-rs/templates/src/templates.rs
@@ -0,0 +1,545 @@
+use kittybox_util::{MicropubChannel, IndiewebEndpoints};
+use ellipse::Ellipse;
+use http::StatusCode;
+use log::error;
+
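+/// Number of posts shown on a single feed page; the `Feed` template compares
+/// a feed's child count against this to decide whether to link to older posts.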
+pub static POSTS_PER_PAGE: usize = 20;
+
+/// Return a pretty location specifier from a geo: URI.
+fn decode_geo_uri(uri: &str) -> String {
+    if let Some(part) = uri.split(':').collect::<Vec<_>>().get(1) {
+        if let Some(part) = part.split(';').next() {
+            let mut parts = part.split(',');
+            let lat = parts.next().unwrap();
+            let lon = parts.next().unwrap();
+            // TODO - format them as proper latitude and longitude
+            return format!("{}, {}", lat, lon);
+        } else {
+            uri.to_string()
+        }
+    } else {
+        uri.to_string()
+    }
+}
+
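+// Each block inside `markup::define!` becomes a struct whose `Display`
+// implementation renders the corresponding HTML fragment.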
+markup::define! {
+    Template<'a>(title: &'a str, blog_name: &'a str, endpoints: Option<IndiewebEndpoints>, feeds: Vec<MicropubChannel>, user: Option<String>, content: String) {
+        @markup::doctype()
+        html {
+            head {
+                title { @title }
+                link[rel="preconnect", href="https://fonts.gstatic.com"];
+                link[rel="stylesheet", href="/static/style.css"];
+                meta[name="viewport", content="initial-scale=1, width=device-width"];
+                // TODO: link rel= for common IndieWeb APIs: webmention, microsub
+                link[rel="micropub", href="/micropub"]; // Static, because it's built into the server itself
+                @if let Some(endpoints) = endpoints {
+                    link[rel="authorization_endpoint", href=&endpoints.authorization_endpoint];
+                    link[rel="token_endpoint", href=&endpoints.token_endpoint];
+                    @if let Some(webmention) = &endpoints.webmention {
+                        link[rel="webmention", href=&webmention];
+                    }
+                    @if let Some(microsub) = &endpoints.microsub {
+                        link[rel="microsub", href=&microsub];
+                    }
+                }
+            }
+            body {
+                // TODO Somehow compress headerbar into a menu when the screen space is tight
+                nav #headerbar {
+                    ul {
+                        li { a #homepage[href="/"] { @blog_name } }
+                        @for feed in feeds.iter() {
+                            li { a[href=&feed.uid] { @feed.name } }
+                        }
+                        li.shiftright {
+                            @if user.is_none() {
+                                a #login[href="/login"] { "Sign in" }
+                            } else {
+                                span {
+                                    @user.as_ref().unwrap() " - " a #logout[href="/logout"] { "Sign out" }
+                                }
+                            }
+                        }
+                    }
+                }
+                main {
+                    @markup::raw(content)
+                }
+                footer {
+                    p {
+                        "Powered by " a[href="https://sr.ht/~vikanezrimaya/kittybox"] {
+                            "Kittybox"
+                        }
+                    }
+                }
+            }
+        }
+    }
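+    // Entry dispatches between the full post layout and a condensed
+    // one-line rendering used for likes and bookmarks.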
+    Entry<'a>(post: &'a serde_json::Value) {
+        @if post.pointer("/properties/like-of").is_none() && post.pointer("/properties/bookmark-of").is_none() {
+            @FullEntry { post }
+        } else {
+            // Show a mini-post.
+            @MiniEntry { post }
+        }
+    }
+    MiniEntry<'a>(post: &'a serde_json::Value) {
+        article."h-entry mini-entry" {
+            @if let Some(author) = post["properties"]["author"][0].as_object() {
+                span."mini-h-card"."u-author" {
+                    a."u-author"[href=author["properties"]["uid"][0].as_str().unwrap()] {
+                        @if let Some(photo) = author["properties"]["photo"][0].as_str() {
+                            img[src=photo, loading="lazy"];
+                        }
+                        @author["properties"]["name"][0].as_str().unwrap()
+                    }
+                }
+                @if let Some(likeof) = post["properties"]["like-of"][0].as_str() {
+                    " ❤️ "
+                    a."u-like-of"[href=likeof] { @likeof }
+                } else if let Some(likeof) = post["properties"]["like-of"][0].as_object() {
+                    a."u-like-of"[href=likeof["properties"]["url"][0].as_str().unwrap()] {
+                        @likeof["properties"]["name"][0]
+                            .as_str()
+                            .unwrap_or_else(|| likeof["properties"]["url"][0].as_str().unwrap())
+                    }
+                }
+                @if let Some(bookmarkof) = post["properties"]["bookmark-of"][0].as_str() {
+                    " 🔖 "
+                    a."u-bookmark-of"[href=bookmarkof] { @bookmarkof }
+                } else if let Some(bookmarkof) = post["properties"]["bookmark-of"][0].as_object() {
+                    a."u-bookmark-of"[href=bookmarkof["properties"]["url"][0].as_str().unwrap()] {
+                        @bookmarkof["properties"]["name"][0]
+                            .as_str()
+                            .unwrap_or_else(|| bookmarkof["properties"]["url"][0].as_str().unwrap())
+                    }
+                }
+                @if let Some(published) = post["properties"]["published"][0].as_str() {
+                    time."dt-published"[datetime=published] {
+                        @chrono::DateTime::parse_from_rfc3339(published)
+                            .map(|dt| dt.format("on %a %b %e %T %Y").to_string())
+                            .unwrap_or("sometime in the past".to_string())
+                    }
+                }
+            }
+        }
+    }
+    FullEntry<'a>(post: &'a serde_json::Value) {
+        article."h-entry" {
+            header.metadata {
+                @if let Some(name) = post["properties"]["name"][0].as_str() {
+                    h1."p-name" { @name }
+                }
+                @if let Some(author) = post["properties"]["author"][0].as_object() {
+                    section."mini-h-card" {
+                        a.larger."u-author"[href=author["properties"]["uid"][0].as_str().unwrap()] {
+                            @if let Some(photo) = author["properties"]["photo"][0].as_str() {
+                                img[src=photo, loading="lazy"];
+                            }
+                            @author["properties"]["name"][0].as_str().unwrap()
+                        }
+                    }
+                }
+                div {
+                    span {
+                        a."u-url"."u-uid"[href=post["properties"]["uid"][0].as_str().unwrap()] {
+                            @if let Some(published) = post["properties"]["published"][0].as_str() {
+                                time."dt-published"[datetime=published] {
+                                    @chrono::DateTime::parse_from_rfc3339(published)
+                                        .map(|dt| dt.format("%a %b %e %T %Y").to_string())
+                                        .unwrap_or("sometime in the past".to_string())
+                                }
+                            }
+                        }
+                    }
+                    @if post["properties"]["visibility"][0].as_str().unwrap_or("public") != "public" {
+                        span."p-visibility"[value=post["properties"]["visibility"][0].as_str().unwrap()] {
+                            @post["properties"]["visibility"][0].as_str().unwrap()
+                        }
+                    }
+                    @if post["properties"]["category"].is_array() {
+                        span {
+                            ul.categories {
+                                "Tagged: "
+                                @for cat in post["properties"]["category"].as_array().unwrap() {
+                                    li."p-category" { @cat.as_str().unwrap() }
+                                }
+                            }
+                        }
+                    }
+                    @if post["properties"]["in-reply-to"].is_array() {
+                        // TODO: Rich reply contexts - blocked on MF2 parser
+                        span {
+                            "In reply to: "
+                            ul.replyctx {
+                                @for ctx in post["properties"]["in-reply-to"].as_array().unwrap() {
+                                    li { a."u-in-reply-to"[href=ctx.as_str().unwrap()] {
+                                        @ctx.as_str().unwrap().truncate_ellipse(24).as_ref()
+                                    } }
+                                }
+                            }
+                        }
+                    }
+                }
+                @if post["properties"]["url"].as_array().unwrap().len() > 1 {
+                    hr;
+                    ul {
+                        "Pretty permalinks for this post:"
+                        @for url in post["properties"]["url"].as_array().unwrap().iter().filter(|i| **i != post["properties"]["uid"][0]).map(|i| i.as_str().unwrap()) {
+                            li {
+                                a."u-url"[href=url] { @url }
+                            }
+                        }
+                    }
+                }
+                @if post["properties"]["location"].is_array() || post["properties"]["checkin"].is_array() {
+                    div {
+                        @if post["properties"]["checkin"].is_array() {
+                            span {
+                                "Check-in to: "
+                                @if post["properties"]["checkin"][0].is_string() {
+                                    // It's a URL
+                                    a."u-checkin"[href=post["properties"]["checkin"][0].as_str().unwrap()] {
+                                        @post["properties"]["checkin"][0].as_str().unwrap().truncate_ellipse(24).as_ref()
+                                    }
+                                } else {
+                                    a."u-checkin"[href=post["properties"]["checkin"][0]["properties"]["uid"][0].as_str().unwrap()] {
+                                        @post["properties"]["checkin"][0]["properties"]["name"][0].as_str().unwrap()
+                                    }
+                                }
+                            }
+                        }
+                        @if post["properties"]["location"].is_array() {
+                            span {
+                                "Location: "
+                                @if post["properties"]["location"][0].is_string() {
+                                    // It's a geo: URL
+                                    // We need to decode it
+                                    a."u-location"[href=post["properties"]["location"][0].as_str().unwrap()] {
+                                        @decode_geo_uri(post["properties"]["location"][0].as_str().unwrap())
+                                    }
+                                } else {
+                                    // It's an inner h-geo object
+                                    a."u-location"[href=post["properties"]["location"][0]["value"].as_str().map(|x| x.to_string()).unwrap_or(format!("geo:{},{}", post["properties"]["location"][0]["properties"]["latitude"][0].as_str().unwrap(), post["properties"]["location"][0]["properties"]["longitude"][0].as_str().unwrap()))] {
+                                        // I'm a lazy bitch
+                                        @decode_geo_uri(&post["properties"]["location"][0]["value"].as_str().map(|x| x.to_string()).unwrap_or(format!("geo:{},{}", post["properties"]["location"][0]["properties"]["latitude"][0].as_str().unwrap(), post["properties"]["location"][0]["properties"]["longitude"][0].as_str().unwrap())))
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+                @if post["properties"]["ate"].is_array() || post["properties"]["drank"].is_array() {
+                    div {
+                        @if post["properties"]["ate"].is_array() {
+                            span { ul {
+                                "Ate:"
+                                @for food in post["properties"]["ate"].as_array().unwrap() {
+                                    li {
+                                        @if food.is_string() {
+                                            // If this is a string, it's a URL.
+                                            a."u-ate"[href=food.as_str().unwrap()] {
+                                                @food.as_str().unwrap().truncate_ellipse(24).as_ref()
+                                            }
+                                        } else {
+                                            // This is a rich food object (mm, sounds tasty! I wanna eat something tasty)
+                                            a."u-ate"[href=food["properties"]["uid"][0].as_str().unwrap_or("#")] {
+                                                @food["properties"]["name"][0].as_str()
+                                                    .unwrap_or(food["properties"]["uid"][0].as_str().unwrap_or("#").truncate_ellipse(24).as_ref())
+                                            }
+                                        }
+                                    }
+                                }
+                            } }
+                        }
+                        @if post["properties"]["drank"].is_array() {
+                            span { ul {
+                                "Drank:"
+                                @for food in post["properties"]["drank"].as_array().unwrap() {
+                                    li {
+                                        @if food.is_string() {
+                                            // If this is a string, it's a URL.
+                                            a."u-drank"[href=food.as_str().unwrap()] {
+                                                @food.as_str().unwrap().truncate_ellipse(24).as_ref()
+                                            }
+                                        } else {
+                                            // This is a rich food object (mm, sounds tasty! I wanna eat something tasty)
+                                            a."u-drank"[href=food["properties"]["uid"][0].as_str().unwrap_or("#")] {
+                                                @food["properties"]["name"][0].as_str()
+                                                    .unwrap_or(food["properties"]["uid"][0].as_str().unwrap_or("#").truncate_ellipse(24).as_ref())
+                                            }
+                                        }
+                                    }
+                                }
+                            } }
+                        }
+                    }
+                }
+            }
+            @PhotoGallery { photos: post["properties"]["photo"].as_array() }
+            @if post["properties"]["content"][0]["html"].is_string() {
+                main."e-content" {
+                    @markup::raw(post["properties"]["content"][0]["html"].as_str().unwrap().trim())
+                }
+            }
+            @WebInteractions { post }
+        }
+    }
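+    // Renders u-photo properties, which can be plain URL strings or objects
+    // carrying `value`, an optional `thumbnail` and `alt` text.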
+    PhotoGallery<'a>(photos: Option<&'a Vec<serde_json::Value>>) {
+        @if let Some(photos) = photos {
+            @for photo in photos.iter() {
+                @if let Some(photo) = photo.as_str() {
+                    img."u-photo"[src=photo, loading="lazy"];
+                } else if photo.is_object() {
+                    @if let Some(thumbnail) = photo["thumbnail"].as_str() {
+                        a."u-photo"[href=photo["value"].as_str().unwrap()] {
+                            img[src=thumbnail,
+                                loading="lazy",
+                                alt=photo["alt"].as_str().unwrap_or("")
+                            ];
+                        }
+                    } else {
+                        img."u-photo"[src=photo["value"].as_str().unwrap(),
+                                      loading="lazy",
+                                      alt=photo["alt"].as_str().unwrap_or("")
+                        ];
+                    }
+                }
+            }
+        }
+    }
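+    // Footer with reaction counters (likes, comments, reposts, bookmarks)
+    // derived from the corresponding post properties.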
+    WebInteractions<'a>(post: &'a serde_json::Value) {
+        footer.webinteractions {
+            ul.counters {
+                li {
+                    span.icon { "❤️" }
+                    span.counter { @post["properties"]["like"].as_array().map(|a| a.len()).unwrap_or(0) }
+                }
+                li {
+                    span.icon { "💬" }
+                    span.counter { @post["properties"]["comment"].as_array().map(|a| a.len()).unwrap_or(0) }
+                }
+                li {
+                    span.icon { "🔄" }
+                    span.counter { @post["properties"]["repost"].as_array().map(|a| a.len()).unwrap_or(0) }
+                }
+                li {
+                    span.icon { "🔖" }
+                    span.counter { @post["properties"]["bookmark"].as_array().map(|a| a.len()).unwrap_or(0) }
+                }
+            }
+            /*@if (
+                post["properties"]["like"].as_array().map(|a| a.len()).unwrap_or(0)
+                    + post["properties"]["bookmark"].as_array().map(|a| a.len()).unwrap_or(0)
+                    + post["properties"]["repost"].as_array().map(|a| a.len()).unwrap_or(0)
+                    + post["properties"]["comment"].as_array().map(|a| a.len()).unwrap_or(0)
+            ) > 0 {
+                details {
+                    summary { "Show comments and reactions" }
+                    // TODO actually render facepiles and comments
+                    @if let Some(likes) = post["properties"]["like"].as_array() {
+                        @if !likes.is_empty() {
+                            // Show a facepile of likes for a post
+                        }
+                    }
+                    @if let Some(bookmarks) = post["properties"]["bookmark"].as_array() {
+                        @if !bookmarks.is_empty() {
+                            // Show a facepile of bookmarks for a post
+                        }
+                    }
+                    @if let Some(reposts) = post["properties"]["repost"].as_array() {
+                        @if !reposts.is_empty() {
+                            // Show a facepile of reposts for a post
+                        }
+                    }
+                    @if let Some(comments) = post["properties"]["comment"].as_array() {
+                        @for comment in comments.iter() {
+                            // Show all the comments recursively (so we could do Salmention with them)
+                        }
+                    }
+                }
+            }*/
+        }
+    }
+    VCard<'a>(card: &'a serde_json::Value) {
+        article."h-card" {
+            @if card["properties"]["photo"][0].is_string() {
+                img."u-photo"[src=card["properties"]["photo"][0].as_str().unwrap()];
+            }
+            h1 {
+                a."u-url"."u-uid"."p-name"[href=card["properties"]["uid"][0].as_str().unwrap()] {
+                    @card["properties"]["name"][0].as_str().unwrap()
+                }
+            }
+            @if card["properties"]["pronoun"].is_array() {
+                span {
+                    "("
+                    @for (i, pronoun) in card["properties"]["pronoun"].as_array().unwrap().iter().filter_map(|v| v.as_str()).enumerate() {
+                        span."p-pronoun" {
+                            @pronoun
+                        }
+                        // Insert commas between multiple sets of pronouns
+                        @if i < (card["properties"]["pronoun"].as_array().unwrap().len() - 1) {", "}
+                    }
+                    ")"
+                }
+            }
+            @if card["properties"]["note"].is_array() {
+                p."p-note" {
+                    @card["properties"]["note"][0]["value"].as_str().unwrap_or_else(|| card["properties"]["note"][0].as_str().unwrap())
+                }
+            }
+            @if card["properties"]["url"].is_array() {
+                ul {
+                    "Can be found elsewhere at:"
+                    @for url in card["properties"]["url"].as_array().unwrap().iter().filter_map(|v| v.as_str()).filter(|v| v != &card["properties"]["uid"][0].as_str().unwrap()).filter(|v| !v.starts_with(&card["properties"]["author"][0].as_str().unwrap())) {
+                        li { a."u-url"[href=url, rel="me"] { @url } }
+                    }
+                }
+            }
+        }
+    }
+    Food<'a>(food: &'a serde_json::Value) {
+        article."h-food" {
+            header.metadata {
+                h1 {
+                    a."p-name"."u-url"[href=food["properties"]["url"][0].as_str().unwrap()] {
+                        @food["properties"]["name"][0].as_str().unwrap()
+                    }
+                }
+            }
+            @PhotoGallery { photos: food["properties"]["photo"].as_array() }
+        }
+    }
+    Feed<'a>(feed: &'a serde_json::Value) {
+        div."h-feed" {
+            div.metadata {
+                @if feed["properties"]["name"][0].is_string() {
+                    h1."p-name".titanic {
+                        a[href=feed["properties"]["uid"][0].as_str().unwrap(), rel="feed"] {
+                            @feed["properties"]["name"][0].as_str().unwrap()
+                        }
+                    }
+                }
+            }
+            @if feed["children"].is_array() {
+                @for child in feed["children"].as_array().unwrap() {
+                    @match child["type"][0].as_str().unwrap() {
+                        "h-entry" => { @Entry { post: child } }
+                        "h-feed" => { @Feed { feed: child } }
+                        "h-event" => {
+                            @{error!("Templating error: h-events aren't implemented yet");}
+                        }
+                        "h-card" => { @VCard { card: child }}
+                        something_else => {
+                            @{error!("Templating error: found a {} object that couldn't be parsed", something_else);}
+                        }
+                    }
+                }
+            }
+            @if feed["children"].as_array().map(|a| a.len()).unwrap_or(0) == 0 {
+                p {
+                    "Looks like you reached the end. Wanna jump back to the "
+                    a[href=feed["properties"]["uid"][0].as_str().unwrap()] {
+                        "beginning"
+                    } "?"
+                }
+            }
+            @if feed["children"].as_array().map(|a| a.len()).unwrap_or(0) == super::POSTS_PER_PAGE {
+                a[rel="prev", href=feed["properties"]["uid"][0].as_str().unwrap().to_string()
+                    + "?after=" + feed["children"][super::POSTS_PER_PAGE - 1]["properties"]["uid"][0].as_str().unwrap()] {
+                    "Older posts"
+                }
+            }
+        }
+    }
+    MainPage<'a>(feed: &'a serde_json::Value, card: &'a serde_json::Value) {
+        .sidebyside {
+            @VCard { card }
+            #dynamicstuff {
+                p { "This section will provide interesting statistics or tidbits about my life in this exact moment (with maybe a small delay)." }
+                p { "It will probably require JavaScript to self-update, but I promise to keep this widget lightweight and open-source!" }
+                p { small {
+                    "JavaScript isn't a menace, stop fearing it or I will switch to WebAssembly "
+                    "and knock your nico-nico-kneecaps so fast with its speed you won't even notice that... "
+                    small { "omae ha mou shindeiru" }
+                    @markup::raw("<!-- NANI?!!! -->")
+                } }
+            }
+        }
+        @Feed { feed }
+    }
+    ErrorPage(code: StatusCode, msg: Option<String>) {
+        h1 { @format!("HTTP {}", code) }
+        @match *code {
+            StatusCode::UNAUTHORIZED => {
+                p { "Looks like you need to authenticate yourself before seeing this page. Try logging in with IndieAuth using the Login button above!" }
+            }
+            StatusCode::FORBIDDEN => {
+                p { "Looks like you're forbidden from viewing this page." }
+                p {
+                    "This might've been caused by being banned from viewing my website"
+                    "or simply by trying to see what you're not supposed to see, "
+                    "like a private post that's not intended for you. It's ok, it happens."
+                }
+            }
+            StatusCode::GONE => {
+                p { "Looks like the page you're trying to find is gone and is never coming back." }
+            }
+            StatusCode::UNAVAILABLE_FOR_LEGAL_REASONS => {
+            p { "The page is there, but I can't legally provide it to you because the censorship said so." }
+            }
+            StatusCode::NOT_FOUND => {
+                p { "Looks like there's no such page. Maybe you or someone else mistyped a URL or my database experienced data loss." }
+            }
+            StatusCode::IM_A_TEAPOT => {
+                p { "Wait, do you seriously expect my website to brew you coffee? It's not a coffee machine!" }
+
+                p {
+                    small {
+                        "I could brew you some coffee tho if we meet one day... "
+                        small {
+                            i {
+                                "i-it's nothing personal, I just like brewing coffee, b-baka!!!~ >.<!"
+                            }
+                        }
+                    }
+                }
+            }
+            StatusCode::BAD_REQUEST => {
+                @match msg {
+                    None => {
+                        p {
+                            "There was an undescribed error in your request. "
+                            "Please try again later or with a different request."
+                        }
+                    }
+                    Some(msg) => {
+                        p {
+                            "There was a following error in your request:"
+                        }
+                        blockquote { pre { @msg } }
+                    }
+                }
+            }
+            StatusCode::INTERNAL_SERVER_ERROR => {
+                @match msg {
+                    None => {
+                        p { "It seems like you have found an error. Not to worry, it has already been logged." }
+                    }
+                    Some(msg) => {
+                        p { "The server encountered an error while processing your request:" }
+                        blockquote { @msg }
+                        p { "Don't worry, it has already been logged." }
+                    }
+                }
+            }
+            _ => {
+                p { "It seems like you have found an error. Not to worry, it has already been logged." }
+            }
+        }
+        P { "For now, may I suggest to visit " a[href="/"] {"the main page"} " of this website?" }
+
+    }
+}
diff --git a/kittybox-rs/util/Cargo.toml b/kittybox-rs/util/Cargo.toml
new file mode 100644
index 0000000..31c6bca
--- /dev/null
+++ b/kittybox-rs/util/Cargo.toml
@@ -0,0 +1,12 @@
+[package]
+name = "kittybox-util"
+version = "0.1.0"
+edition = "2021"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+[dependencies.serde]         # A generic serialization/deserialization framework
+version = "^1.0.125"
+features = ["derive"]
+
diff --git a/kittybox-rs/util/src/lib.rs b/kittybox-rs/util/src/lib.rs
new file mode 100644
index 0000000..bc41689
--- /dev/null
+++ b/kittybox-rs/util/src/lib.rs
@@ -0,0 +1,18 @@
+use serde::{Deserialize, Serialize};
+
+#[derive(Clone, Serialize, Deserialize)]
+pub struct IndiewebEndpoints {
+    pub authorization_endpoint: String,
+    pub token_endpoint: String,
+    pub webmention: Option<String>,
+    pub microsub: Option<String>,
+}
+
+/// Data structure representing a Micropub channel in the ?q=channels output.
+#[derive(Serialize, Deserialize, PartialEq, Debug)]
+pub struct MicropubChannel {
+    /// The channel's UID. It is usually also a publicly accessible permalink URL.
+    pub uid: String,
+    /// The channel's user-friendly name used to recognize it in lists.
+    pub name: String,
+}