From 4aa7f01da39ab55b4f6346e7565d8bb29566de39 Mon Sep 17 00:00:00 2001
From: Vika
Date: Sun, 5 Dec 2021 23:00:01 +0300
Subject: Code cleanup and small bugfixing in templates

---
 src/bin/kittybox_database_converter.rs |  72 ++++++++++------
 src/database/file/mod.rs               | 151 ++++++++++++++++++++++-----------
 src/database/mod.rs                    |  58 ++++++++-----
 src/frontend/mod.rs                    |  23 ++++-
 src/frontend/templates/mod.rs          |   8 +-
 src/frontend/templates/onboarding.rs   |   1 -
 src/lib.rs                             |  14 +--
 src/main.rs                            |   5 +-
 src/metrics.rs                         |  41 +++++----
 src/micropub/post.rs                   |  20 ++---
 10 files changed, 252 insertions(+), 141 deletions(-)

(limited to 'src')

diff --git a/src/bin/kittybox_database_converter.rs b/src/bin/kittybox_database_converter.rs
index 900a5c6..4dcd2ab 100644
--- a/src/bin/kittybox_database_converter.rs
+++ b/src/bin/kittybox_database_converter.rs
@@ -1,63 +1,77 @@
 use anyhow::{anyhow, Context};
-use redis::{self, AsyncCommands};
-use kittybox::database::Storage;
 use kittybox::database::FileStorage;
+use kittybox::database::Storage;
+use redis::{self, AsyncCommands};
 use std::collections::HashMap;
 
 /// Convert from a Redis storage to a new storage new_storage.
 async fn convert_from_redis<S: Storage>(from: String, new_storage: S) -> anyhow::Result<()> {
     let db = redis::Client::open(from).context("Failed to open the Redis connection")?;
-    let mut conn = db.get_async_std_connection().await.context("Failed to connect to Redis")?;
+    let mut conn = db
+        .get_async_std_connection()
+        .await
+        .context("Failed to connect to Redis")?;
     // Rebinding to convince the borrow checker we're not smuggling stuff outta scope
     let storage = &new_storage;
-    
+
     let mut stream = conn.hscan::<_, String>("posts").await?;
     while let Some(key) = stream.next_item().await {
        let value = serde_json::from_str::<serde_json::Value>(
-            &stream.next_item().await
-                .ok_or(anyhow!("Failed to find a corresponding value for the key"))?
+            &stream
+                .next_item()
+                .await
+                .ok_or(anyhow!("Failed to find a corresponding value for the key"))?,
         )?;
         println!("{}, {:?}", key, value);
         if value["see_other"].is_string() {
-            continue
+            continue;
         }
-        let user = &(
-            url::Url::parse(
-                value["properties"]["uid"][0]
-                    .as_str().unwrap()
-            ).unwrap().origin().ascii_serialization().clone()
-            + "/"
-        );
+        let user = &(url::Url::parse(value["properties"]["uid"][0].as_str().unwrap())
+            .unwrap()
+            .origin()
+            .ascii_serialization()
+            .clone()
+            + "/");
         if let Err(err) = storage.clone().put_post(&value, user).await {
             eprintln!("Error saving post: {}", err);
         }
-
     }
-    
+
     let mut stream: redis::AsyncIter<String> = conn.scan_match("settings_*").await?;
     while let Some(key) = stream.next_item().await {
-        let mut conn = db.get_async_std_connection().await.context("Failed to connect to Redis")?;
+        let mut conn = db
+            .get_async_std_connection()
+            .await
+            .context("Failed to connect to Redis")?;
         let user = key.strip_prefix("settings_").unwrap();
-        match conn.hgetall::<&str, HashMap<String, String>>(&key).await.context(format!("Failed getting settings from key {}", key)) {
+        match conn
+            .hgetall::<&str, HashMap<String, String>>(&key)
+            .await
+            .context(format!("Failed getting settings from key {}", key))
+        {
             Ok(settings) => {
                 for (k, v) in settings.iter() {
-                    if let Err(e) = storage.set_setting(k, &user, v).await.with_context(|| format!("Failed setting {} for {}", k, user)) {
+                    if let Err(e) = storage
+                        .set_setting(k, &user, v)
+                        .await
+                        .with_context(|| format!("Failed setting {} for {}", k, user))
+                    {
                         eprintln!("{}", e);
                     }
                 }
-            },
+            }
             Err(e) => {
                 eprintln!("{}", e);
             }
         }
     }
-    
+
     return Ok(());
 }
 
@@ -65,17 +79,25 @@ async fn convert_from_redis<S: Storage>(from: String, new_storage: S) -> anyhow::Result<()> {
 async fn main() -> anyhow::Result<()> {
     let mut args = std::env::args();
     args.next(); // skip argv[0]
-    let old_uri = args.next().ok_or_else(|| anyhow!("No import source is provided."))?;
-    let new_uri = args.next().ok_or_else(|| anyhow!("No import destination is provided."))?;
+    let old_uri = args
+        .next()
+        .ok_or_else(|| anyhow!("No import source is provided."))?;
+    let new_uri = args
+        .next()
+        .ok_or_else(|| anyhow!("No import destination is provided."))?;
 
     let storage = if new_uri.starts_with("file:") {
         let folder = new_uri.strip_prefix("file://").unwrap();
         let path = std::path::PathBuf::from(folder);
-        Box::new(FileStorage::new(path).await.context("Failed to construct the file storage")?)
+        Box::new(
+            FileStorage::new(path)
+                .await
+                .context("Failed to construct the file storage")?,
+        )
     } else {
         anyhow::bail!("Cannot construct the storage abstraction for destination storage. Check the storage type?");
     };
-    
+
     if old_uri.starts_with("redis") {
         convert_from_redis(old_uri, *storage).await?
} diff --git a/src/database/file/mod.rs b/src/database/file/mod.rs index d67c920..4fb7f47 100644 --- a/src/database/file/mod.rs +++ b/src/database/file/mod.rs @@ -1,17 +1,17 @@ +use crate::database::{filter_post, ErrorKind, Result, Storage, StorageError}; use async_std::fs::{File, OpenOptions}; -use async_std::io::{ErrorKind as IOErrorKind}; use async_std::io::prelude::*; +use async_std::io::ErrorKind as IOErrorKind; use async_std::task::spawn_blocking; use async_trait::async_trait; +use fd_lock::RwLock; use futures::stream; use futures_util::StreamExt; use futures_util::TryStreamExt; -use serde_json::json; -use crate::database::{ErrorKind, Result, Storage, StorageError, filter_post}; -use fd_lock::RwLock; use log::debug; -use std::path::{Path, PathBuf}; +use serde_json::json; use std::collections::HashMap; +use std::path::{Path, PathBuf}; impl From for StorageError { fn from(source: std::io::Error) -> Self { @@ -106,13 +106,24 @@ fn modify_post(post: &serde_json::Value, update: &serde_json::Value) -> Result Result Result Result(feed: &mut serde_json::Value, user: &'_ Option, storage: &S) { +async fn hydrate_author( + feed: &mut serde_json::Value, + user: &'_ Option, + storage: &S, +) { let url = feed["properties"]["uid"][0].as_str().unwrap(); if let Some(author) = feed["properties"]["author"].clone().as_array() { - if !feed["type"].as_array().unwrap().iter().any(|i| i == "h-card") { + if !feed["type"] + .as_array() + .unwrap() + .iter() + .any(|i| i == "h-card") + { let author_list: Vec = stream::iter(author.iter()) .then(|i| async move { if let Some(i) = i.as_str() { @@ -196,18 +226,21 @@ async fn hydrate_author(feed: &mut serde_json::Value, user: &'_ Opti Ok(post) => match post { Some(post) => match filter_post(post, user) { Some(author) => author, - None => json!(i) + None => json!(i), }, - None => json!(i) + None => json!(i), }, Err(e) => { log::error!("Error while hydrating post {}: {}", url, e); json!(i) } } - } else { i.clone() } + } else { + i.clone() + } }) - .collect::>().await; + .collect::>() + .await; feed["properties"].as_object_mut().unwrap()["author"] = json!(author_list); } } @@ -251,8 +284,8 @@ impl Storage for FileStorage { async fn put_post<'a>(&self, post: &'a serde_json::Value, user: &'a str) -> Result<()> { let key = post["properties"]["uid"][0] - .as_str() - .expect("Tried to save a post without UID"); + .as_str() + .expect("Tried to save a post without UID"); let path = url_to_path(&self.root_dir, key); debug!("Creating {:?}", path); @@ -288,26 +321,39 @@ impl Storage for FileStorage { let orig = path.clone(); spawn_blocking::<_, Result<()>>(move || { // We're supposed to have a parent here. - let basedir = link.parent().ok_or(StorageError::new(ErrorKind::Backend, "Failed to calculate parent directory when creating a symlink"))?; + let basedir = link.parent().ok_or(StorageError::new( + ErrorKind::Backend, + "Failed to calculate parent directory when creating a symlink", + ))?; let relative = path_relative_from(&orig, &basedir).unwrap(); println!("{:?} - {:?} = {:?}", &orig, &basedir, &relative); println!("Created a symlink at {:?}", &link); let symlink_result; #[cfg(unix)] - { symlink_result = std::os::unix::fs::symlink(relative, link); } + { + symlink_result = std::os::unix::fs::symlink(relative, link); + } // Wow it even supports windows. 
Not sure if I need it to run on Windows but oh well #[cfg(windows)] - { symlink_result = std::os::windows::fs::symlink_file(relative, link); } + { + symlink_result = std::os::windows::fs::symlink_file(relative, link); + } match symlink_result { Ok(()) => Ok(()), - Err(e) => Err(e.into()) + Err(e) => Err(e.into()), } - }).await?; + }) + .await?; } } } - if post["type"].as_array().unwrap().iter().any(|s| s.as_str() == Some("h-feed")) { + if post["type"] + .as_array() + .unwrap() + .iter() + .any(|s| s.as_str() == Some("h-feed")) + { println!("Adding to channel list..."); // Add the h-feed to the channel list let mut path = relative_path::RelativePathBuf::new(); @@ -320,7 +366,8 @@ impl Storage for FileStorage { .write(true) .truncate(false) .create(true) - .open(&path).await?; + .open(&path) + .await?; let mut lock = get_lockable_file(file).await; let mut guard = lock.write()?; @@ -338,11 +385,13 @@ impl Storage for FileStorage { name: post["properties"]["name"][0] .as_str() .map(|s| s.to_string()) - .unwrap_or_else(|| String::default()) + .unwrap_or_else(|| String::default()), }); guard.seek(std::io::SeekFrom::Start(0)).await?; guard.set_len(0).await?; - guard.write_all(serde_json::to_string(&channels)?.as_bytes()).await?; + guard + .write_all(serde_json::to_string(&channels)?.as_bytes()) + .await?; } Ok(()) @@ -375,10 +424,7 @@ impl Storage for FileStorage { Ok(()) } - async fn get_channels<'a>( - &self, - user: &'a str, - ) -> Result> { + async fn get_channels<'a>(&self, user: &'a str) -> Result> { let mut path = relative_path::RelativePathBuf::new(); path.push(user.to_string()); path.push("channels"); @@ -393,11 +439,11 @@ impl Storage for FileStorage { (&mut &*guard).read_to_string(&mut content).await?; // This should not happen, but if it does, let's handle it gracefully instead of failing. if content.len() == 0 { - return Ok(vec![]) + return Ok(vec![]); } let channels: Vec = serde_json::from_str(&content)?; Ok(channels) - }, + } Err(e) => { if e.kind() == IOErrorKind::NotFound { Ok(vec![]) @@ -419,7 +465,8 @@ impl Storage for FileStorage { if let Some(mut feed) = filter_post(feed, user) { if feed["children"].is_array() { let children = feed["children"].as_array().unwrap().clone(); - let mut posts_iter = children.into_iter() + let mut posts_iter = children + .into_iter() .map(|s: serde_json::Value| s.as_str().unwrap().to_string()); if after.is_some() { loop { @@ -430,24 +477,25 @@ impl Storage for FileStorage { } } let posts = stream::iter(posts_iter) - .map(|url: String| async move { - self.get_post(&url).await - }) + .map(|url: String| async move { self.get_post(&url).await }) .buffered(std::cmp::min(3, limit)) - // Hack to unwrap the Option and sieve out broken links - // Broken links return None, and Stream::filter_map skips Nones. + // Hack to unwrap the Option and sieve out broken links + // Broken links return None, and Stream::filter_map skips Nones. 
.try_filter_map(|post: Option| async move { Ok(post) }) - .and_then(|mut post| async move { hydrate_author(&mut post, user, self).await; Ok(post) }) + .and_then(|mut post| async move { + hydrate_author(&mut post, user, self).await; + Ok(post) + }) .try_filter_map(|post| async move { Ok(filter_post(post, user)) }) - .take(limit); match posts.try_collect::>().await { Ok(posts) => feed["children"] = serde_json::json!(posts), Err(err) => { return Err(StorageError::with_source( - ErrorKind::Other, "Feed assembly error", - Box::new(err) + ErrorKind::Other, + "Feed assembly error", + Box::new(err), )); } } @@ -469,7 +517,9 @@ impl Storage for FileStorage { let path = url_to_path(&self.root_dir, url); if let Err(e) = async_std::fs::remove_file(path).await { Err(e.into()) - } else { Ok(()) } + } else { + Ok(()) + } } async fn get_setting<'a>(&self, setting: &'a str, user: &'a str) -> Result { @@ -489,10 +539,10 @@ impl Storage for FileStorage { let settings: HashMap = serde_json::from_str(&content)?; // XXX consider returning string slices instead of cloning a string every time // it might come with a performance hit and/or memory usage inflation - settings.get(setting) + settings + .get(setting) .map(|s| s.clone()) .ok_or_else(|| StorageError::new(ErrorKind::Backend, "Setting not set")) - } async fn set_setting<'a>(&self, setting: &'a str, user: &'a str, value: &'a str) -> Result<()> { @@ -513,7 +563,8 @@ impl Storage for FileStorage { .read(true) .truncate(false) .create(true) - .open(&path).await?; + .open(&path) + .await?; let mut lock = get_lockable_file(file).await; log::debug!("Created a lock. Locking for writing..."); let mut guard = lock.write()?; @@ -530,7 +581,9 @@ impl Storage for FileStorage { settings.insert(setting.to_string(), value.to_string()); guard.seek(std::io::SeekFrom::Start(0)).await?; guard.set_len(0).await?; - guard.write_all(serde_json::to_string(&settings)?.as_bytes()).await?; + guard + .write_all(serde_json::to_string(&settings)?.as_bytes()) + .await?; Ok(()) } } diff --git a/src/database/mod.rs b/src/database/mod.rs index a57e243..6a874ed 100644 --- a/src/database/mod.rs +++ b/src/database/mod.rs @@ -1,5 +1,4 @@ #![warn(missing_docs)] -use crate::indieauth::User; use async_trait::async_trait; use serde::{Deserialize, Serialize}; @@ -70,7 +69,9 @@ impl From for tide::Response { } impl std::error::Error for StorageError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - self.source.as_ref().map(|e| e.as_ref() as &dyn std::error::Error) + self.source + .as_ref() + .map(|e| e.as_ref() as &dyn std::error::Error) } } impl From for StorageError { @@ -115,11 +116,15 @@ impl StorageError { } } /// Create a StorageError using another arbitrary Error as a source. - fn with_source(kind: ErrorKind, msg: &str, source: Box) -> Self { + fn with_source( + kind: ErrorKind, + msg: &str, + source: Box, + ) -> Self { Self { msg: msg.to_string(), source: Some(source), - kind + kind, } } /// Get the kind of an error. @@ -135,7 +140,15 @@ impl StorageError { /// A special Result type for the Micropub backing storage. pub type Result = std::result::Result; -pub fn filter_post(mut post: serde_json::Value, user: &'_ Option) -> Option { +/// Filter the post according to the value of `user`. 
+/// +/// Anonymous users cannot view private posts and protected locations; +/// Logged-in users can only view private posts targeted at them; +/// Logged-in users can't view private location data +pub fn filter_post( + mut post: serde_json::Value, + user: &'_ Option, +) -> Option { if post["properties"]["deleted"][0].is_string() { return Some(serde_json::json!({ "type": post["type"], @@ -253,8 +266,8 @@ mod tests { //#[cfg(feature="redis")] //use super::redis::tests::get_redis_instance; use super::{MicropubChannel, Storage}; - use serde_json::json; use paste::paste; + use serde_json::json; async fn test_backend_basic_operations(backend: Backend) { let post: serde_json::Value = json!({ @@ -338,15 +351,21 @@ mod tests { .await .unwrap(); - backend.update_post(&key, json!({ - "url": &key, - "add": { - "category": ["testing"], - }, - "replace": { - "content": ["Different test content"] - } - })).await.unwrap(); + backend + .update_post( + &key, + json!({ + "url": &key, + "add": { + "category": ["testing"], + }, + "replace": { + "content": ["Different test content"] + } + }), + ) + .await + .unwrap(); if let Some(returned_post) = backend.get_post(&key).await.unwrap() { assert!(returned_post.is_object()); @@ -385,10 +404,7 @@ mod tests { .put_post(&feed, "https://fireburn.ru/") .await .unwrap(); - let chans = backend - .get_channels("https://fireburn.ru/") - .await - .unwrap(); + let chans = backend.get_channels("https://fireburn.ru/").await.unwrap(); assert_eq!(chans.len(), 1); assert_eq!( chans[0], @@ -412,7 +428,7 @@ mod tests { "Vika's Hideout" ); } - + /*macro_rules! redis_test { ($func_name:expr) => { paste! { @@ -441,7 +457,7 @@ mod tests { $func_name(backend).await } } - } + }; } /*redis_test!(test_backend_basic_operations); diff --git a/src/frontend/mod.rs b/src/frontend/mod.rs index 6194249..ce4f015 100644 --- a/src/frontend/mod.rs +++ b/src/frontend/mod.rs @@ -185,7 +185,9 @@ pub async fn onboarding_receiver(mut req: Request(mut req: Request>) -> Resu webmention: None, microsub: None, }, - feeds: backend.get_channels(hcard_url).await.unwrap_or_else(|_| Vec::default()), + feeds: backend + .get_channels(hcard_url) + .await + .unwrap_or_else(|_| Vec::default()), content: MainPage { feed: &feed?, card: &card?, @@ -358,7 +363,12 @@ pub async fn render_post(mut req: Request>) -> R webmention: None, microsub: None, }, - feeds: req.state().storage.get_channels(&owner).await.unwrap_or_else(|e| Vec::default()), + feeds: req + .state() + .storage + .get_channels(&owner) + .await + .unwrap_or_else(|_| Vec::default()), content: template, } .to_string(), @@ -387,7 +397,12 @@ where .get_setting("site_name", &owner) .await .unwrap_or_else(|_| "Kitty Box!".to_string()); - let feeds = request.state().storage.get_channels(&owner).await.unwrap_or_else(|_| Vec::default()); + let feeds = request + .state() + .storage + .get_channels(&owner) + .await + .unwrap_or_else(|_| Vec::default()); let mut res = next.run(request).await; let mut code: Option = None; if let Some(err) = res.downcast_error::() { diff --git a/src/frontend/templates/mod.rs b/src/frontend/templates/mod.rs index 3585804..100e16d 100644 --- a/src/frontend/templates/mod.rs +++ b/src/frontend/templates/mod.rs @@ -165,9 +165,9 @@ markup::define! { } } else { // This is a rich food object (mm, sounds tasty! 
I wanna eat something tasty) - a."u-ate"[href=food["properties"]["uid"][0].as_str().unwrap()] { + a."u-ate"[href=food["properties"]["uid"][0].as_str().unwrap_or("#")] { @food["properties"]["name"][0].as_str() - .unwrap_or(food["properties"]["uid"][0].as_str().unwrap().truncate_ellipse(24).as_ref()) + .unwrap_or(food["properties"]["uid"][0].as_str().unwrap_or("#").truncate_ellipse(24).as_ref()) } } } @@ -186,9 +186,9 @@ markup::define! { } } else { // This is a rich food object (mm, sounds tasty! I wanna eat something tasty) - a."u-drank"[href=food["properties"]["uid"][0].as_str().unwrap()] { + a."u-drank"[href=food["properties"]["uid"][0].as_str().unwrap_or("#")] { @food["properties"]["name"][0].as_str() - .unwrap_or(food["properties"]["uid"][0].as_str().unwrap().truncate_ellipse(24).as_ref()) + .unwrap_or(food["properties"]["uid"][0].as_str().unwrap_or("#").truncate_ellipse(24).as_ref()) } } } diff --git a/src/frontend/templates/onboarding.rs b/src/frontend/templates/onboarding.rs index aab2b31..ebf8881 100644 --- a/src/frontend/templates/onboarding.rs +++ b/src/frontend/templates/onboarding.rs @@ -190,4 +190,3 @@ markup::define! { } } } - diff --git a/src/lib.rs b/src/lib.rs index 108a42f..817bda7 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -4,8 +4,8 @@ use tide::{Request, Response}; pub mod database; mod frontend; mod indieauth; -mod micropub; mod metrics; +mod micropub; use crate::indieauth::IndieAuthMiddleware; use crate::micropub::CORSMiddleware; @@ -110,21 +110,25 @@ pub async fn get_app_with_file( } #[cfg(test)] -pub async fn get_app_with_test_file(token_endpoint: surf::Url) -> ( +pub async fn get_app_with_test_file( + token_endpoint: surf::Url, +) -> ( tempdir::TempDir, database::FileStorage, - App + App, ) { use surf::Url; let tempdir = tempdir::TempDir::new("file").expect("Failed to create tempdir"); - let backend = database::FileStorage::new(tempdir.path().to_path_buf()).await.unwrap(); + let backend = database::FileStorage::new(tempdir.path().to_path_buf()) + .await + .unwrap(); let app = tide::with_state(ApplicationState { token_endpoint, media_endpoint: None, authorization_endpoint: Url::parse("https://indieauth.com/auth").unwrap(), storage: backend.clone(), internal_token: None, - http_client: surf::Client::new() + http_client: surf::Client::new(), }); (tempdir, backend, equip_app(app)) } diff --git a/src/main.rs b/src/main.rs index 3130411..aec3be0 100644 --- a/src/main.rs +++ b/src/main.rs @@ -73,8 +73,9 @@ async fn main() -> Result<(), std::io::Error> { authorization_endpoint, backend_uri, media_endpoint, - internal_token - ).await; + internal_token, + ) + .await; app.listen(host).await } else { println!("Unknown backend, not starting."); diff --git a/src/metrics.rs b/src/metrics.rs index 0537b9d..9f512dd 100644 --- a/src/metrics.rs +++ b/src/metrics.rs @@ -1,8 +1,11 @@ -use tide::{Request, Response, Result, Next}; -use prometheus::{self, IntCounterVec, HistogramVec, TextEncoder, Encoder, register_int_counter_vec, register_histogram_vec}; -use lazy_static::lazy_static; use async_trait::async_trait; -use std::time::{Instant, Duration}; +use lazy_static::lazy_static; +use prometheus::{ + self, register_histogram_vec, register_int_counter_vec, Encoder, HistogramVec, IntCounterVec, + TextEncoder, +}; +use std::time::{Duration, Instant}; +use tide::{Next, Request, Response, Result}; // Copied from https://docs.rs/prometheus/0.12.0/src/prometheus/histogram.rs.html#885-889 #[inline] @@ -13,29 +16,27 @@ fn duration_to_seconds(d: Duration) -> f64 { lazy_static! 
{ static ref HTTP_CONNS_COUNTER: IntCounterVec = register_int_counter_vec!( - "http_requests_total", "Number of processed HTTP requests", + "http_requests_total", + "Number of processed HTTP requests", &["code", "method", "url"] - ).unwrap(); - + ) + .unwrap(); static ref HTTP_REQUEST_DURATION_HISTOGRAM: HistogramVec = register_histogram_vec!( - "http_request_duration_seconds", "Duration of HTTP requests", + "http_request_duration_seconds", + "Duration of HTTP requests", &["code", "method", "url"] - ).unwrap(); + ) + .unwrap(); } pub struct InstrumentationMiddleware {} - #[async_trait] impl tide::Middleware for InstrumentationMiddleware where S: Send + Sync + Clone + 'static, { - async fn handle( - &self, - req: Request, - next: Next<'_, S>, - ) -> Result { + async fn handle(&self, req: Request, next: Next<'_, S>) -> Result { let url = req.url().to_string(); let method = req.method().to_string(); // Execute the request @@ -45,8 +46,12 @@ where // Get the code from the response let code = res.status().to_string(); - HTTP_CONNS_COUNTER.with_label_values(&[&code, &method, &url]).inc(); - HTTP_REQUEST_DURATION_HISTOGRAM.with_label_values(&[&code, &method, &url]).observe(elapsed); + HTTP_CONNS_COUNTER + .with_label_values(&[&code, &method, &url]) + .inc(); + HTTP_REQUEST_DURATION_HISTOGRAM + .with_label_values(&[&code, &method, &url]) + .observe(elapsed); Ok(res) } @@ -54,7 +59,7 @@ where pub async fn gather(_: Request) -> Result where - S: Send + Sync + Clone + S: Send + Sync + Clone, { let mut buffer: Vec = vec![]; let encoder = TextEncoder::new(); diff --git a/src/micropub/post.rs b/src/micropub/post.rs index def518a..070c822 100644 --- a/src/micropub/post.rs +++ b/src/micropub/post.rs @@ -133,17 +133,17 @@ pub fn normalize_mf2(mut body: serde_json::Value, user: &User) -> (String, serde let default_channel = me.join(DEFAULT_CHANNEL_PATH).unwrap().to_string(); body["properties"]["channel"] = json!([default_channel]); - }, + } Some("h-card") => { let default_channel = me.join(CONTACTS_CHANNEL_PATH).unwrap().to_string(); body["properties"]["channel"] = json!([default_channel]); - }, + } Some("h-food") => { let default_channel = me.join(FOOD_CHANNEL_PATH).unwrap().to_string(); body["properties"]["channel"] = json!([default_channel]); - }, + } // TODO h-event /*"h-event" => { let default_channel @@ -239,14 +239,7 @@ pub async fn new_post( Ok(exists) => { if exists { if let Err(err) = storage - .update_post( - &channel, - json!({ - "add": { - "children": [uid] - } - }), - ) + .update_post(&channel, json!({"add": {"children": [uid]}})) .await { return error_json!( @@ -258,7 +251,10 @@ pub async fn new_post( ) ); } - } else if channel == default_channel || channel == vcards_channel || channel == food_channel { + } else if channel == default_channel + || channel == vcards_channel + || channel == food_channel + { if let Err(err) = create_feed(storage, &uid, &channel, &user).await { return error_json!( 500, -- cgit 1.4.1
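A note on the template bugfix in src/frontend/templates/mod.rs above: in the u-ate/u-drank markup, the old code called food["properties"]["uid"][0].as_str().unwrap(), which panics whenever a food object has no uid, because missing keys in serde_json index to Null and as_str() then returns None. The patch falls back to "#" instead. Below is a minimal standalone sketch of that failure mode and the new fallback (an illustration only, not Kittybox code; the sample object is made up).

    use serde_json::json;

    fn main() {
        // A food object with a name but no uid, as a hand-written mf2 post might be.
        let food = json!({"properties": {"name": ["Pancakes"]}});

        // Missing keys index to Null, so as_str() yields None here...
        assert_eq!(food["properties"]["uid"][0].as_str(), None);

        // ...and the pre-patch .unwrap() would panic at this point.
        // The patched template uses a "#" fallback so the page still renders.
        let href = food["properties"]["uid"][0].as_str().unwrap_or("#");
        assert_eq!(href, "#");
    }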