From 8826d9446e6c492db2243b9921e59ce496027bef Mon Sep 17 00:00:00 2001
From: Vika
Date: Wed, 9 Apr 2025 23:31:02 +0300
Subject: cargo fmt

Change-Id: I80e81ebba3f0cdf8c094451c9fe3ee4126b8c888
---
 src/database/mod.rs | 165 ++++++++++++++++++++++++++++++++--------------------
 1 file changed, 102 insertions(+), 63 deletions(-)

(limited to 'src/database/mod.rs')

diff --git a/src/database/mod.rs b/src/database/mod.rs
index 4390ae7..de51c2c 100644
--- a/src/database/mod.rs
+++ b/src/database/mod.rs
@@ -177,7 +177,7 @@ impl StorageError {
         Self {
             msg: Cow::Borrowed(msg),
             source: None,
-            kind
+            kind,
         }
     }
     /// Create a StorageError using another arbitrary Error as a source.
@@ -219,27 +219,34 @@ pub trait Storage: std::fmt::Debug + Clone + Send + Sync {
     fn post_exists(&self, url: &str) -> impl Future> + Send;
 
     /// Load a post from the database in MF2-JSON format, deserialized from JSON.
-    fn get_post(&self, url: &str) -> impl Future>> + Send;
+    fn get_post(&self, url: &str)
+        -> impl Future>> + Send;
 
     /// Save a post to the database as an MF2-JSON structure.
     ///
     /// Note that the `post` object MUST have `post["properties"]["uid"][0]` defined.
-    fn put_post(&self, post: &serde_json::Value, user: &url::Url) -> impl Future> + Send;
+    fn put_post(
+        &self,
+        post: &serde_json::Value,
+        user: &url::Url,
+    ) -> impl Future> + Send;
 
     /// Add post to feed. Some database implementations might have optimized ways to do this.
     #[tracing::instrument(skip(self))]
     fn add_to_feed(&self, feed: &str, post: &str) -> impl Future> + Send {
         tracing::debug!("Inserting {} into {} using `update_post`", post, feed);
-        self.update_post(feed, serde_json::from_value(
-            serde_json::json!({"add": {"children": [post]}})).unwrap()
+        self.update_post(
+            feed,
+            serde_json::from_value(serde_json::json!({"add": {"children": [post]}})).unwrap(),
         )
     }
     /// Remove post from feed. Some database implementations might have optimized ways to do this.
     #[tracing::instrument(skip(self))]
     fn remove_from_feed(&self, feed: &str, post: &str) -> impl Future> + Send {
         tracing::debug!("Removing {} into {} using `update_post`", post, feed);
-        self.update_post(feed, serde_json::from_value(
-            serde_json::json!({"delete": {"children": [post]}})).unwrap()
+        self.update_post(
+            feed,
+            serde_json::from_value(serde_json::json!({"delete": {"children": [post]}})).unwrap(),
         )
     }
 
@@ -254,7 +261,11 @@ pub trait Storage: std::fmt::Debug + Clone + Send + Sync {
     ///
     /// Default implementation calls [`Storage::update_with`] and uses
     /// [`update.apply`][MicropubUpdate::apply] to update the post.
-    fn update_post(&self, url: &str, update: MicropubUpdate) -> impl Future> + Send {
+    fn update_post(
+        &self,
+        url: &str,
+        update: MicropubUpdate,
+    ) -> impl Future> + Send {
         let fut = self.update_with(url, |post| {
             update.apply(post);
         });
@@ -274,12 +285,17 @@ pub trait Storage: std::fmt::Debug + Clone + Send + Sync {
     ///
     /// Returns old post and the new post after editing.
     fn update_with(
-        &self, url: &str, f: F
+        &self,
+        url: &str,
+        f: F,
     ) -> impl Future> + Send;
 
     /// Get a list of channels available for the user represented by
     /// the `user` domain to write to.
-    fn get_channels(&self, user: &url::Url) -> impl Future>> + Send;
+    fn get_channels(
+        &self,
+        user: &url::Url,
+    ) -> impl Future>> + Send;
 
     /// Fetch a feed at `url` and return an h-feed object containing
     /// `limit` posts after a post by url `after`, filtering the content
@@ -329,7 +345,7 @@ pub trait Storage: std::fmt::Debug + Clone + Send + Sync {
         url: &'_ str,
         cursor: Option<&'_ str>,
         limit: usize,
-        user: Option<&url::Url>
+        user: Option<&url::Url>,
     ) -> impl Future)>>> + Send;
 
     /// Deletes a post from the database irreversibly. Must be idempotent.
@@ -339,7 +355,11 @@ pub trait Storage: std::fmt::Debug + Clone + Send + Sync {
     fn get_setting(&self, user: &url::Url) -> impl Future> + Send;
 
     /// Commits a setting to the setting store.
-    fn set_setting(&self, user: &url::Url, value: S::Data) -> impl Future> + Send;
+    fn set_setting(
+        &self,
+        user: &url::Url,
+        value: S::Data,
+    ) -> impl Future> + Send;
 
     /// Add (or update) a webmention on a certian post.
     ///
     /// The MF2 object describing the webmention content will always
     /// be of type `h-cite`, and the `uid` property on the object will
     /// always be set.
     ///
     /// The rationale for this function is as follows: webmentions
     /// can be duplicated, and we need to deduplicate them first. As
     /// far as I know, none of the existing databases support special
     /// logic for this (PostgreSQL's ON CONFLICT requires a unique
     /// index, which we cannot reliably provide due to the data being
     /// JSON), so we have to handle deduplication ourselves.
     ///
     /// Besides, it may even allow for nice tricks like storing the
     /// webmentions separately and rehydrating them on feed reads.
-    fn add_or_update_webmention(&self, target: &str, mention_type: MentionType, mention: serde_json::Value) -> impl Future> + Send;
+    fn add_or_update_webmention(
+        &self,
+        target: &str,
+        mention_type: MentionType,
+        mention: serde_json::Value,
+    ) -> impl Future> + Send;
 
     /// Return a stream of all posts ever made by a certain user, in
     /// reverse-chronological order.
-    fn all_posts<'this>(&'this self, user: &url::Url) -> impl Future + Send + 'this>> + Send;
+    fn all_posts<'this>(
+        &'this self,
+        user: &url::Url,
+    ) -> impl Future + Send + 'this>> + Send;
 }
 
 #[cfg(test)]
 mod tests {
@@ -464,7 +492,8 @@
                     "replace": {
                         "content": ["Different test content"]
                     }
-                })).unwrap(),
+                }))
+                .unwrap(),
             )
             .await
             .unwrap();
@@ -511,7 +540,10 @@
             .put_post(&feed, &"https://fireburn.ru/".parse().unwrap())
             .await
             .unwrap();
-        let chans = backend.get_channels(&"https://fireburn.ru/".parse().unwrap()).await.unwrap();
+        let chans = backend
+            .get_channels(&"https://fireburn.ru/".parse().unwrap())
+            .await
+            .unwrap();
         assert_eq!(chans.len(), 1);
         assert_eq!(
             chans[0],
@@ -526,16 +558,16 @@
         backend
             .set_setting::(
                 &"https://fireburn.ru/".parse().unwrap(),
-                "Vika's Hideout".to_owned()
+                "Vika's Hideout".to_owned(),
             )
             .await
             .unwrap();
         assert_eq!(
             backend
-            .get_setting::(&"https://fireburn.ru/".parse().unwrap())
-            .await
-            .unwrap()
-            .as_ref(),
+                .get_setting::(&"https://fireburn.ru/".parse().unwrap())
+                .await
+                .unwrap()
+                .as_ref(),
             "Vika's Hideout"
         );
     }
@@ -597,11 +629,9 @@
 
     async fn test_feed_pagination(backend: Backend) {
         let posts = {
-            let mut posts = std::iter::from_fn(
-                || Some(gen_random_post("fireburn.ru"))
-            )
-            .take(40)
-            .collect::>();
+            let mut posts = std::iter::from_fn(|| Some(gen_random_post("fireburn.ru")))
+                .take(40)
+                .collect::>();
 
             // Reverse the array so it's in reverse-chronological order
             posts.reverse();
@@ -629,7 +659,10 @@
                 .put_post(post, &"https://fireburn.ru/".parse().unwrap())
                 .await
                 .unwrap();
-            backend.add_to_feed(key, post["properties"]["uid"][0].as_str().unwrap()).await.unwrap();
+            backend
+                .add_to_feed(key, post["properties"]["uid"][0].as_str().unwrap())
+                .await
+                .unwrap();
         }
 
         let limit: usize = 10;
@@ -648,23 +681,16 @@
                 .unwrap()
                 .iter()
                 .map(|post| post["properties"]["uid"][0].as_str().unwrap())
-                .collect::>()
-                [0..10],
+                .collect::>()[0..10],
             posts
                 .iter()
                 .map(|post| post["properties"]["uid"][0].as_str().unwrap())
-                .collect::>()
-                [0..10]
+                .collect::>()[0..10]
         );
 
         tracing::debug!("Continuing with cursor: {:?}", cursor);
         let (result2, cursor2) = backend
-            .read_feed_with_cursor(
-                key,
-                cursor.as_deref(),
-                limit,
-                None,
-            )
+            .read_feed_with_cursor(key, cursor.as_deref(), limit, None)
             .await
             .unwrap()
             .unwrap();
@@ -676,12 +702,7 @@
 
         tracing::debug!("Continuing with cursor: {:?}", cursor);
         let (result3, cursor3) = backend
-            .read_feed_with_cursor(
-                key,
-                cursor2.as_deref(),
-                limit,
-                None,
-            )
+            .read_feed_with_cursor(key, cursor2.as_deref(), limit, None)
             .await
             .unwrap()
             .unwrap();
@@ -693,12 +714,7 @@
 
         tracing::debug!("Continuing with cursor: {:?}", cursor);
         let (result4, _) = backend
-            .read_feed_with_cursor(
-                key,
-                cursor3.as_deref(),
-                limit,
-                None,
-            )
+            .read_feed_with_cursor(key, cursor3.as_deref(), limit, None)
             .await
             .unwrap()
             .unwrap();
@@ -725,24 +741,43 @@
 
     async fn test_webmention_addition(db: Backend) {
         let post = gen_random_post("fireburn.ru");
-        db.put_post(&post, &"https://fireburn.ru/".parse().unwrap()).await.unwrap();
+        db.put_post(&post, &"https://fireburn.ru/".parse().unwrap())
+            .await
+            .unwrap();
 
         const TYPE: MentionType = MentionType::Reply;
         let target = post["properties"]["uid"][0].as_str().unwrap();
         let mut reply = gen_random_mention("aaronparecki.com", TYPE, target);
 
-        let (read_post, _) = db.read_feed_with_cursor(target, None, 20, None).await.unwrap().unwrap();
+        let (read_post, _) = db
+            .read_feed_with_cursor(target, None, 20, None)
+            .await
+            .unwrap()
+            .unwrap();
         assert_eq!(post, read_post);
 
-        db.add_or_update_webmention(target, TYPE, reply.clone()).await.unwrap();
+        db.add_or_update_webmention(target, TYPE, reply.clone())
+            .await
+            .unwrap();
 
-        let (read_post, _) = db.read_feed_with_cursor(target, None, 20, None).await.unwrap().unwrap();
+        let (read_post, _) = db
+            .read_feed_with_cursor(target, None, 20, None)
+            .await
+            .unwrap()
+            .unwrap();
         assert_eq!(read_post["properties"]["comment"][0], reply);
 
-        reply["properties"]["content"][0] = json!(rand::random::().to_string());
+        reply["properties"]["content"][0] =
+            json!(rand::random::().to_string());
 
-        db.add_or_update_webmention(target, TYPE, reply.clone()).await.unwrap();
-        let (read_post, _) = db.read_feed_with_cursor(target, None, 20, None).await.unwrap().unwrap();
+        db.add_or_update_webmention(target, TYPE, reply.clone())
+            .await
+            .unwrap();
+        let (read_post, _) = db
+            .read_feed_with_cursor(target, None, 20, None)
+            .await
+            .unwrap()
+            .unwrap();
         assert_eq!(read_post["properties"]["comment"][0], reply);
     }
 
@@ -752,16 +787,20 @@
         let post = {
            let mut post = gen_random_post("fireburn.ru");
            let urls = post["properties"]["url"].as_array_mut().unwrap();
-            urls.push(serde_json::Value::String(
-                PERMALINK.to_owned()
-            ));
+            urls.push(serde_json::Value::String(PERMALINK.to_owned()));
            post
         };
 
-        db.put_post(&post, &"https://fireburn.ru/".parse().unwrap()).await.unwrap();
+        db.put_post(&post, &"https://fireburn.ru/".parse().unwrap())
+            .await
+            .unwrap();
 
         for i in post["properties"]["url"].as_array().unwrap() {
-            let (read_post, _) = db.read_feed_with_cursor(i.as_str().unwrap(), None, 20, None).await.unwrap().unwrap();
+            let (read_post, _) = db
+                .read_feed_with_cursor(i.as_str().unwrap(), None, 20, None)
+                .await
+                .unwrap()
+                .unwrap();
             assert_eq!(read_post, post);
         }
     }
@@ -786,7 +825,7 @@
             async fn $func_name() {
                 let tempdir = tempfile::tempdir().expect("Failed to create tempdir");
tempdir"); let backend = super::super::FileStorage { - root_dir: tempdir.path().to_path_buf() + root_dir: tempdir.path().to_path_buf(), }; super::$func_name(backend).await } @@ -800,7 +839,7 @@ mod tests { #[tracing_test::traced_test] async fn $func_name( pool_opts: sqlx::postgres::PgPoolOptions, - connect_opts: sqlx::postgres::PgConnectOptions + connect_opts: sqlx::postgres::PgConnectOptions, ) -> Result<(), sqlx::Error> { let db = { //use sqlx::ConnectOptions; -- cgit 1.4.1