| author | Vika <vika@fireburn.ru> | 2021-05-17 18:44:59 +0300 |
|---|---|---|
| committer | Vika <vika@fireburn.ru> | 2021-05-17 18:44:59 +0300 |
| commit | 9a2791d745f4a9f0307eb69b50de5629f51564cc (patch) | |
| tree | 4c20804b1112e5302b4eae7be54de98c13e72759 /src/micropub | |
| parent | 9c3ede5d1ec339a59d5f8274d8043928cafb5a44 (diff) | |
| download | kittybox-9a2791d745f4a9f0307eb69b50de5629f51564cc.tar.zst | |
Make rustfmt happy
Diffstat (limited to 'src/micropub')

| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/micropub/get.rs | 17 |
| -rw-r--r-- | src/micropub/mod.rs | 2 |
| -rw-r--r-- | src/micropub/post.rs | 624 |

3 files changed, 439 insertions, 204 deletions
diff --git a/src/micropub/get.rs b/src/micropub/get.rs
index e106883..525bf12 100644
--- a/src/micropub/get.rs
+++ b/src/micropub/get.rs
@@ -1,23 +1,26 @@
-use tide::prelude::{Deserialize, json};
-use tide::{Request, Response, Result};
-use crate::ApplicationState;
-use crate::database::{MicropubChannel,Storage};
+use crate::database::{MicropubChannel, Storage};
 use crate::indieauth::User;
+use crate::ApplicationState;
+use tide::prelude::{json, Deserialize};
+use tide::{Request, Response, Result};
 
 #[derive(Deserialize)]
 struct QueryOptions {
     q: String,
-    url: Option<String>
+    url: Option<String>,
 }
 
 pub async fn get_handler<Backend>(req: Request<ApplicationState<Backend>>) -> Result
 where
-    Backend: Storage + Send + Sync
+    Backend: Storage + Send + Sync,
 {
     let user = req.ext::<User>().unwrap();
     let backend = &req.state().storage;
     let media_endpoint = &req.state().media_endpoint;
-    let query = req.query::<QueryOptions>().unwrap_or(QueryOptions { q: "".to_string(), url: None });
+    let query = req.query::<QueryOptions>().unwrap_or(QueryOptions {
+        q: "".to_string(),
+        url: None,
+    });
     match &*query.q {
         "config" => {
             let channels: Vec<MicropubChannel>;
diff --git a/src/micropub/mod.rs b/src/micropub/mod.rs
index 9bc553c..68a3134 100644
--- a/src/micropub/mod.rs
+++ b/src/micropub/mod.rs
@@ -2,5 +2,5 @@ pub mod get;
 pub mod post;
 
 pub use get::get_handler;
+pub use post::normalize_mf2;
 pub use post::post_handler;
-pub use post::normalize_mf2;
\ No newline at end of file
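A note for readers skimming the get.rs hunk: the `unwrap_or` fallback exists because an empty query string fails to deserialize (`q` is a required field). A minimal sketch of that behavior, assuming plain `serde_urlencoded` rather than tide's internal query parser:

```rust
use serde::Deserialize;

#[derive(Deserialize)]
struct QueryOptions {
    q: String,
    url: Option<String>,
}

fn main() {
    // No query string: `q` is missing, deserialization fails, the default kicks in.
    let query = serde_urlencoded::from_str::<QueryOptions>("").unwrap_or(QueryOptions {
        q: "".to_string(),
        url: None,
    });
    assert_eq!(query.q, "");

    // "?q=config" parses normally.
    let query: QueryOptions = serde_urlencoded::from_str("q=config").unwrap();
    assert_eq!(query.q, "config");
}
```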
diff --git a/src/micropub/post.rs b/src/micropub/post.rs
index 6183906..b3fe4ee 100644
--- a/src/micropub/post.rs
+++ b/src/micropub/post.rs
@@ -1,17 +1,17 @@
+use crate::database::Storage;
+use crate::indieauth::User;
+use crate::ApplicationState;
+use chrono::prelude::*;
 use core::iter::Iterator;
-use std::str::FromStr;
-use std::convert::TryInto;
-use log::{warn, error};
 use futures::stream;
 use futures::StreamExt;
-use chrono::prelude::*;
 use http_types::Mime;
+use log::{error, warn};
+use newbase60::num_to_sxg;
+use std::convert::TryInto;
+use std::str::FromStr;
 use tide::prelude::json;
 use tide::{Request, Response, Result};
-use newbase60::num_to_sxg;
-use crate::ApplicationState;
-use crate::database::{Storage};
-use crate::indieauth::User;
 
 static DEFAULT_CHANNEL_PATH: &str = "/feeds/main";
 static DEFAULT_CHANNEL_NAME: &str = "Main feed";
@@ -43,8 +43,9 @@ fn get_folder_from_type(post_type: &str) -> String {
         "h-card" => "vcards/",
         "h-event" => "events/",
         "h-food" => "food/",
-        _ => "posts/"
-    }).to_string()
+        _ => "posts/",
+    })
+    .to_string()
 }
 
 pub fn normalize_mf2(mut body: serde_json::Value, user: &User) -> (String, serde_json::Value) {
@@ -63,34 +64,32 @@ pub fn normalize_mf2(mut body: serde_json::Value, user: &User) -> (String, serde
                 // Do not attempt to recover the information.
                 // Do not pass GO. Do not collect $200.
                 let curtime: DateTime<Local> = Local::now();
-                body["properties"]["published"] = serde_json::Value::Array(vec![
-                    serde_json::Value::String(curtime.to_rfc3339())
-                ]);
+                body["properties"]["published"] =
+                    serde_json::Value::Array(vec![serde_json::Value::String(curtime.to_rfc3339())]);
                 published = chrono::DateTime::from(curtime);
             }
         }
     } else {
         // Set the datetime.
         let curtime: DateTime<Local> = Local::now();
-        body["properties"]["published"] = serde_json::Value::Array(vec![
-            serde_json::Value::String(curtime.to_rfc3339())
-        ]);
+        body["properties"]["published"] =
+            serde_json::Value::Array(vec![serde_json::Value::String(curtime.to_rfc3339())]);
         published = chrono::DateTime::from(curtime);
     }
     match body["properties"]["uid"][0].as_str() {
         None => {
             let uid = serde_json::Value::String(
                 me.join(
-                    &(folder.clone() + &num_to_sxg(published.timestamp_millis().try_into().unwrap()))
-                ).unwrap().to_string());
+                    &(folder.clone()
+                        + &num_to_sxg(published.timestamp_millis().try_into().unwrap())),
+                )
+                .unwrap()
+                .to_string(),
+            );
             body["properties"]["uid"] = serde_json::Value::Array(vec![uid.clone()]);
             match body["properties"]["url"].as_array_mut() {
-                Some(array) => {
-                    array.push(uid)
-                }
-                None => {
-                    body["properties"]["url"] = body["properties"]["uid"].clone()
-                }
+                Some(array) => array.push(uid),
+                None => body["properties"]["url"] = body["properties"]["uid"].clone(),
             }
         }
         Some(uid_str) => {
@@ -101,14 +100,13 @@ pub fn normalize_mf2(mut body: serde_json::Value, user: &User) -> (String, serde
                     array.push(serde_json::Value::String(uid))
                 }
             }
-            None => {
-                body["properties"]["url"] = body["properties"]["uid"].clone()
-            }
+            None => body["properties"]["url"] = body["properties"]["uid"].clone(),
         }
     }
     if let Some(slugs) = body["properties"]["mp-slug"].as_array() {
-        let new_urls = slugs.iter()
+        let new_urls = slugs
+            .iter()
             .map(|i| i.as_str().unwrap_or(""))
             .filter(|i| i != &"")
             .map(|i| me.join(&((&folder).clone() + i)).unwrap().to_string())
             .collect::<Vec<String>>();
@@ -147,15 +145,25 @@ pub fn normalize_mf2(mut body: serde_json::Value, user: &User) -> (String, serde
     }
     // TODO: maybe highlight #hashtags?
    // Find other processing to do and insert it here
-    return (body["properties"]["uid"][0].as_str().unwrap().to_string(), body)
+    return (
+        body["properties"]["uid"][0].as_str().unwrap().to_string(),
+        body,
+    );
 }
 
-pub async fn new_post<S: Storage>(req: Request<ApplicationState<S>>, body: serde_json::Value) -> Result {
+pub async fn new_post<S: Storage>(
+    req: Request<ApplicationState<S>>,
+    body: serde_json::Value,
+) -> Result {
     // First, check for rights.
     let user = req.ext::<User>().unwrap();
     let storage = &req.state().storage;
     if !user.check_scope("create") {
-        return error_json!(401, "invalid_scope", "Not enough privileges to post. Try a token with a \"create\" scope instead.")
+        return error_json!(
+            401,
+            "invalid_scope",
+            "Not enough privileges to post. Try a token with a \"create\" scope instead."
+        );
     }
     let (uid, post) = normalize_mf2(body, user);
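Context for the `num_to_sxg` call reflowed above: when a post arrives without a `uid`, one is minted by base60-encoding the publication timestamp in milliseconds and joining it onto the post-type folder under the user's `me` URL. A hedged sketch of that scheme, assuming the same `newbase60` and `url` crates (the timestamp value is illustrative):

```rust
use newbase60::num_to_sxg;
use url::Url;

fn main() {
    let me = Url::parse("https://fireburn.ru/").unwrap();
    let published_millis: u64 = 1_621_266_299_000; // hypothetical Unix time in ms
    // The folder comes from get_folder_from_type(); "posts/" is the default.
    let uid = me
        .join(&format!("posts/{}", num_to_sxg(published_millis)))
        .unwrap();
    println!("{}", uid); // e.g. https://fireburn.ru/posts/<base60-id>
}
```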
@@ -163,29 +171,54 @@ pub async fn new_post<S: Storage>(req: Request<ApplicationState<S>>, body: serde
     // This software might also be used in a multi-user setting
     // where several users or identities share one Micropub server
     // (maybe a family website or a shitpost sideblog?)
-    if post["properties"]["url"].as_array().unwrap().iter().any(|url| !url.as_str().unwrap().starts_with(user.me.as_str()))
-        || !post["properties"]["uid"][0].as_str().unwrap().starts_with(user.me.as_str())
-        || post["properties"]["channel"].as_array().unwrap().iter().any(|url| !url.as_str().unwrap().starts_with(user.me.as_str()))
+    if post["properties"]["url"]
+        .as_array()
+        .unwrap()
+        .iter()
+        .any(|url| !url.as_str().unwrap().starts_with(user.me.as_str()))
+        || !post["properties"]["uid"][0]
+            .as_str()
+            .unwrap()
+            .starts_with(user.me.as_str())
+        || post["properties"]["channel"]
+            .as_array()
+            .unwrap()
+            .iter()
+            .any(|url| !url.as_str().unwrap().starts_with(user.me.as_str()))
     {
-        return error_json!(403, "forbidden", "You're trying to post to someone else's website...")
+        return error_json!(
+            403,
+            "forbidden",
+            "You're trying to post to someone else's website..."
+        );
     }
-
     match storage.post_exists(&uid).await {
-        Ok(exists) => if exists {
-            return error_json!(409, "already_exists", format!("A post with the exact same UID already exists in the database: {}", uid))
-        },
-        Err(err) => return Ok(err.into())
+        Ok(exists) => {
+            if exists {
+                return error_json!(
+                    409,
+                    "already_exists",
+                    format!(
+                        "A post with the exact same UID already exists in the database: {}",
+                        uid
+                    )
+                );
+            }
+        }
+        Err(err) => return Ok(err.into()),
     }
 
     if let Err(err) = storage.put_post(&post).await {
-        return error_json!(500, "database_error", format!("{}", err))
+        return error_json!(500, "database_error", format!("{}", err));
     }
 
     // It makes sense to use a loop here, because you wouldn't post to a hundred channels at once
     // Mostly one or two, and even those ones will be the ones picked for you by software
     for channel in post["properties"]["channel"]
-        .as_array().unwrap().iter()
+        .as_array()
+        .unwrap()
+        .iter()
         .map(|i| i.as_str().unwrap_or("").to_string())
         .filter(|i| !i.is_empty())
         .collect::<Vec<_>>()
@@ -193,22 +226,44 @@ pub async fn new_post<S: Storage>(req: Request<ApplicationState<S>>, body: serde
         let default_channel = user.me.join(DEFAULT_CHANNEL_PATH).unwrap().to_string();
         let vcards_channel = user.me.join(CONTACTS_CHANNEL_PATH).unwrap().to_string();
         match storage.post_exists(&channel).await {
-            Ok(exists) => if exists {
-                if let Err(err) = storage.update_post(&channel, json!({
-                    "add": {
-                        "children": [uid]
-                    }
-                })).await {
-                    return error_json!(500, "database_error", format!("Couldn't insert post into the channel due to a database error: {}", err))
-                }
-            } else if channel == default_channel || channel == vcards_channel {
-                if let Err(err) = create_feed(storage, &uid, &channel, &user).await {
-                    return error_json!(500, "database_error", format!("Couldn't save feed: {}", err))
-                }
-            } else {
-                warn!("Ignoring request to post to a non-existent feed: {}", channel);
-            },
-            Err(err) => return error_json!(500, "database_error", err)
+            Ok(exists) => {
+                if exists {
+                    if let Err(err) = storage
+                        .update_post(
+                            &channel,
+                            json!({
+                                "add": {
+                                    "children": [uid]
+                                }
+                            }),
+                        )
+                        .await
+                    {
+                        return error_json!(
+                            500,
+                            "database_error",
+                            format!(
+                                "Couldn't insert post into the channel due to a database error: {}",
+                                err
+                            )
+                        );
+                    }
+                } else if channel == default_channel || channel == vcards_channel {
+                    if let Err(err) = create_feed(storage, &uid, &channel, &user).await {
+                        return error_json!(
+                            500,
+                            "database_error",
+                            format!("Couldn't save feed: {}", err)
+                        );
+                    }
+                } else {
+                    warn!(
+                        "Ignoring request to post to a non-existent feed: {}",
+                        channel
+                    );
+                }
+            }
+            Err(err) => return error_json!(500, "database_error", err),
         }
     }
     // END WRITE BOUNDARY
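The three-clause condition above is only reflowed, not changed; it enforces a single invariant: every `url`, the `uid`, and every `channel` must live under the authenticated user's `me` URL. The same rule, condensed into a hypothetical helper:

```rust
// Hypothetical helper; the real check inlines this over the post's JSON properties.
fn owned_by(me: &str, urls: &[&str]) -> bool {
    urls.iter().all(|u| u.starts_with(me))
}

fn main() {
    let me = "https://fireburn.ru/";
    assert!(owned_by(me, &["https://fireburn.ru/posts/1"]));
    // Posting into someone else's site is rejected with 403 "forbidden".
    assert!(!owned_by(me, &["https://example.com/posts/1"]));
}
```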
"database_error", err), } } // END WRITE BOUNDARY @@ -222,26 +277,39 @@ pub async fn new_post<S: Storage>(req: Request<ApplicationState<S>>, body: serde .build()); } -async fn create_feed(storage: &impl Storage, uid: &str, channel: &str, user: &User) -> crate::database::Result<()> { +async fn create_feed( + storage: &impl Storage, + uid: &str, + channel: &str, + user: &User, +) -> crate::database::Result<()> { let path = url::Url::parse(channel).unwrap().path().to_string(); let (name, slug) = if path == DEFAULT_CHANNEL_PATH { (DEFAULT_CHANNEL_NAME, "main") } else if path == CONTACTS_CHANNEL_PATH { (CONTACTS_CHANNEL_NAME, "vcards") - } else { panic!("Tried to create an unknown default feed!"); }; - - let (_, feed) = normalize_mf2(json!({ - "type": ["h-feed"], - "properties": { - "name": [name], - "mp-slug": [slug], - }, - "children": [uid] - }), &user); + } else { + panic!("Tried to create an unknown default feed!"); + }; + + let (_, feed) = normalize_mf2( + json!({ + "type": ["h-feed"], + "properties": { + "name": [name], + "mp-slug": [slug], + }, + "children": [uid] + }), + &user, + ); storage.put_post(&feed).await } -async fn post_process_new_post<S: Storage>(req: Request<ApplicationState<S>>, post: serde_json::Value) { +async fn post_process_new_post<S: Storage>( + req: Request<ApplicationState<S>>, + post: serde_json::Value, +) { // TODO: Post-processing the post (aka second write pass) // - [-] Download rich reply contexts // - [-] Syndicate the post if requested, add links to the syndicated copies @@ -262,11 +330,9 @@ async fn post_process_new_post<S: Storage>(req: Request<ApplicationState<S>>, po for prop in &["in-reply-to", "like-of", "repost-of", "bookmark-of"] { if let Some(array) = post["properties"][prop].as_array() { contextually_significant_posts.extend( - array.iter() - .filter_map(|v| v.as_str() - .and_then(|v| surf::Url::parse(v).ok() - ) - ) + array + .iter() + .filter_map(|v| v.as_str().and_then(|v| surf::Url::parse(v).ok())), ); } } @@ -275,26 +341,28 @@ async fn post_process_new_post<S: Storage>(req: Request<ApplicationState<S>>, po contextually_significant_posts.dedup(); // 1.3. Fetch the posts with their bodies and save them in a new Vec<(surf::Url, String)> - let posts_with_bodies: Vec<(surf::Url, String)> = stream::iter(contextually_significant_posts.into_iter()) - .filter_map(|v: surf::Url| async move { - if let Ok(res) = http.get(&v).send().await { - if res.status() != 200 { - return None + let posts_with_bodies: Vec<(surf::Url, String)> = + stream::iter(contextually_significant_posts.into_iter()) + .filter_map(|v: surf::Url| async move { + if let Ok(res) = http.get(&v).send().await { + if res.status() != 200 { + return None; + } else { + return Some((v, res)); + } } else { - return Some((v, res)) + return None; } - } else { - return None - } - }) - .filter_map(|(v, mut res): (surf::Url, surf::Response)| async move { - if let Ok(body) = res.body_string().await { - return Some((v, body)) - } else { - return None - } - }) - .collect().await; + }) + .filter_map(|(v, mut res): (surf::Url, surf::Response)| async move { + if let Ok(body) = res.body_string().await { + return Some((v, body)); + } else { + return None; + } + }) + .collect() + .await; // 1.4. Parse the bodies and include them in relevant places on the MF2 struct // This requires an MF2 parser, and there are none for Rust at the moment. // @@ -303,24 +371,32 @@ async fn post_process_new_post<S: Storage>(req: Request<ApplicationState<S>>, po // 2. 
@@ -303,24 +371,32 @@ async fn post_process_new_post<S: Storage>(req: Request<ApplicationState<S>>, po
     // 2. Syndicate the post
     let syndicated_copies: Vec<serde_json::Value>;
     if let Some(syndication_targets) = post["properties"]["syndicate-to"].as_array() {
-        syndicated_copies = stream::iter(syndication_targets.into_iter()
-            .filter_map(|v| v.as_str())
-            .filter_map(|t| surf::Url::parse(t).ok())
-            .collect::<Vec<_>>().into_iter()
-            .map(|_t: surf::Url| async move {
-                // TODO: Define supported syndication methods
-                // and syndicate the endpoint there
-                // Possible ideas:
-                //  - indieweb.xyz (might need a lot of space for the buttons though, investigate proposing grouping syndication targets)
-                //  - news.indieweb.org (IndieNews - needs a category linking to #indienews)
-                //  - Twitter via brid.gy (do I really need Twitter syndication tho?)
-                if false {
-                    Some("")
-                } else {
-                    None
-                }
-            })
-        ).buffer_unordered(3).filter_map(|v| async move { v }).map(|v| serde_json::Value::String(v.to_string())).collect::<Vec<_>>().await;
+        syndicated_copies = stream::iter(
+            syndication_targets
+                .into_iter()
+                .filter_map(|v| v.as_str())
+                .filter_map(|t| surf::Url::parse(t).ok())
+                .collect::<Vec<_>>()
+                .into_iter()
+                .map(|_t: surf::Url| async move {
+                    // TODO: Define supported syndication methods
+                    // and syndicate the endpoint there
+                    // Possible ideas:
+                    //  - indieweb.xyz (might need a lot of space for the buttons though, investigate proposing grouping syndication targets)
+                    //  - news.indieweb.org (IndieNews - needs a category linking to #indienews)
+                    //  - Twitter via brid.gy (do I really need Twitter syndication tho?)
+                    if false {
+                        Some("")
+                    } else {
+                        None
+                    }
+                }),
+        )
+        .buffer_unordered(3)
+        .filter_map(|v| async move { v })
+        .map(|v| serde_json::Value::String(v.to_string()))
+        .collect::<Vec<_>>()
+        .await;
     } else {
         syndicated_copies = vec![]
     }
@@ -363,35 +439,67 @@ async fn post_process_new_post<S: Storage>(req: Request<ApplicationState<S>>, po
             // TODO: Replace this function once the MF2 parser is ready
             // A compliant parser's output format includes rels,
             // we could just find a Webmention one in there
-            let pattern = easy_scraper::Pattern::new(r#"<link href="{url}" rel="webmention">"#).expect("Pattern for webmentions couldn't be parsed");
+            let pattern = easy_scraper::Pattern::new(r#"<link href="{url}" rel="webmention">"#)
+                .expect("Pattern for webmentions couldn't be parsed");
             let matches = pattern.matches(&body);
-            if matches.is_empty() { return None }
+            if matches.is_empty() {
+                return None;
+            }
             let endpoint = &matches[0]["url"];
-            if let Ok(endpoint) = url.join(endpoint) { Some((url, endpoint)) } else { None }
+            if let Ok(endpoint) = url.join(endpoint) {
+                Some((url, endpoint))
+            } else {
+                None
+            }
         })
         .map(|(target, endpoint)| async move {
-            let response = http.post(&endpoint)
+            let response = http
+                .post(&endpoint)
                 .content_type("application/x-www-form-urlencoded")
                 .body(
-                    serde_urlencoded::to_string(vec![("source", source), ("target", &target.to_string())])
-                        .expect("Couldn't construct webmention form")
-                ).send().await;
+                    serde_urlencoded::to_string(vec![
+                        ("source", source),
+                        ("target", &target.to_string()),
+                    ])
+                    .expect("Couldn't construct webmention form"),
+                )
+                .send()
+                .await;
             match response {
-                Ok(response) => if response.status() == 200 || response.status() == 201 || response.status() == 202 {
-                    Ok(())
-                } else {
-                    error!("Sending webmention for {} to {} failed: Endpoint replied with HTTP {}", target, endpoint, response.status());
-                    Err(())
+                Ok(response) => {
+                    if response.status() == 200
+                        || response.status() == 201
+                        || response.status() == 202
+                    {
+                        Ok(())
+                    } else {
+                        error!(
+                            "Sending webmention for {} to {} failed: Endpoint replied with HTTP {}",
+                            target,
+                            endpoint,
+                            response.status()
+                        );
+                        Err(())
+                    }
                 }
                 Err(err) => {
-                    error!("Sending webmention for {} to {} failed: {}", target, endpoint, err);
+                    error!(
+                        "Sending webmention for {} to {} failed: {}",
+                        target, endpoint, err
+                    );
                     Err(())
                 }
             }
-        }).buffer_unordered(3).collect::<Vec<_>>().await;
+        })
+        .buffer_unordered(3)
+        .collect::<Vec<_>>()
+        .await;
 }
 
-async fn process_json<S: Storage>(req: Request<ApplicationState<S>>, body: serde_json::Value) -> Result {
+async fn process_json<S: Storage>(
+    req: Request<ApplicationState<S>>,
+    body: serde_json::Value,
+) -> Result {
     let is_action = body["action"].is_string() && body["url"].is_string();
     if is_action {
         // This could be an update, a deletion or an undeletion request.
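The webmention sender above reduces to one form-encoded POST carrying `source` and `target`. A sketch of just the payload construction, using `serde_urlencoded` as the patch itself does (URLs are illustrative):

```rust
fn main() {
    let source = "https://fireburn.ru/posts/xyz"; // hypothetical post UID
    let target = "https://example.com/some-post";
    let body = serde_urlencoded::to_string([("source", source), ("target", target)])
        .expect("Couldn't construct webmention form");
    // POSTed to the discovered endpoint with
    // Content-Type: application/x-www-form-urlencoded; 200/201/202 count as success.
    assert_eq!(
        body,
        "source=https%3A%2F%2Ffireburn.ru%2Fposts%2Fxyz&target=https%3A%2F%2Fexample.com%2Fsome-post"
    );
}
```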
@@ -402,37 +510,51 @@ async fn process_json<S: Storage>(req: Request<ApplicationState<S>>, body: serde
         match action {
             "delete" => {
                 if !user.check_scope("delete") {
-                    return error_json!(401, "insufficient_scope", "You need a `delete` scope to delete posts.")
+                    return error_json!(
+                        401,
+                        "insufficient_scope",
+                        "You need a `delete` scope to delete posts."
+                    );
                 }
                 if let Err(error) = req.state().storage.delete_post(&url).await {
-                    return Ok(error.into())
+                    return Ok(error.into());
                 }
                 return Ok(Response::builder(200).build());
-            },
+            }
             "update" => {
                 if !user.check_scope("update") {
-                    return error_json!(401, "insufficient_scope", "You need an `update` scope to update posts.")
+                    return error_json!(
+                        401,
+                        "insufficient_scope",
+                        "You need an `update` scope to update posts."
+                    );
                 }
                 if let Err(error) = req.state().storage.update_post(&url, body.clone()).await {
-                    return Ok(error.into())
+                    return Ok(error.into());
                 } else {
-                    return Ok(Response::builder(204).build())
+                    return Ok(Response::builder(204).build());
                 }
-            },
-            _ => {
-                return error_json!(400, "invalid_request", "This action is not supported.")
             }
+            _ => return error_json!(400, "invalid_request", "This action is not supported."),
         }
     } else if body["type"][0].is_string() {
         // This is definitely an h-entry or something similar. Check if it has properties?
         if body["properties"].is_object() {
             // Ok, this is definitely a new h-entry. Let's save it.
-            return new_post(req, body).await
+            return new_post(req, body).await;
         } else {
-            return error_json!(400, "invalid_request", "This MF2-JSON object has a type, but not properties. This makes no sense to post.")
+            return error_json!(
+                400,
+                "invalid_request",
+                "This MF2-JSON object has a type, but not properties. This makes no sense to post."
+            );
         }
     } else {
-        return error_json!(400, "invalid_request", "Try sending MF2-structured data or an object with an \"action\" and \"url\" keys.")
+        return error_json!(
+            400,
+            "invalid_request",
+            "Try sending MF2-structured data or an object with an \"action\" and \"url\" keys."
+        );
     }
 }
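`process_json` distinguishes exactly two request shapes: an action object (`action` plus `url`) and an MF2 object (`type` plus `properties`). A minimal sketch of the two probes it runs, using `serde_json`:

```rust
use serde_json::json;

fn main() {
    let action = json!({ "action": "delete", "url": "https://fireburn.ru/posts/xyz" });
    let entry = json!({ "type": ["h-entry"], "properties": { "content": ["hi"] } });

    // The same checks the handler performs before dispatching.
    assert!(action["action"].is_string() && action["url"].is_string());
    assert!(entry["type"][0].is_string() && entry["properties"].is_object());
}
```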
@@ -440,12 +562,15 @@ fn convert_form_to_mf2_json(form: Vec<(String, String)>) -> serde_json::Value {
     let mut mf2 = json!({"type": [], "properties": {}});
     for (k, v) in form {
         if k == "h" {
-            mf2["type"].as_array_mut().unwrap().push(json!("h-".to_string() + &v));
+            mf2["type"]
+                .as_array_mut()
+                .unwrap()
+                .push(json!("h-".to_string() + &v));
         } else if k != "access_token" {
             let key = k.strip_suffix("[]").unwrap_or(&k);
             match mf2["properties"][key].as_array_mut() {
                 Some(prop) => prop.push(json!(v)),
-                None => mf2["properties"][key] = json!([v])
+                None => mf2["properties"][key] = json!([v]),
             }
         }
     }
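The mapping `convert_form_to_mf2_json` implements, restated as a trimmed-down standalone sketch: `h=entry` becomes `"type": ["h-entry"]`, every other key becomes a single-element property array (repeated keys are not accumulated here, unlike the real function), and `access_token` is dropped:

```rust
use serde_json::json;

fn main() {
    let form: Vec<(String, String)> =
        serde_urlencoded::from_str("h=entry&content=something%20interesting").unwrap();
    let mut mf2 = json!({ "type": [], "properties": {} });
    for (k, v) in form {
        if k == "h" {
            mf2["type"].as_array_mut().unwrap().push(json!(format!("h-{}", v)));
        } else if k != "access_token" {
            mf2["properties"][k.trim_end_matches("[]")] = json!([v]);
        }
    }
    assert_eq!(
        mf2,
        json!({ "type": ["h-entry"], "properties": { "content": ["something interesting"] } })
    );
}
```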
@@ -455,33 +580,50 @@ fn convert_form_to_mf2_json(form: Vec<(String, String)>) -> serde_json::Value {
     mf2
 }
 
-async fn process_form<S: Storage>(req: Request<ApplicationState<S>>, form: Vec<(String, String)>) -> Result {
+async fn process_form<S: Storage>(
+    req: Request<ApplicationState<S>>,
+    form: Vec<(String, String)>,
+) -> Result {
     if let Some((_, v)) = form.iter().find(|(k, _)| k == "action") {
         if v == "delete" {
             let user = req.ext::<User>().unwrap();
             if !user.check_scope("delete") {
-                return error_json!(401, "insufficient_scope", "You cannot delete posts without a `delete` scope.")
+                return error_json!(
+                    401,
+                    "insufficient_scope",
+                    "You cannot delete posts without a `delete` scope."
+                );
             }
             match form.iter().find(|(k, _)| k == "url") {
                 Some((_, url)) => {
                     if let Err(error) = req.state().storage.delete_post(&url).await {
-                        return error_json!(500, "database_error", error)
+                        return error_json!(500, "database_error", error);
                     }
-                    return Ok(Response::builder(200).build())
-                },
-                None => return error_json!(400, "invalid_request", "Please provide an `url` to delete.")
+                    return Ok(Response::builder(200).build());
+                }
+                None => {
+                    return error_json!(
+                        400,
+                        "invalid_request",
+                        "Please provide an `url` to delete."
+                    )
+                }
             }
         } else {
-            return error_json!(400, "invalid_request", "This action is not supported in form-encoded mode. (JSON requests support more actions, use JSON!)")
+            return error_json!(400, "invalid_request", "This action is not supported in form-encoded mode. (JSON requests support more actions, use JSON!)");
         }
     }
-
+
     let mf2 = convert_form_to_mf2_json(form);
     if mf2["properties"].as_object().unwrap().keys().len() > 0 {
         return new_post(req, mf2).await;
     }
-    return error_json!(400, "invalid_request", "Try sending h=entry&content=something%20interesting");
+    return error_json!(
+        400,
+        "invalid_request",
+        "Try sending h=entry&content=something%20interesting"
+    );
 }
 
 pub async fn post_handler<S: Storage>(mut req: Request<ApplicationState<S>>) -> Result {
@@ -489,29 +631,31 @@ pub async fn post_handler<S: Storage>(mut req: Request<ApplicationState<S>>) ->
         Some(value) => {
             if value == Mime::from_str("application/json").unwrap() {
                 match req.body_json::<serde_json::Value>().await {
-                    Ok(parsed) => {
-                        return process_json(req, parsed).await
-                    },
-                    Err(err) => return error_json!(
-                        400, "invalid_request",
-                        format!("Parsing JSON failed: {:?}", err)
-                    )
+                    Ok(parsed) => return process_json(req, parsed).await,
+                    Err(err) => {
+                        return error_json!(
+                            400,
+                            "invalid_request",
+                            format!("Parsing JSON failed: {:?}", err)
+                        )
+                    }
                 }
             } else if value == Mime::from_str("application/x-www-form-urlencoded").unwrap() {
                 match req.body_form::<Vec<(String, String)>>().await {
-                    Ok(parsed) => {
-                        return process_form(req, parsed).await
-                    },
-                    Err(err) => return error_json!(
-                        400, "invalid_request",
-                        format!("Parsing form failed: {:?}", err)
-                    )
+                    Ok(parsed) => return process_form(req, parsed).await,
+                    Err(err) => {
+                        return error_json!(
+                            400,
+                            "invalid_request",
+                            format!("Parsing form failed: {:?}", err)
+                        )
+                    }
                 }
             } else {
                 return error_json!(
                     415,
                     "unsupported_media_type",
                     "What's this? Try sending JSON instead. (urlencoded form also works but is less cute)"
-                )
+                );
             }
         }
         _ => {
@@ -538,9 +682,22 @@ mod tests {
             }
         });
 
-        let (uid, normalized) = normalize_mf2(mf2.clone(), &User::new("https://fireburn.ru/", "https://quill.p3k.io/", "create update media"));
-        assert_eq!(normalized["properties"]["uid"][0], mf2["properties"]["uid"][0], "UID was replaced");
-        assert_eq!(normalized["properties"]["uid"][0], uid, "Returned post location doesn't match UID");
+        let (uid, normalized) = normalize_mf2(
+            mf2.clone(),
+            &User::new(
+                "https://fireburn.ru/",
+                "https://quill.p3k.io/",
+                "create update media",
+            ),
+        );
+        assert_eq!(
+            normalized["properties"]["uid"][0], mf2["properties"]["uid"][0],
+            "UID was replaced"
+        );
+        assert_eq!(
+            normalized["properties"]["uid"][0], uid,
+            "Returned post location doesn't match UID"
+        );
     }
 
     #[test]
@@ -548,7 +705,7 @@ mod tests {
         use serde_urlencoded::from_str;
 
         assert_eq!(
-            convert_form_to_mf2_json(from_str("h=entry&content=something%20interesting").unwrap()),
+            convert_form_to_mf2_json(from_str("h=entry&content=something%20interesting").unwrap()),
             json!({
                 "type": ["h-entry"],
                 "properties": {
@@ -567,16 +724,64 @@ mod tests {
             }
        });
 
-        let (uid, post) = normalize_mf2(mf2, &User::new("https://fireburn.ru/", "https://quill.p3k.io/", "create update media"));
-        assert_eq!(post["properties"]["published"].as_array().expect("post['published'] is undefined").len(), 1, "Post doesn't have a published time");
-        DateTime::parse_from_rfc3339(post["properties"]["published"][0].as_str().unwrap()).expect("Couldn't parse date from rfc3339");
-        assert!(post["properties"]["url"].as_array().expect("post['url'] is undefined").len() > 0, "Post doesn't have any URLs");
-        assert_eq!(post["properties"]["uid"].as_array().expect("post['uid'] is undefined").len(), 1, "Post doesn't have a single UID");
-        assert_eq!(post["properties"]["uid"][0], uid, "UID of a post and its supposed location don't match");
-        assert!(uid.starts_with("https://fireburn.ru/posts/"), "The post namespace is incorrect");
-        assert_eq!(post["properties"]["content"][0]["html"].as_str().expect("Post doesn't have a rich content object").trim(), "<p>This is content!</p>", "Parsed Markdown content doesn't match expected HTML");
-        assert_eq!(post["properties"]["channel"][0], "https://fireburn.ru/feeds/main", "Post isn't posted to the main channel");
-        assert_eq!(post["properties"]["author"][0], "https://fireburn.ru/", "Post author is unknown");
+        let (uid, post) = normalize_mf2(
+            mf2,
+            &User::new(
+                "https://fireburn.ru/",
+                "https://quill.p3k.io/",
+                "create update media",
+            ),
+        );
+        assert_eq!(
+            post["properties"]["published"]
+                .as_array()
+                .expect("post['published'] is undefined")
+                .len(),
+            1,
+            "Post doesn't have a published time"
+        );
+        DateTime::parse_from_rfc3339(post["properties"]["published"][0].as_str().unwrap())
+            .expect("Couldn't parse date from rfc3339");
+        assert!(
+            post["properties"]["url"]
+                .as_array()
+                .expect("post['url'] is undefined")
+                .len()
+                > 0,
+            "Post doesn't have any URLs"
+        );
+        assert_eq!(
+            post["properties"]["uid"]
+                .as_array()
+                .expect("post['uid'] is undefined")
+                .len(),
+            1,
+            "Post doesn't have a single UID"
+        );
+        assert_eq!(
+            post["properties"]["uid"][0], uid,
+            "UID of a post and its supposed location don't match"
+        );
+        assert!(
+            uid.starts_with("https://fireburn.ru/posts/"),
+            "The post namespace is incorrect"
+        );
+        assert_eq!(
+            post["properties"]["content"][0]["html"]
+                .as_str()
+                .expect("Post doesn't have a rich content object")
+                .trim(),
+            "<p>This is content!</p>",
+            "Parsed Markdown content doesn't match expected HTML"
+        );
+        assert_eq!(
+            post["properties"]["channel"][0], "https://fireburn.ru/feeds/main",
+            "Post isn't posted to the main channel"
+        );
+        assert_eq!(
+            post["properties"]["author"][0], "https://fireburn.ru/",
+            "Post author is unknown"
+        );
     }
 
     #[test]
@@ -589,15 +794,27 @@ mod tests {
            },
        });
 
-        let (_, post) = normalize_mf2(mf2, &User::new("https://fireburn.ru/", "https://quill.p3k.io/", "create update media"));
-        assert!(post["properties"]["url"]
-            .as_array()
-            .unwrap()
-            .iter()
-            .map(|i| i.as_str().unwrap())
-            .any(|i| i == "https://fireburn.ru/posts/hello-post"),
-            "Didn't found an URL pointing to the location expected by the mp-slug semantics");
-        assert!(post["properties"]["mp-slug"].as_array().is_none(), "mp-slug wasn't deleted from the array!")
+        let (_, post) = normalize_mf2(
+            mf2,
+            &User::new(
+                "https://fireburn.ru/",
+                "https://quill.p3k.io/",
+                "create update media",
+            ),
+        );
+        assert!(
+            post["properties"]["url"]
+                .as_array()
+                .unwrap()
+                .iter()
+                .map(|i| i.as_str().unwrap())
+                .any(|i| i == "https://fireburn.ru/posts/hello-post"),
+            "Didn't found an URL pointing to the location expected by the mp-slug semantics"
+        );
+        assert!(
+            post["properties"]["mp-slug"].as_array().is_none(),
+            "mp-slug wasn't deleted from the array!"
+        )
     }
 
     #[test]
@@ -610,16 +827,31 @@ mod tests {
            }
        });
 
-        let (uid, post) = normalize_mf2(mf2, &User::new("https://fireburn.ru/", "https://quill.p3k.io/", "create update media"));
-        assert_eq!(post["properties"]["uid"][0], uid, "UID of a post and its supposed location don't match");
+        let (uid, post) = normalize_mf2(
+            mf2,
+            &User::new(
+                "https://fireburn.ru/",
+                "https://quill.p3k.io/",
+                "create update media",
+            ),
+        );
+        assert_eq!(
+            post["properties"]["uid"][0], uid,
+            "UID of a post and its supposed location don't match"
+        );
         assert_eq!(post["properties"]["author"][0], "https://fireburn.ru/");
-        assert!(post["properties"]["url"]
-            .as_array()
-            .unwrap()
-            .iter()
-            .map(|i| i.as_str().unwrap())
-            .any(|i| i == "https://fireburn.ru/feeds/main"),
-            "Didn't found an URL pointing to the location expected by the mp-slug semantics");
-        assert!(post["properties"]["mp-slug"].as_array().is_none(), "mp-slug wasn't deleted from the array!")
+        assert!(
+            post["properties"]["url"]
+                .as_array()
+                .unwrap()
+                .iter()
+                .map(|i| i.as_str().unwrap())
+                .any(|i| i == "https://fireburn.ru/feeds/main"),
+            "Didn't found an URL pointing to the location expected by the mp-slug semantics"
+        );
+        assert!(
+            post["properties"]["mp-slug"].as_array().is_none(),
+            "mp-slug wasn't deleted from the array!"
+        )
     }
-}
\ No newline at end of file
+}