From 949a961c19ba994c3f0846e7b54d9a55a94d7b9a Mon Sep 17 00:00:00 2001
From: Vika
Date: Thu, 29 Jul 2021 18:14:30 +0300
Subject: Appease rustfmt, clippy and cargo check

---
 src/micropub/post.rs | 116 +++++++++++++++++++++++++++++----------------------
 1 file changed, 65 insertions(+), 51 deletions(-)

diff --git a/src/micropub/post.rs b/src/micropub/post.rs
index 8667451..f317da5 100644
--- a/src/micropub/post.rs
+++ b/src/micropub/post.rs
@@ -6,7 +6,7 @@ use core::iter::Iterator;
 use futures::stream;
 use futures::StreamExt;
 use http_types::Mime;
-use log::{error, warn, info};
+use log::{error, info, warn};
 use newbase60::num_to_sxg;
 use std::convert::TryInto;
 use std::str::FromStr;
@@ -172,9 +172,9 @@ pub async fn new_post(
     // where several users or identities share one Micropub server
     // (maybe a family website or a shitpost sideblog?)
     if !post["properties"]["uid"][0]
-            .as_str()
-            .unwrap()
-            .starts_with(user.me.as_str())
+        .as_str()
+        .unwrap()
+        .starts_with(user.me.as_str())
         || post["properties"]["channel"]
             .as_array()
             .unwrap()
@@ -430,52 +430,60 @@ async fn post_process_new_post(
     // We'll need the bodies here to get their endpoints
     let source = &uid;
     stream::iter(posts_with_bodies.into_iter())
-        .filter_map(|(url, response, body): (surf::Url, surf::Response, String)| async move {
-            // Check Link headers first
-            // the first webmention endpoint will be returned
-            if let Some(values) = response.header("Link") {
-                let mut iter = values.iter().flat_map(|i| i.as_str().split(','));
-
-                for link in iter {
-                    let mut split = link.split(";");
-
-                    match split.next() {
-                        Some(uri) => {
-                            if let Some(uri) = uri.strip_prefix('<') {
-                                if let Some(uri) = uri.strip_suffix('>') {
-                                    for prop in split {
-                                        let lowercased = prop.to_ascii_lowercase();
-                                        if &lowercased == "rel=\"webmention\"" || &lowercased == "rel=webmention" {
-                                            if let Ok(endpoint) = url.join(uri) {
-                                                return Some((url, endpoint));
+        .filter_map(
+            |(url, response, body): (surf::Url, surf::Response, String)| async move {
+                // Check Link headers first
+                // the first webmention endpoint will be returned
+                if let Some(values) = response.header("Link") {
+                    let iter = values.iter().flat_map(|i| i.as_str().split(','));
+
+                    for link in iter {
+                        let mut split = link.split(';');
+
+                        match split.next() {
+                            Some(uri) => {
+                                if let Some(uri) = uri.strip_prefix('<') {
+                                    if let Some(uri) = uri.strip_suffix('>') {
+                                        for prop in split {
+                                            let lowercased = prop.to_ascii_lowercase();
+                                            if &lowercased == "rel=\"webmention\""
+                                                || &lowercased == "rel=webmention"
+                                            {
+                                                if let Ok(endpoint) = url.join(uri) {
+                                                    return Some((url, endpoint));
+                                                }
                                             }
                                         }
                                     }
                                 }
                             }
-                        },
-                        None => continue
+                            None => continue,
+                        }
                     }
                 }
-            }
-            // TODO: Replace this function once the MF2 parser is ready
-            // A compliant parser's output format includes rels,
-            // we could just find a Webmention one in there
-            let pattern = easy_scraper::Pattern::new(r#""#)
-                .expect("Pattern for webmentions couldn't be parsed");
-            let matches = pattern.matches(&body);
-            if matches.is_empty() {
-                return None;
-            }
-            let endpoint = &matches[0]["url"];
-            if let Ok(endpoint) = url.join(endpoint) {
-                Some((url, endpoint))
-            } else {
-                None
-            }
-        })
+                // TODO: Replace this function once the MF2 parser is ready
+                // A compliant parser's output format includes rels,
+                // we could just find a Webmention one in there
+                let pattern = easy_scraper::Pattern::new(r#""#)
+                    .expect("Pattern for webmentions couldn't be parsed");
+                let matches = pattern.matches(&body);
+                if matches.is_empty() {
+                    return None;
+                }
+                let endpoint = &matches[0]["url"];
+                if let Ok(endpoint) = url.join(endpoint) {
+                    Some((url, endpoint))
+                } else {
+                    None
+                }
+            },
+        )
         .map(|(target, endpoint)| async move {
-            info!("Sending webmention to {} about {}", source, &target.to_string());
+            info!(
+                "Sending webmention to {} about {}",
+                source,
+                &target.to_string()
+            );
             let response = http
                 .post(&endpoint)
                 .content_type("application/x-www-form-urlencoded")
@@ -543,12 +551,14 @@ async fn process_json(
             // authorization endpoint is supposed to reject any auth request trying to get this
             // scope. It is intended for TRUSTED external services that need to modify the
            // database while ignoring any access controls
-            if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str() && !user.check_scope("kittybox_internal:do_what_thou_wilt") {
+            if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str()
+                && !user.check_scope("kittybox_internal:do_what_thou_wilt")
+            {
                 return error_json!(
                     403,
                     "forbidden",
                     "You're not allowed to delete someone else's posts."
-                )
+                );
             }
             if let Err(error) = req.state().storage.delete_post(&url).await {
                 return Ok(error.into());
@@ -563,12 +573,14 @@ async fn process_json(
                     "You need an `update` scope to update posts."
                 );
             }
-            if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str() && !user.check_scope("kittybox_internal:do_what_thou_wilt") {
+            if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str()
+                && !user.check_scope("kittybox_internal:do_what_thou_wilt")
+            {
                 return error_json!(
                     403,
                     "forbidden",
                     "You're not allowed to delete someone else's posts."
-                )
+                );
             }
             if let Err(error) = req.state().storage.update_post(&url, body.clone()).await {
                 Ok(error.into())
@@ -637,12 +649,15 @@ async fn process_form(
             }
             match form.iter().find(|(k, _)| k == "url") {
                 Some((_, url)) => {
-                    if (url::Url::parse(url)?.origin().ascii_serialization() + "/") != user.me.as_str() && !user.check_scope("kittybox_internal:do_what_thou_wilt") {
+                    if (url::Url::parse(url)?.origin().ascii_serialization() + "/")
+                        != user.me.as_str()
+                        && !user.check_scope("kittybox_internal:do_what_thou_wilt")
+                    {
                         return error_json!(
                             403,
                             "forbidden",
                             "You're not allowed to delete someone else's posts."
-                        )
+                        );
                     }
                     if let Err(error) = req.state().storage.delete_post(&url).await {
                         return error_json!(500, "database_error", error);
@@ -791,11 +806,10 @@ mod tests {
         DateTime::parse_from_rfc3339(post["properties"]["published"][0].as_str().unwrap())
             .expect("Couldn't parse date from rfc3339");
         assert!(
-            post["properties"]["url"]
+            !post["properties"]["url"]
                 .as_array()
                 .expect("post['url'] is undefined")
-                .len()
-                > 0,
+                .is_empty(),
             "Post doesn't have any URLs"
         );
         assert_eq!(
--
cgit 1.4.1
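
Beyond rustfmt's re-wrapping, the non-whitespace changes in this patch are small clippy and cargo check fixes: an unused `mut` binding dropped, `split(';')` taking a `char` instead of a one-character string, and a `.len() > 0` comparison replaced with `!...is_empty()`. A minimal standalone sketch of the same idioms (the sample Link header value and all names below are invented for illustration and are not taken from the patch):

    // Illustrates the clippy idioms applied above, on made-up data.
    fn main() {
        // A fabricated Link header value, only for demonstration.
        let header = "<https://example.com/webmention>; rel=\"webmention\"";

        // clippy::single_char_pattern: pass a char to `split`, not a one-character string.
        let mut params = header.split(';');
        let target = params.next();

        // cargo check's unused_mut warning: bind without `mut` when the value is never mutated.
        let rels: Vec<&str> = params.collect();

        // clippy::len_zero: prefer `!is_empty()` over comparing `len() > 0`.
        assert!(!rels.is_empty(), "expected at least one rel parameter");
        println!("{:?} -> {:?}", target, rels);
    }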